From f7ab1f14455886b6ea6bc417298d0517f38093a4 Mon Sep 17 00:00:00 2001 From: "Daniel J. Summers" Date: Fri, 4 Apr 2025 20:25:25 -0400 Subject: [PATCH 01/22] Add Json to Postgres WithProps --- src/Postgres/Library.fs | 61 ++++ src/Postgres/WithProps.fs | 719 +++++++++++++++++++++++++++++++------- 2 files changed, 648 insertions(+), 132 deletions(-) diff --git a/src/Postgres/Library.fs b/src/Postgres/Library.fs index edf03af..97cb339 100644 --- a/src/Postgres/Library.fs +++ b/src/Postgres/Library.fs @@ -1,5 +1,9 @@ namespace BitBadger.Documents.Postgres +open System.IO +open System.Text +open Npgsql.FSharp + /// The type of index to generate for the document [] type DocumentIndex = @@ -302,3 +306,60 @@ module Results = [] let toExists (row: RowReader) = row.bool "it" + + /// Extract a JSON document, specifying the field in which the document is found + /// The field name containing the JSON document + /// A row reader set to the row with the document to be extracted + /// The JSON from the given field (an empty object if no field exists) + [] + let jsonFromDocument field (row: RowReader) = + row.stringOrNone field |> Option.defaultValue "{}" + + /// Extract a JSON document + /// A row reader set to the row with the document to be extracted + /// The JSON from the row (an empty object if no field exists) + [] + let jsonFromData row = + jsonFromDocument "data" row + + /// Create a JSON array of items for the results of a query + /// The mapping function to extract JSON from the query's results + /// The query from which JSON should be extracted + /// A JSON array as a string; no results will produce an empty array ("[]") + [] + let toJsonArray (mapFunc: RowReader -> string) sqlProps = + let output = StringBuilder("[") + sqlProps + |> Sql.iter (fun it -> + if output.Length > 2 then ignore (output.Append ",") + mapFunc it |> output.Append |> ignore) + output.Append("]").ToString() + + /// Create a JSON array of items for the results of a query + /// The mapping function to extract JSON from the query's results + /// The query from which JSON should be extracted + /// A JSON array as a string; no results will produce an empty array ("[]") + let ToJsonArray(mapFunc: System.Func, sqlProps) = + toJsonArray mapFunc.Invoke sqlProps + + /// Write a JSON array of items for the results of a query to the given StreamWriter + /// The StreamWriter to which results should be written + /// The mapping function to extract JSON from the query's results + /// The query from which JSON should be extracted + [] + let writeJsonArray (writer: StreamWriter) (mapFunc: RowReader -> string) sqlProps = + writer.Write "[" + let mutable isFirst = true + sqlProps + |> Sql.iter (fun it -> + if isFirst then isFirst <- false else writer.Write "," + mapFunc it |> writer.Write) + writer.Write "]" + + /// Write a JSON array of items for the results of a query to the given StreamWriter + /// The StreamWriter to which results should be written + /// The mapping function to extract JSON from the query's results + /// The query from which JSON should be extracted + let WriteJsonArray(writer, mapFunc: System.Func, sqlProps) = + writeJsonArray writer mapFunc.Invoke sqlProps + \ No newline at end of file diff --git a/src/Postgres/WithProps.fs b/src/Postgres/WithProps.fs index da7bc86..fcb7360 100644 --- a/src/Postgres/WithProps.fs +++ b/src/Postgres/WithProps.fs @@ -1,6 +1,7 @@ /// Versions of queries that accept SqlProps as the last parameter module BitBadger.Documents.Postgres.WithProps +open System.IO open BitBadger.Documents open 
Npgsql.FSharp @@ -14,7 +15,7 @@ module Custom = /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// A list of results for the given query [] let list<'TDoc> query parameters (mapFunc: RowReader -> 'TDoc) sqlProps = @@ -26,22 +27,64 @@ module Custom = /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// A list of results for the given query let List<'TDoc>(query, parameters, mapFunc: System.Func, sqlProps) = backgroundTask { let! results = list<'TDoc> query parameters mapFunc.Invoke sqlProps return ResizeArray results } + /// Execute a query that returns a JSON array of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function between the document and the domain item + /// The SqlProps to use to execute the query + /// A JSON array of results for the given query + [] + let jsonArray query parameters (mapFunc: RowReader -> string) sqlProps = + Sql.query query sqlProps + |> Sql.parameters (FSharpList.ofSeq parameters) + |> toJsonArray mapFunc + + /// Execute a query that returns a JSON array of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function between the document and the domain item + /// The SqlProps to use to execute the query + /// A JSON array of results for the given query + let JsonArray(query, parameters, mapFunc: System.Func, sqlProps) = + jsonArray query parameters mapFunc.Invoke sqlProps + + /// Execute a query, writing its results to the given StreamWriter + /// The query to retrieve the results + /// Parameters to use for the query + /// The StreamWriter to which the results should be written + /// The mapping function between the document and the domain item + /// The SqlProps to use to execute the query + [] + let writeJsonArray query parameters writer (mapFunc: RowReader -> string) sqlProps = + Sql.query query sqlProps + |> Sql.parameters (FSharpList.ofSeq parameters) + |> writeJsonArray writer mapFunc + + /// Execute a query, writing its results to the given StreamWriter + /// The query to retrieve the results + /// Parameters to use for the query + /// The StreamWriter to which the results should be written + /// The mapping function between the document and the domain item + /// The SqlProps to use to execute the query + let WriteJsonArray(query, parameters, writer, mapFunc: System.Func, sqlProps) = + writeJsonArray query parameters writer mapFunc.Invoke sqlProps + /// Execute a query that returns one or no results /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// The SqlProps to use to execute the query - /// Some with the first matching result, or None if not found + /// The SqlProps to use to execute the query + /// Some with the first matching result, or None if not found [] let single<'TDoc> query parameters mapFunc sqlProps = backgroundTask { - let! results = list<'TDoc> query parameters mapFunc sqlProps + let! 
results = list<'TDoc> $"{query} LIMIT 1" parameters mapFunc sqlProps return FSharpList.tryHead results } @@ -49,18 +92,38 @@ module Custom = /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// The SqlProps to use to execute the query - /// The first matching result, or null if not found + /// The SqlProps to use to execute the query + /// The first matching result, or null if not found let Single<'TDoc when 'TDoc: null and 'TDoc: not struct>( query, parameters, mapFunc: System.Func, sqlProps) = backgroundTask { let! result = single<'TDoc> query parameters mapFunc.Invoke sqlProps return Option.toObj result } + /// Execute a query that returns one or no JSON documents + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function between the document and the domain item + /// The SqlProps to use to execute the query + /// The JSON document with the first matching result, or an empty document if not found + [] + let jsonSingle query parameters mapFunc sqlProps = + let results = jsonArray $"%s{query} LIMIT 1" parameters mapFunc sqlProps + if results = "[]" then "{}" else results[1..results.Length - 2] + + /// Execute a query that returns one or no JSON documents + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function between the document and the domain item + /// The SqlProps to use to execute the query + /// The JSON document with the first matching result, or an empty document if not found + let JsonSingle(query, parameters, mapFunc: System.Func, sqlProps) = + jsonSingle query parameters mapFunc.Invoke sqlProps + /// Execute a query that returns no results /// The query to retrieve the results /// Parameters to use for the query - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let nonQuery query parameters sqlProps = Sql.query query sqlProps @@ -72,7 +135,7 @@ module Custom = /// The query to retrieve the value /// Parameters to use for the query /// The mapping function to obtain the value - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// The scalar value for the query [] let scalar<'T when 'T: struct> query parameters (mapFunc: RowReader -> 'T) sqlProps = @@ -84,7 +147,7 @@ module Custom = /// The query to retrieve the value /// Parameters to use for the query /// The mapping function to obtain the value - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// The scalar value for the query let Scalar<'T when 'T: struct>(query, parameters, mapFunc: System.Func, sqlProps) = scalar<'T> query parameters mapFunc.Invoke sqlProps @@ -94,7 +157,7 @@ module Definition = /// Create a document table /// The table whose existence should be ensured (may include schema) - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let ensureTable name sqlProps = backgroundTask { do! 
Custom.nonQuery (Query.Definition.ensureTable name) [] sqlProps @@ -104,7 +167,7 @@ module Definition = /// Create an index on documents in the specified table /// The table to be indexed (may include schema) /// The type of document index to create - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let ensureDocumentIndex name idxType sqlProps = Custom.nonQuery (Query.Definition.ensureDocumentIndex name idxType) [] sqlProps @@ -113,7 +176,7 @@ module Definition = /// The table to be indexed (may include schema) /// The name of the index to create /// One or more fields to be indexed - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let ensureFieldIndex tableName indexName fields sqlProps = Custom.nonQuery (Query.Definition.ensureIndexOn tableName indexName fields PostgreSQL) [] sqlProps @@ -125,7 +188,7 @@ module Document = /// Insert a new document /// The table into which the document should be inserted (may include schema) /// The document to be inserted - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let insert<'TDoc> tableName (document: 'TDoc) sqlProps = let query = @@ -149,7 +212,7 @@ module Document = /// Save a document, inserting it if it does not exist and updating it if it does (AKA "upsert") /// The table into which the document should be saved (may include schema) /// The document to be saved - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let save<'TDoc> tableName (document: 'TDoc) sqlProps = Custom.nonQuery (Query.save tableName) [ jsonParam "@data" document ] sqlProps @@ -160,37 +223,37 @@ module Count = /// Count all documents in a table /// The table in which documents should be counted (may include schema) - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// The count of the documents in the table [] let all tableName sqlProps = Custom.scalar (Query.count tableName) [] toCount sqlProps - /// Count matching documents using JSON field comparisons (->> =, etc.) + /// Count matching documents using JSON field comparisons (->> =, etc.) /// The table in which documents should be counted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// The count of matching documents in the table [] let byFields tableName howMatched fields sqlProps = Custom.scalar (Query.byFields (Query.count tableName) howMatched fields) (addFieldParams fields []) toCount sqlProps - /// Count matching documents using a JSON containment query (@>) + /// Count matching documents using a JSON containment query (@>) /// The table in which documents should be counted (may include schema) /// The document to match with the containment query - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// The count of the documents in the table [] let byContains tableName (criteria: 'TContains) sqlProps = Custom.scalar (Query.byContains (Query.count tableName)) [ jsonParam "@criteria" criteria ] toCount sqlProps - /// Count matching documents using a JSON Path match query (@?) + /// Count matching documents using a JSON Path match query (@?) 
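// --- Editor's sketch (not part of the patch): minimal usage of the Count functions above.
// The "customers" table, connection string, and criteria are assumed names; Sql.connect is
// Npgsql.FSharp's helper for building SqlProps from a connection string.
open Npgsql.FSharp
open BitBadger.Documents
open BitBadger.Documents.Postgres

let countSketch () = task {
    let props  = Sql.connect "Host=localhost;Database=docs;Username=app"
    // Count every document in the table
    let! total = WithProps.Count.all "customers" props
    // Count only documents whose data contains {"Region": "NY"} (the @> operator)
    let! inNY  = WithProps.Count.byContains "customers" {| Region = "NY" |} props
    printfn "%d of %d customers are in NY" inNY total
}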
/// The table in which documents should be counted (may include schema) /// The JSON Path expression to be matched - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// The count of the documents in the table [] let byJsonPath tableName jsonPath sqlProps = @@ -204,17 +267,17 @@ module Exists = /// Determine if a document exists for the given ID /// The table in which existence should be checked (may include schema) /// The ID of the document whose existence should be checked - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// True if a document exists, false if not [] let byId tableName (docId: 'TKey) sqlProps = Custom.scalar (Query.exists tableName (Query.whereById docId)) [ idParam docId ] toExists sqlProps - /// Determine if a document exists using JSON field comparisons (->> =, etc.) + /// Determine if a document exists using JSON field comparisons (->> =, etc.) /// The table in which existence should be checked (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// True if any matching documents exist, false if not [] let byFields tableName howMatched fields sqlProps = @@ -224,10 +287,10 @@ module Exists = toExists sqlProps - /// Determine if a document exists using a JSON containment query (@>) + /// Determine if a document exists using a JSON containment query (@>) /// The table in which existence should be checked (may include schema) /// The document to match with the containment query - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// True if any matching documents exist, false if not [] let byContains tableName (criteria: 'TContains) sqlProps = @@ -237,10 +300,10 @@ module Exists = toExists sqlProps - /// Determine if a document exists using a JSON Path match query (@?) + /// Determine if a document exists using a JSON Path match query (@?) 
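// Editor's sketch (assumed table and ID, continuing the setup above): the Exists functions
// return a plain boolean, making pre-checks cheap compared to fetching the document itself.
let existsSketch props = task {
    // EXISTS ... WHERE the ID column matches @id
    let! byKey = WithProps.Exists.byId "customers" "cust-123" props
    // EXISTS ... WHERE data @> @criteria
    let! byDoc = WithProps.Exists.byContains "customers" {| Region = "NY" |} props
    return byKey, byDoc
}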
/// The table in which existence should be checked (may include schema) /// The JSON Path expression to be matched - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// True if any matching documents exist, false if not [] let byJsonPath tableName jsonPath sqlProps = @@ -250,13 +313,13 @@ module Exists = toExists sqlProps -/// Commands to retrieve documents +/// Commands to retrieve documents as domain objects [] module Find = /// Retrieve all documents in the given table /// The table from which documents should be retrieved (may include schema) - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents from the given table [] let all<'TDoc> tableName sqlProps = @@ -264,7 +327,7 @@ module Find = /// Retrieve all documents in the given table /// The table from which documents should be retrieved (may include schema) - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents from the given table let All<'TDoc>(tableName, sqlProps) = Custom.List<'TDoc>(Query.find tableName, [], fromData<'TDoc>, sqlProps) @@ -272,7 +335,7 @@ module Find = /// Retrieve all documents in the given table ordered by the given fields in the document /// The table from which documents should be retrieved (may include schema) /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents from the given table, ordered by the given fields [] let allOrdered<'TDoc> tableName orderFields sqlProps = @@ -281,7 +344,7 @@ module Find = /// Retrieve all documents in the given table ordered by the given fields in the document /// The table from which documents should be retrieved (may include schema) /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents from the given table, ordered by the given fields let AllOrdered<'TDoc>(tableName, orderFields, sqlProps) = Custom.List<'TDoc>( @@ -290,8 +353,8 @@ module Find = /// Retrieve a document by its ID /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve - /// The SqlProps to use to execute the query - /// Some with the document if found, None otherwise + /// The SqlProps to use to execute the query + /// Some with the document if found, None otherwise [] let byId<'TKey, 'TDoc> tableName (docId: 'TKey) sqlProps = Custom.single (Query.byId (Query.find tableName) docId) [ idParam docId ] fromData<'TDoc> sqlProps @@ -299,17 +362,17 @@ module Find = /// Retrieve a document by its ID /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve - /// The SqlProps to use to execute the query - /// The document if found, null otherwise + /// The SqlProps to use to execute the query + /// The document if found, null otherwise let ById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, docId: 'TKey, sqlProps) = Custom.Single<'TDoc>( Query.byId (Query.find tableName) docId, [ idParam docId ], fromData<'TDoc>, sqlProps) - /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
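// Editor's sketch: Find materializes documents into domain types via the library's fromData
// deserializer. Customer is a hypothetical record type used in the sketches that follow.
type Customer = { Id: string; Name: string; Region: string }

let findSketch props = task {
    let! everyone = WithProps.Find.all<Customer> "customers" props
    let! one      = WithProps.Find.byId<string, Customer> "customers" "cust-123" props
    return everyone, one   // Customer list * Customer option
}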
/// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given fields [] let byFields<'TDoc> tableName howMatched fields sqlProps = @@ -319,11 +382,11 @@ module Find = fromData<'TDoc> sqlProps - /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// Retrieve documents matching JSON field comparisons (->> =, etc.) /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given fields let ByFields<'TDoc>(tableName, howMatched, fields, sqlProps) = Custom.List<'TDoc>( @@ -333,14 +396,14 @@ module Find = sqlProps) /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in /// the document /// /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given fields, ordered by the other given fields [] let byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields sqlProps = @@ -351,14 +414,14 @@ module Find = sqlProps /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in - /// the document + /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
ordered by the given fields in the + /// document /// /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given fields, ordered by the other given fields let ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, sqlProps) = Custom.List<'TDoc>( @@ -367,20 +430,20 @@ module Find = fromData<'TDoc>, sqlProps) - /// Retrieve documents matching a JSON containment query (@>) + /// Retrieve documents matching a JSON containment query (@>) /// The table from which documents should be retrieved (may include schema) /// The document to match with the containment query - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given containment query [] let byContains<'TDoc> tableName (criteria: obj) sqlProps = Custom.list<'TDoc> (Query.byContains (Query.find tableName)) [ jsonParam "@criteria" criteria ] fromData<'TDoc> sqlProps - /// Retrieve documents matching a JSON containment query (@>) + /// Retrieve documents matching a JSON containment query (@>) /// The table from which documents should be retrieved (may include schema) /// The document to match with the containment query - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given containment query let ByContains<'TDoc>(tableName, criteria: obj, sqlProps) = Custom.List<'TDoc>( @@ -390,13 +453,12 @@ module Find = sqlProps) /// - /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the - /// document + /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) /// The document to match with the containment query /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given containment query, ordered by the given fields [] let byContainsOrdered<'TDoc> tableName (criteria: obj) orderFields sqlProps = @@ -407,13 +469,12 @@ module Find = sqlProps /// - /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the - /// document + /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) /// The document to match with the containment query /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given containment query, ordered by the given fields let ByContainsOrdered<'TDoc>(tableName, criteria: obj, orderFields, sqlProps) = Custom.List<'TDoc>( @@ -422,20 +483,20 @@ module Find = fromData<'TDoc>, sqlProps) - /// Retrieve documents matching a JSON Path match query (@?) + /// Retrieve documents matching a JSON Path match query (@?) 
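// Editor's sketch: containment vs. JSON Path retrieval, reusing the Customer type from the
// earlier sketch. Field.Named (used for the ORDER BY clause) is assumed from the core
// BitBadger.Documents package, and the JSON Path expression is illustrative only.
let containsSketch props = task {
    // WHERE data @> '{"Region":"NY"}' ORDER BY data->>'Name'
    let! ny =
        WithProps.Find.byContainsOrdered<Customer>
            "customers" {| Region = "NY" |} [ Field.Named "Name" ] props
    // WHERE data @? '$.Orders[*] ? (@ > 5)'
    let! big = WithProps.Find.byJsonPath<Customer> "customers" "$.Orders[*] ? (@ > 5)" props
    return ny, big
}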
/// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given JSON Path expression [] let byJsonPath<'TDoc> tableName jsonPath sqlProps = Custom.list<'TDoc> (Query.byPathMatch (Query.find tableName)) [ "@path", Sql.string jsonPath ] fromData<'TDoc> sqlProps - /// Retrieve documents matching a JSON Path match query (@?) + /// Retrieve documents matching a JSON Path match query (@?) /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given JSON Path expression let ByJsonPath<'TDoc>(tableName, jsonPath, sqlProps) = Custom.List<'TDoc>( @@ -445,12 +506,12 @@ module Find = sqlProps) /// - /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document + /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given JSON Path expression, ordered by the given fields [] let byJsonPathOrdered<'TDoc> tableName jsonPath orderFields sqlProps = @@ -461,12 +522,12 @@ module Find = sqlProps /// - /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document + /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query /// All documents matching the given JSON Path expression, ordered by the given fields let ByJsonPathOrdered<'TDoc>(tableName, jsonPath, orderFields, sqlProps) = Custom.List<'TDoc>( @@ -475,12 +536,12 @@ module Find = fromData<'TDoc>, sqlProps) - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqlProps to use to execute the query - /// Some with the first document, or None if not found + /// The SqlProps to use to execute the query + /// Some with the first document, or None if not found [] let firstByFields<'TDoc> tableName howMatched fields sqlProps = Custom.single<'TDoc> @@ -489,12 +550,12 @@ module Find = fromData<'TDoc> sqlProps - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
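// Editor's sketch: the F# "first" variants return an Option rather than null. Field.Equal and
// FieldMatch.Any are assumed from the core package; table and field names are illustrative.
let firstSketch props = task {
    match! WithProps.Find.firstByFields<Customer>
               "customers" FieldMatch.Any [ Field.Equal "Region" "NY" ] props with
    | Some c -> printfn "Found %s" c.Name
    | None   -> printfn "No matching customer"
}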
/// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqlProps to use to execute the query - /// The first document, or null if not found + /// The SqlProps to use to execute the query + /// The first document, or null if not found let FirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, howMatched, fields, sqlProps) = Custom.Single<'TDoc>( $"{Query.byFields (Query.find tableName) howMatched fields} LIMIT 1", @@ -503,17 +564,15 @@ module Find = sqlProps) /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given /// fields in the document /// /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query - /// - /// Some with the first document ordered by the given fields, or None if not found - /// + /// The SqlProps to use to execute the query + /// Some with the first document ordered by the given fields, or None if not found [] let firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields sqlProps = Custom.single<'TDoc> @@ -523,15 +582,15 @@ module Find = sqlProps /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given /// fields in the document /// /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query - /// The first document ordered by the given fields, or null if not found + /// The SqlProps to use to execute the query + /// The first document ordered by the given fields, or null if not found let FirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( tableName, howMatched, queryFields, orderFields, sqlProps) = Custom.Single<'TDoc>( @@ -540,11 +599,11 @@ module Find = fromData<'TDoc>, sqlProps) - /// Retrieve the first document matching a JSON containment query (@>) + /// Retrieve the first document matching a JSON containment query (@>) /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query - /// The SqlProps to use to execute the query - /// Some with the first document, or None if not found + /// The SqlProps to use to execute the query + /// Some with the first document, or None if not found [] let firstByContains<'TDoc> tableName (criteria: obj) sqlProps = Custom.single<'TDoc> @@ -553,11 +612,11 @@ module Find = fromData<'TDoc> sqlProps - /// Retrieve the first document matching a JSON containment query (@>) + /// Retrieve the first document matching a JSON containment query (@>) /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query - /// The SqlProps to use to execute the query - /// The first document, or null if not found + /// The SqlProps to use to execute the query + /// The first document, or null if not found let FirstByContains<'TDoc when 'TDoc: null and 'TDoc: 
not struct>(tableName, criteria: obj, sqlProps) = Custom.Single<'TDoc>( $"{Query.byContains (Query.find tableName)} LIMIT 1", @@ -566,16 +625,14 @@ module Find = sqlProps) /// - /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in - /// the document + /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in the + /// document /// /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query - /// - /// Some with the first document ordered by the given fields, or None if not found - /// + /// The SqlProps to use to execute the query + /// Some with the first document ordered by the given fields, or None if not found [] let firstByContainsOrdered<'TDoc> tableName (criteria: obj) orderFields sqlProps = Custom.single<'TDoc> @@ -585,14 +642,14 @@ module Find = sqlProps /// - /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in - /// the document + /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in the + /// document /// /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query - /// The first document ordered by the given fields, or null if not found + /// The SqlProps to use to execute the query + /// The first document ordered by the given fields, or null if not found let FirstByContainsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( tableName, criteria: obj, orderFields, sqlProps) = Custom.Single<'TDoc>( @@ -601,11 +658,11 @@ module Find = fromData<'TDoc>, sqlProps) - /// Retrieve the first document matching a JSON Path match query (@?) + /// Retrieve the first document matching a JSON Path match query (@?) /// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match - /// The SqlProps to use to execute the query - /// Some with the first document, or None if not found + /// The SqlProps to use to execute the query + /// Some with the first document, or None if not found [] let firstByJsonPath<'TDoc> tableName jsonPath sqlProps = Custom.single<'TDoc> @@ -614,11 +671,11 @@ module Find = fromData<'TDoc> sqlProps - /// Retrieve the first document matching a JSON Path match query (@?) + /// Retrieve the first document matching a JSON Path match query (@?) /// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match - /// The SqlProps to use to execute the query - /// The first document, or null if not found + /// The SqlProps to use to execute the query + /// The first document, or null if not found let FirstByJsonPath<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, jsonPath, sqlProps) = Custom.Single<'TDoc>( $"{Query.byPathMatch (Query.find tableName)} LIMIT 1", @@ -627,16 +684,14 @@ module Find = sqlProps) /// - /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the + /// Retrieve the first document matching a JSON Path match query (@?) 
ordered by the given fields in the /// document /// /// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query - /// - /// Some with the first document ordered by the given fields, or None if not found - /// + /// The SqlProps to use to execute the query + /// Some with the first document ordered by the given fields, or None if not found [] let firstByJsonPathOrdered<'TDoc> tableName jsonPath orderFields sqlProps = Custom.single<'TDoc> @@ -646,14 +701,14 @@ module Find = sqlProps /// - /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the + /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the /// document /// /// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match /// Fields by which the results should be ordered - /// The SqlProps to use to execute the query - /// The first document ordered by the given fields, or null if not found + /// The SqlProps to use to execute the query + /// The first document ordered by the given fields, or null if not found let FirstByJsonPathOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( tableName, jsonPath, orderFields, sqlProps) = Custom.Single<'TDoc>( @@ -662,6 +717,406 @@ module Find = fromData<'TDoc>, sqlProps) +/// Commands to retrieve documents as JSON +[] +module Json = + + /// Retrieve all documents in the given table as a JSON array + /// The table from which documents should be retrieved (may include schema) + /// The SqlProps to use to execute the query + /// All documents from the given table as a JSON array + [] + let all tableName sqlProps = + Custom.jsonArray (Query.find tableName) [] jsonFromData sqlProps + + /// Write all documents in the given table to the given StreamWriter + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The SqlProps to use to execute the query + /// All documents from the given table as a JSON array + [] + let writeAll tableName writer sqlProps = + Custom.writeJsonArray (Query.find tableName) [] writer jsonFromData sqlProps + + /// + /// Retrieve all documents in the given table as a JSON array, ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + /// All documents from the given table as a JSON array, ordered by the given fields + [] + let allOrdered tableName orderFields sqlProps = + Custom.jsonArray (Query.find tableName + Query.orderBy orderFields PostgreSQL) [] jsonFromData sqlProps + + /// + /// Write all documents in the given table to the given StreamWriter, ordered by the given fields in the + /// document + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + /// All documents from the given table as a JSON array, ordered by the given fields + [] + let writeAllOrdered tableName writer orderFields sqlProps = + Custom.writeJsonArray + (Query.find tableName + Query.orderBy orderFields PostgreSQL) [] writer jsonFromData sqlProps + + /// 
Retrieve a JSON document by its ID + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The SqlProps to use to execute the query + /// The JSON document if found, an empty JSON document otherwise + [] + let byId<'TKey> tableName (docId: 'TKey) sqlProps = + Custom.jsonSingle (Query.byId (Query.find tableName) docId) [ idParam docId ] jsonFromData sqlProps + + /// Write a JSON document to the given StreamWriter by its ID + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The ID of the document to retrieve + /// The SqlProps to use to execute the query + [] + let writeById<'TKey> tableName (writer: StreamWriter) (docId: 'TKey) sqlProps = + byId tableName docId sqlProps |> writer.Write + + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqlProps to use to execute the query + /// All JSON documents matching the given fields + [] + let byFields tableName howMatched fields sqlProps = + Custom.jsonArray + (Query.byFields (Query.find tableName) howMatched fields) (addFieldParams fields []) jsonFromData sqlProps + + /// + /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.) + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqlProps to use to execute the query + [] + let writeByFields tableName writer howMatched fields sqlProps = + Custom.writeJsonArray + (Query.byFields (Query.find tableName) howMatched fields) + (addFieldParams fields []) + writer + jsonFromData + sqlProps + + /// + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) ordered by the given fields + /// in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + /// All JSON documents matching the given fields, ordered by the other given fields + [] + let byFieldsOrdered tableName howMatched queryFields orderFields sqlProps = + Custom.jsonArray + (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields PostgreSQL) + (addFieldParams queryFields []) + jsonFromData + sqlProps + + /// + /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.) 
+ /// ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + [] + let writeByFieldsOrdered tableName writer howMatched queryFields orderFields sqlProps = + Custom.writeJsonArray + (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields PostgreSQL) + (addFieldParams queryFields []) + writer + jsonFromData + sqlProps + + /// Retrieve JSON documents matching a JSON containment query (@>) + /// The table from which documents should be retrieved (may include schema) + /// The document to match with the containment query + /// The SqlProps to use to execute the query + /// All JSON documents matching the given containment query + [] + let byContains tableName (criteria: obj) sqlProps = + Custom.jsonArray + (Query.byContains (Query.find tableName)) [ jsonParam "@criteria" criteria ] jsonFromData sqlProps + + /// + /// Write JSON documents to the given StreamWriter matching a JSON containment query (@>) + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The document to match with the containment query + /// The SqlProps to use to execute the query + [] + let writeByContains tableName writer (criteria: obj) sqlProps = + Custom.writeJsonArray + (Query.byContains (Query.find tableName)) [ jsonParam "@criteria" criteria ] writer jsonFromData sqlProps + + /// + /// Retrieve JSON documents matching a JSON containment query (@>) ordered by the given fields in the + /// document + /// + /// The table from which documents should be retrieved (may include schema) + /// The document to match with the containment query + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + /// All documents matching the given containment query, ordered by the given fields + [] + let byContainsOrdered tableName (criteria: obj) orderFields sqlProps = + Custom.jsonArray + (Query.byContains (Query.find tableName) + Query.orderBy orderFields PostgreSQL) + [ jsonParam "@criteria" criteria ] + jsonFromData + sqlProps + + /// + /// Write JSON documents to the given StreamWriter matching a JSON containment query (@>) ordered + /// by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The document to match with the containment query + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + [] + let writeByContainsOrdered tableName writer (criteria: obj) orderFields sqlProps = + Custom.writeJsonArray + (Query.byContains (Query.find tableName) + Query.orderBy orderFields PostgreSQL) + [ jsonParam "@criteria" criteria ] + writer + jsonFromData + sqlProps + + /// Retrieve JSON documents matching a JSON Path match query (@?) 
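// Editor's sketch: the Json.write* functions stream each matching document to the StreamWriter
// as it is read, rather than building the whole array as a string first. Output goes to stdout
// here; in a web handler the writer would wrap the response body instead. These calls are
// synchronous, mirroring the Sql.iter-based writeJsonArray helper.
open System.IO

let streamSketch props =
    use writer = new StreamWriter(System.Console.OpenStandardOutput())
    WithProps.Json.writeByContains "customers" writer {| Region = "NY" |} props
    writer.Flush()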
+ /// The table from which documents should be retrieved (may include schema) + /// The JSON Path expression to match + /// The SqlProps to use to execute the query + /// All JSON documents matching the given JSON Path expression + [] + let byJsonPath tableName jsonPath sqlProps = + Custom.jsonArray + (Query.byPathMatch (Query.find tableName)) [ "@path", Sql.string jsonPath ] jsonFromData sqlProps + + /// + /// Write JSON documents to the given StreamWriter matching a JSON Path match query (@?) + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The JSON Path expression to match + /// The SqlProps to use to execute the query + [] + let writeByJsonPath tableName writer jsonPath sqlProps = + Custom.writeJsonArray + (Query.byPathMatch (Query.find tableName)) [ "@path", Sql.string jsonPath ] writer jsonFromData sqlProps + + /// + /// Retrieve JSON documents matching a JSON Path match query (@?) ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The JSON Path expression to match + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + /// All JSON documents matching the given JSON Path expression, ordered by the given fields + [] + let byJsonPathOrdered tableName jsonPath orderFields sqlProps = + Custom.jsonArray + (Query.byPathMatch (Query.find tableName) + Query.orderBy orderFields PostgreSQL) + [ "@path", Sql.string jsonPath ] + jsonFromData + sqlProps + + /// + /// Write JSON documents to the given StreamWriter matching a JSON Path match query (@?) ordered by + /// the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The JSON Path expression to match + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + [] + let writeByJsonPathOrdered tableName writer jsonPath orderFields sqlProps = + Custom.writeJsonArray + (Query.byPathMatch (Query.find tableName) + Query.orderBy orderFields PostgreSQL) + [ "@path", Sql.string jsonPath ] + writer + jsonFromData + sqlProps + + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqlProps to use to execute the query + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByFields tableName howMatched fields sqlProps = + Custom.jsonSingle + (Query.byFields (Query.find tableName) howMatched fields) (addFieldParams fields []) jsonFromData sqlProps + + /// + /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// (->> =, etc.) + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqlProps to use to execute the query + [] + let writeFirstByFields tableName (writer: StreamWriter) howMatched fields sqlProps = + firstByFields tableName howMatched fields sqlProps |> writer.Write + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) 
ordered by the given + /// fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByFieldsOrdered tableName howMatched queryFields orderFields sqlProps = + Custom.jsonSingle + $"{Query.byFields (Query.find tableName) howMatched queryFields}{Query.orderBy orderFields PostgreSQL}" + (addFieldParams queryFields []) + jsonFromData + sqlProps + + /// + /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// (->> =, etc.) ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + [] + let writeFirstByFieldsOrdered tableName (writer: StreamWriter) howMatched queryFields orderFields sqlProps = + firstByFieldsOrdered tableName howMatched queryFields orderFields sqlProps |> writer.Write + + /// Retrieve the first JSON document matching a JSON containment query (@>) + /// The table from which a document should be retrieved (may include schema) + /// The document to match with the containment query + /// The SqlProps to use to execute the query + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByContains tableName (criteria: obj) sqlProps = + Custom.jsonSingle + (Query.byContains (Query.find tableName)) [ jsonParam "@criteria" criteria ] jsonFromData sqlProps + + /// + /// Write the first JSON document to the given StreamWriter matching a JSON containment query (@>) + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The document to match with the containment query + /// The SqlProps to use to execute the query + [] + let writeFirstByContains tableName (writer: StreamWriter) (criteria: obj) sqlProps = + firstByContains tableName criteria sqlProps |> writer.Write + + /// + /// Retrieve the first JSON document matching a JSON containment query (@>) ordered by the given fields in + /// the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The document to match with the containment query + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByContainsOrdered tableName (criteria: obj) orderFields sqlProps = + Custom.jsonSingle + $"{Query.byContains (Query.find tableName)}{Query.orderBy orderFields PostgreSQL}" + [ jsonParam "@criteria" criteria ] + jsonFromData + sqlProps + + /// + /// Write the first JSON document to the given StreamWriter matching a JSON containment query (@>) + /// ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The document to match with the containment query + /// Fields by which the results should be 
ordered + /// The SqlProps to use to execute the query + [] + let writeFirstByContainsOrdered tableName (writer: StreamWriter) (criteria: obj) orderFields sqlProps = + firstByContainsOrdered tableName criteria orderFields sqlProps |> writer.Write + + /// Retrieve the first JSON document matching a JSON Path match query (@?) + /// The table from which a document should be retrieved (may include schema) + /// The JSON Path expression to match + /// The SqlProps to use to execute the query + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByJsonPath tableName jsonPath sqlProps = + Custom.jsonSingle + (Query.byPathMatch (Query.find tableName)) [ "@path", Sql.string jsonPath ] jsonFromData sqlProps + + /// + /// Write the first JSON document to the given StreamWriter matching a JSON Path match query (@?) + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The JSON Path expression to match + /// The SqlProps to use to execute the query + [] + let writeFirstByJsonPath tableName (writer: StreamWriter) jsonPath sqlProps = + firstByJsonPath tableName jsonPath sqlProps |> writer.Write + + /// + /// Retrieve the first JSON document matching a JSON Path match query (@?) ordered by the given fields in the + /// document + /// + /// The table from which a document should be retrieved (may include schema) + /// The JSON Path expression to match + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByJsonPathOrdered tableName jsonPath orderFields sqlProps = + Custom.jsonSingle + $"{Query.byPathMatch (Query.find tableName)}{Query.orderBy orderFields PostgreSQL}" + [ "@path", Sql.string jsonPath ] + jsonFromData + sqlProps + + /// + /// Write the first JSON document to the given StreamWriter matching a JSON Path match query (@?) 
+ /// ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The JSON Path expression to match + /// Fields by which the results should be ordered + /// The SqlProps to use to execute the query + [] + let writeFirstByJsonPathOrdered tableName (writer: StreamWriter) jsonPath orderFields sqlProps = + firstByJsonPathOrdered tableName jsonPath orderFields sqlProps |> writer.Write + /// Commands to update documents [] module Update = @@ -670,7 +1125,7 @@ module Update = /// The table in which a document should be updated (may include schema) /// The ID of the document to be updated (replaced) /// The new document - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byId tableName (docId: 'TKey) (document: 'TDoc) sqlProps = Custom.nonQuery @@ -682,7 +1137,7 @@ module Update = /// The table in which a document should be updated (may include schema) /// The function to obtain the ID of the document /// The new document - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byFunc tableName (idFunc: 'TDoc -> 'TKey) (document: 'TDoc) sqlProps = byId tableName (idFunc document) document sqlProps @@ -693,7 +1148,7 @@ module Update = /// The table in which a document should be updated (may include schema) /// The function to obtain the ID of the document /// The new document - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query let ByFunc(tableName, idFunc: System.Func<'TDoc, 'TKey>, document: 'TDoc, sqlProps) = byFunc tableName idFunc.Invoke document sqlProps @@ -705,20 +1160,20 @@ module Patch = /// The table in which a document should be patched (may include schema) /// The ID of the document to patch /// The partial document to patch the existing document - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byId tableName (docId: 'TKey) (patch: 'TPatch) sqlProps = Custom.nonQuery (Query.byId (Query.patch tableName) docId) [ idParam docId; jsonParam "@data" patch ] sqlProps /// - /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) + /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) 
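// Editor's sketch: full replacement vs. partial update, reusing the Customer record from the
// earlier sketch. Update.byId swaps the entire document; Patch.byId merges the given JSON into
// the existing one (PostgreSQL's || concatenation, per Query.patch). Names are illustrative.
let updateSketch props = task {
    do! WithProps.Update.byId "customers" "cust-123" { Id = "cust-123"; Name = "Acme"; Region = "TX" } props
    do! WithProps.Patch.byId  "customers" "cust-123" {| Region = "NY" |} props
}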
/// /// The table in which documents should be patched (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// The partial document to patch the existing document - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byFields tableName howMatched fields (patch: 'TPatch) sqlProps = Custom.nonQuery @@ -726,11 +1181,11 @@ module Patch = (addFieldParams fields [ jsonParam "@data" patch ]) sqlProps - /// Patch documents using a JSON containment query in the WHERE clause (@>) + /// Patch documents using a JSON containment query in the WHERE clause (@>) /// The table in which documents should be patched (may include schema) /// The document to match the containment query /// The partial document to patch the existing document - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byContains tableName (criteria: 'TContains) (patch: 'TPatch) sqlProps = Custom.nonQuery @@ -738,11 +1193,11 @@ module Patch = [ jsonParam "@data" patch; jsonParam "@criteria" criteria ] sqlProps - /// Patch documents using a JSON Path match query in the WHERE clause (@?) + /// Patch documents using a JSON Path match query in the WHERE clause (@?) /// The table in which documents should be patched (may include schema) /// The JSON Path expression to match /// The partial document to patch the existing document - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byJsonPath tableName jsonPath (patch: 'TPatch) sqlProps = Custom.nonQuery @@ -758,7 +1213,7 @@ module RemoveFields = /// The table in which a document should be modified (may include schema) /// The ID of the document to modify /// One or more field names to remove from the document - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byId tableName (docId: 'TKey) fieldNames sqlProps = Custom.nonQuery @@ -769,7 +1224,7 @@ module RemoveFields = /// Whether to match any or all of the field conditions /// The field conditions to match /// One or more field names to remove from the matching documents - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byFields tableName howMatched fields fieldNames sqlProps = Custom.nonQuery @@ -777,11 +1232,11 @@ module RemoveFields = (addFieldParams fields [ fieldNameParams fieldNames ]) sqlProps - /// Remove fields from documents via a JSON containment query (@>) + /// Remove fields from documents via a JSON containment query (@>) /// The table in which documents should be modified (may include schema) /// The document to match the containment query /// One or more field names to remove from the matching documents - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byContains tableName (criteria: 'TContains) fieldNames sqlProps = Custom.nonQuery @@ -789,11 +1244,11 @@ module RemoveFields = [ jsonParam "@criteria" criteria; fieldNameParams fieldNames ] sqlProps - /// Remove fields from documents via a JSON Path match query (@?) + /// Remove fields from documents via a JSON Path match query (@?) 
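// Editor's sketch: removing fields passes the names as a single array parameter (via the
// fieldNameParams helper above) and strips them with PostgreSQL's jsonb "-" operator.
let removeSketch props = task {
    // Drop a field that is no longer used from one document
    do! WithProps.RemoveFields.byId "customers" "cust-123" [ "LegacyCode" ] props
}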
/// The table in which documents should be modified (may include schema) /// The JSON Path expression to match /// One or more field names to remove from the matching documents - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byJsonPath tableName jsonPath fieldNames sqlProps = Custom.nonQuery @@ -808,33 +1263,33 @@ module Delete = /// Delete a document by its ID /// The table in which a document should be deleted (may include schema) /// The ID of the document to delete - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byId tableName (docId: 'TKey) sqlProps = Custom.nonQuery (Query.byId (Query.delete tableName) docId) [ idParam docId ] sqlProps - /// Delete documents by matching a JSON field comparison query (->> =, etc.) + /// Delete documents by matching a JSON field comparison query (->> =, etc.) /// The table in which documents should be deleted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byFields tableName howMatched fields sqlProps = Custom.nonQuery (Query.byFields (Query.delete tableName) howMatched fields) (addFieldParams fields []) sqlProps - /// Delete documents by matching a JSON contains query (@>) + /// Delete documents by matching a JSON contains query (@>) /// The table in which documents should be deleted (may include schema) /// The document to match the containment query - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byContains tableName (criteria: 'TCriteria) sqlProps = Custom.nonQuery (Query.byContains (Query.delete tableName)) [ jsonParam "@criteria" criteria ] sqlProps - /// Delete documents by matching a JSON Path match query (@?) + /// Delete documents by matching a JSON Path match query (@?) /// The table in which documents should be deleted (may include schema) /// The JSON Path expression to match - /// The SqlProps to use to execute the query + /// The SqlProps to use to execute the query [] let byJsonPath tableName jsonPath sqlProps = Custom.nonQuery (Query.byPathMatch (Query.delete tableName)) [ "@path", Sql.string jsonPath ] sqlProps -- 2.47.2 From a3633703424394d120dc65a67328ef4f3c19a7c5 Mon Sep 17 00:00:00 2001 From: "Daniel J. 
Summers" Date: Fri, 4 Apr 2025 22:05:09 -0400 Subject: [PATCH 02/22] Add Json throughout Postgres --- src/Postgres/Extensions.fs | 777 ++++++++++++++++++++++++++++++++++--- src/Postgres/Functions.fs | 369 +++++++++++++++++- src/Postgres/Library.fs | 2 - src/Postgres/WithProps.fs | 16 +- 4 files changed, 1106 insertions(+), 58 deletions(-) diff --git a/src/Postgres/Extensions.fs b/src/Postgres/Extensions.fs index 568b51b..e53536c 100644 --- a/src/Postgres/Extensions.fs +++ b/src/Postgres/Extensions.fs @@ -18,6 +18,22 @@ module Extensions = member conn.customList<'TDoc> query parameters (mapFunc: RowReader -> 'TDoc) = Custom.list<'TDoc> query parameters mapFunc (Sql.existingConnection conn) + /// Execute a query that returns a JSON array of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// A JSON array of results for the given query + member conn.customJsonArray query parameters mapFunc = + Custom.jsonArray query parameters mapFunc (Sql.existingConnection conn) + + /// Execute a query, writing its results to the given StreamWriter + /// The query to retrieve the results + /// Parameters to use for the query + /// The StreamWriter to which the results should be written + /// The mapping function to extract the document + member conn.writeCustomJsonArray query parameters writer mapFunc = + Custom.writeJsonArray query parameters writer mapFunc (Sql.existingConnection conn) + /// Execute a query that returns one or no results /// The query to retrieve the results /// Parameters to use for the query @@ -26,6 +42,14 @@ module Extensions = member conn.customSingle<'TDoc> query parameters (mapFunc: RowReader -> 'TDoc) = Custom.single<'TDoc> query parameters mapFunc (Sql.existingConnection conn) + /// Execute a query that returns one or no JSON documents + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The JSON document with the first matching result, or an empty document if not found + member conn.customJsonSingle query parameters mapFunc = + Custom.jsonSingle query parameters mapFunc (Sql.existingConnection conn) + /// Execute a query that returns no results /// The query to retrieve the results /// Parameters to use for the query @@ -269,6 +293,293 @@ module Extensions = member conn.findFirstByJsonPathOrdered<'TDoc> tableName jsonPath orderFields = Find.firstByJsonPathOrdered<'TDoc> tableName jsonPath orderFields (Sql.existingConnection conn) + /// Retrieve all documents in the given table as a JSON array + /// The table from which documents should be retrieved (may include schema) + /// All documents from the given table as a JSON array + member conn.jsonAll tableName = + Json.all tableName (Sql.existingConnection conn) + + /// Write all documents in the given table to the given StreamWriter + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + member conn.writeJsonAll tableName writer = + Json.writeAll tableName writer (Sql.existingConnection conn) + + /// + /// Retrieve all documents in the given table as a JSON array, ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// All documents from the given table as a JSON array, ordered by the given fields + member conn.jsonAllOrdered tableName orderFields = 
+ Json.allOrdered tableName orderFields (Sql.existingConnection conn) + + /// + /// Write all documents in the given table to the given StreamWriter, ordered by the given fields in the + /// document + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Fields by which the results should be ordered + member conn.writeJsonAllOrdered tableName writer orderFields = + Json.writeAllOrdered tableName writer orderFields (Sql.existingConnection conn) + + /// Retrieve a JSON document by its ID + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The JSON document if found, an empty JSON document otherwise + member conn.jsonById<'TKey> tableName (docId: 'TKey) = + Json.byId tableName docId (Sql.existingConnection conn) + + /// Write a JSON document to the given StreamWriter by its ID + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The ID of the document to retrieve + member conn.writeJsonById<'TKey> tableName writer (docId: 'TKey) = + Json.writeById tableName writer docId (Sql.existingConnection conn) + + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// All JSON documents matching the given fields + member conn.jsonByFields tableName howMatched fields = + Json.byFields tableName howMatched fields (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, + /// etc.) + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + member conn.writeJsonByFields tableName writer howMatched fields = + Json.writeByFields tableName writer howMatched fields (Sql.existingConnection conn) + + /// + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) ordered by the given + /// fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// All JSON documents matching the given fields, ordered by the other given fields + member conn.jsonByFieldsOrdered tableName howMatched queryFields orderFields = + Json.byFieldsOrdered tableName howMatched queryFields orderFields (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, + /// etc.) 
ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + member conn.writeJsonByFieldsOrdered tableName writer howMatched queryFields orderFields = + Json.writeByFieldsOrdered tableName writer howMatched queryFields orderFields (Sql.existingConnection conn) + + /// Retrieve JSON documents matching a JSON containment query (@>) + /// The table from which documents should be retrieved (may include schema) + /// The document to match with the containment query + /// All JSON documents matching the given containment query + member conn.jsonByContains tableName (criteria: obj) = + Json.byContains tableName criteria (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given StreamWriter matching a JSON containment query (@>) + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The document to match with the containment query + member conn.writeJsonByContains tableName writer (criteria: obj) = + Json.writeByContains tableName writer criteria (Sql.existingConnection conn) + + /// + /// Retrieve JSON documents matching a JSON containment query (@>) ordered by the given fields in the + /// document + /// + /// The table from which documents should be retrieved (may include schema) + /// The document to match with the containment query + /// Fields by which the results should be ordered + /// All documents matching the given containment query, ordered by the given fields + member conn.jsonByContainsOrdered tableName (criteria: obj) orderFields = + Json.byContainsOrdered tableName criteria orderFields (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given StreamWriter matching a JSON containment query (@>) + /// ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The document to match with the containment query + /// Fields by which the results should be ordered + member conn.writeJsonByContainsOrdered tableName writer (criteria: obj) orderFields = + Json.writeByContainsOrdered tableName writer criteria orderFields (Sql.existingConnection conn) + + /// Retrieve JSON documents matching a JSON Path match query (@?) + /// The table from which documents should be retrieved (may include schema) + /// The JSON Path expression to match + /// All JSON documents matching the given JSON Path expression + member conn.jsonByJsonPath tableName jsonPath = + Json.byJsonPath tableName jsonPath (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given StreamWriter matching a JSON Path match query (@?) + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The JSON Path expression to match + member conn.writeJsonByJsonPath tableName writer jsonPath = + Json.writeByJsonPath tableName writer jsonPath (Sql.existingConnection conn) + + /// + /// Retrieve JSON documents matching a JSON Path match query (@?) 
ordered by the given fields in the + /// document + /// + /// The table from which documents should be retrieved (may include schema) + /// The JSON Path expression to match + /// Fields by which the results should be ordered + /// All JSON documents matching the given JSON Path expression, ordered by the given fields + member conn.jsonByJsonPathOrdered tableName jsonPath orderFields = + Json.byJsonPathOrdered tableName jsonPath orderFields (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given StreamWriter matching a JSON Path match query (@?) ordered + /// by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The JSON Path expression to match + /// Fields by which the results should be ordered + member conn.writeJsonByJsonPathOrdered tableName writer jsonPath orderFields = + Json.writeByJsonPathOrdered tableName writer jsonPath orderFields (Sql.existingConnection conn) + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The first matching JSON document if found, an empty JSON document otherwise + member conn.jsonFirstByFields tableName howMatched fields = + Json.firstByFields tableName howMatched fields (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// (->> =, etc.) + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + member conn.writeJsonFirstByFields tableName writer howMatched fields = + Json.writeFirstByFields tableName writer howMatched fields (Sql.existingConnection conn) + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) ordered by the + /// given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The first matching JSON document if found, an empty JSON document otherwise + member conn.jsonFirstByFieldsOrdered tableName howMatched queryFields orderFields = + Json.firstByFieldsOrdered tableName howMatched queryFields orderFields (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// (->> =, etc.) 
ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + member conn.writeJsonFirstByFieldsOrdered tableName writer howMatched queryFields orderFields = + Json.writeFirstByFieldsOrdered + tableName writer howMatched queryFields orderFields (Sql.existingConnection conn) + + /// Retrieve the first JSON document matching a JSON containment query (@>) + /// The table from which a document should be retrieved (may include schema) + /// The document to match with the containment query + /// The first matching JSON document if found, an empty JSON document otherwise + member conn.jsonFirstByContains tableName (criteria: obj) = + Json.firstByContains tableName criteria (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given StreamWriter matching a JSON containment query + /// (@>) + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The document to match with the containment query + member conn.writeJsonFirstByContains tableName writer (criteria: obj) = + Json.writeFirstByContains tableName writer criteria (Sql.existingConnection conn) + + /// + /// Retrieve the first JSON document matching a JSON containment query (@>) ordered by the given + /// fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The document to match with the containment query + /// Fields by which the results should be ordered + /// The first matching JSON document if found, an empty JSON document otherwise + member conn.jsonFirstByContainsOrdered tableName (criteria: obj) orderFields = + Json.firstByContainsOrdered tableName criteria orderFields (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given StreamWriter matching a JSON containment query + /// (@>) ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The document to match with the containment query + /// Fields by which the results should be ordered + member conn.writeJsonFirstByContainsOrdered tableName writer (criteria: obj) orderFields = + Json.writeFirstByContainsOrdered tableName writer criteria orderFields (Sql.existingConnection conn) + + /// Retrieve the first JSON document matching a JSON Path match query (@?) + /// The table from which a document should be retrieved (may include schema) + /// The JSON Path expression to match + /// The first matching JSON document if found, an empty JSON document otherwise + member conn.jsonFirstByJsonPath tableName jsonPath = + Json.firstByJsonPath tableName jsonPath (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given StreamWriter matching a JSON Path match query (@?) 
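// Usage sketch for the fluent JSON "first" extensions above, given an open
// NpgsqlConnection; the table, criteria, and Field.Named (assumed here to be
// the library's ordering-field constructor) are illustrative. Results come
// back as raw JSON strings.
//
//     let firstAcme (conn: Npgsql.NpgsqlConnection) : string =
//         conn.jsonFirstByContains "customer" {| Name = "Acme" |}
//     let firstAcmeByName (conn: Npgsql.NpgsqlConnection) : string =
//         conn.jsonFirstByContainsOrdered "customer" {| Name = "Acme" |} [ Field.Named "Name" ]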
+ /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The JSON Path expression to match + member conn.writeJsonFirstByJsonPath tableName writer jsonPath = + Json.writeFirstByJsonPath tableName writer jsonPath (Sql.existingConnection conn) + + /// + /// Retrieve the first JSON document matching a JSON Path match query (@?) ordered by the given fields + /// in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The JSON Path expression to match + /// Fields by which the results should be ordered + /// The first matching JSON document if found, an empty JSON document otherwise + member conn.jsonFirstByJsonPathOrdered tableName jsonPath orderFields = + Json.firstByJsonPathOrdered tableName jsonPath orderFields (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given StreamWriter matching a JSON Path match query (@?) + /// ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The JSON Path expression to match + /// Fields by which the results should be ordered + member conn.writeJsonFirstByJsonPathOrdered tableName writer jsonPath orderFields = + Json.writeFirstByJsonPathOrdered tableName writer jsonPath orderFields (Sql.existingConnection conn) + /// Update (replace) an entire document by its ID /// The table in which a document should be updated (may include schema) /// The ID of the document to be updated (replaced) @@ -381,7 +692,7 @@ open System.Runtime.CompilerServices type NpgsqlConnectionCSharpExtensions = /// Execute a query that returns a list of results - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item @@ -390,8 +701,26 @@ type NpgsqlConnectionCSharpExtensions = static member inline CustomList<'TDoc>(conn, query, parameters, mapFunc: System.Func) = Custom.List<'TDoc>(query, parameters, mapFunc, Sql.existingConnection conn) + /// Execute a query that returns a JSON array of results + /// The NpgsqlConnection on which to run the query + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// A JSON array of results for the given query + static member inline CustomJsonArray(conn, query, parameters, mapFunc) = + Custom.JsonArray(query, parameters, mapFunc, Sql.existingConnection conn) + + /// Execute a query, writing its results to the given StreamWriter + /// The NpgsqlConnection on which to run the query + /// The query to retrieve the results + /// Parameters to use for the query + /// The StreamWriter to which the results should be written + /// The mapping function to extract the document + static member inline WriteCustomJsonArray(conn, query, parameters, writer, mapFunc) = + Custom.WriteJsonArray(query, parameters, writer, mapFunc, Sql.existingConnection conn) + /// Execute a query that returns one or no results - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item @@ -401,8 +730,17 @@ type 
NpgsqlConnectionCSharpExtensions = conn, query, parameters, mapFunc: System.Func) = Custom.Single<'TDoc>(query, parameters, mapFunc, Sql.existingConnection conn) + /// Execute a query that returns one or no JSON documents + /// The NpgsqlConnection on which to run the query + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The JSON document with the first matching result, or an empty document if not found + static member inline CustomJsonSingle(conn, query, parameters, mapFunc) = + Custom.JsonSingle(query, parameters, mapFunc, Sql.existingConnection conn) + /// Execute a query that returns no results - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The query to retrieve the results /// Parameters to use for the query [] @@ -410,7 +748,7 @@ type NpgsqlConnectionCSharpExtensions = Custom.nonQuery query parameters (Sql.existingConnection conn) /// Execute a query that returns a scalar value - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The query to retrieve the value /// Parameters to use for the query /// The mapping function to obtain the value @@ -421,14 +759,14 @@ type NpgsqlConnectionCSharpExtensions = Custom.Scalar(query, parameters, mapFunc, Sql.existingConnection conn) /// Create a document table - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table whose existence should be ensured (may include schema) [] static member inline EnsureTable(conn, name) = Definition.ensureTable name (Sql.existingConnection conn) /// Create an index on documents in the specified table - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table to be indexed (may include schema) /// The type of document index to create [] @@ -436,7 +774,7 @@ type NpgsqlConnectionCSharpExtensions = Definition.ensureDocumentIndex name idxType (Sql.existingConnection conn) /// Create an index on field(s) within documents in the specified table - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table to be indexed (may include schema) /// The name of the index to create /// One or more fields to be indexed @@ -445,7 +783,7 @@ type NpgsqlConnectionCSharpExtensions = Definition.ensureFieldIndex tableName indexName fields (Sql.existingConnection conn) /// Insert a new document - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table into which the document should be inserted (may include schema) /// The document to be inserted [] @@ -453,7 +791,7 @@ type NpgsqlConnectionCSharpExtensions = insert<'TDoc> tableName document (Sql.existingConnection conn) /// Save a document, inserting it if it does not exist and updating it if it does (AKA "upsert") - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table into which the document should be saved (may include schema) /// The document to be saved [] @@ -461,7 +799,7 @@ type NpgsqlConnectionCSharpExtensions = save<'TDoc> tableName document (Sql.existingConnection conn) /// Count all documents in a table - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which documents should be counted (may include 
schema) /// The count of the documents in the table [] @@ -469,7 +807,7 @@ type NpgsqlConnectionCSharpExtensions = Count.all tableName (Sql.existingConnection conn) /// Count matching documents using JSON field comparisons (->> =, etc.) - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which documents should be counted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -479,7 +817,7 @@ type NpgsqlConnectionCSharpExtensions = Count.byFields tableName howMatched fields (Sql.existingConnection conn) /// Count matching documents using a JSON containment query (@>) - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which documents should be counted (may include schema) /// The document to match with the containment query /// The count of the documents in the table @@ -488,7 +826,7 @@ type NpgsqlConnectionCSharpExtensions = Count.byContains tableName criteria (Sql.existingConnection conn) /// Count matching documents using a JSON Path match query (@?) - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which documents should be counted (may include schema) /// The JSON Path expression to be matched /// The count of the documents in the table @@ -497,7 +835,7 @@ type NpgsqlConnectionCSharpExtensions = Count.byJsonPath tableName jsonPath (Sql.existingConnection conn) /// Determine if a document exists for the given ID - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which existence should be checked (may include schema) /// The ID of the document whose existence should be checked /// True if a document exists, false if not @@ -506,7 +844,7 @@ type NpgsqlConnectionCSharpExtensions = Exists.byId tableName docId (Sql.existingConnection conn) /// Determine if a document exists using JSON field comparisons (->> =, etc.) - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which existence should be checked (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -516,7 +854,7 @@ type NpgsqlConnectionCSharpExtensions = Exists.byFields tableName howMatched fields (Sql.existingConnection conn) /// Determine if a document exists using a JSON containment query (@>) - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which existence should be checked (may include schema) /// The document to match with the containment query /// True if any matching documents exist, false if not @@ -525,7 +863,7 @@ type NpgsqlConnectionCSharpExtensions = Exists.byContains tableName criteria (Sql.existingConnection conn) /// Determine if a document exists using a JSON Path match query (@?) 
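// Usage sketch: the existence check these members wrap, called F#-style
// against an existing connection (table name and JSON Path are examples).
//
//     let anyWashington (conn: Npgsql.NpgsqlConnection) =
//         Exists.byJsonPath "customer" "$.Region ? (@ == \"WA\")" (Sql.existingConnection conn)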
- /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which existence should be checked (may include schema) /// The JSON Path expression to be matched /// True if any matching documents exist, false if not @@ -534,7 +872,7 @@ type NpgsqlConnectionCSharpExtensions = Exists.byJsonPath tableName jsonPath (Sql.existingConnection conn) /// Retrieve all documents in the given table - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// All documents from the given table [] @@ -542,7 +880,7 @@ type NpgsqlConnectionCSharpExtensions = Find.All<'TDoc>(tableName, Sql.existingConnection conn) /// Retrieve all documents in the given table ordered by the given fields in the document - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// Fields by which the results should be ordered /// All documents from the given table, ordered by the given fields @@ -551,7 +889,7 @@ type NpgsqlConnectionCSharpExtensions = Find.AllOrdered<'TDoc>(tableName, orderFields, Sql.existingConnection conn) /// Retrieve a document by its ID - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve /// The document if found, null otherwise @@ -560,7 +898,7 @@ type NpgsqlConnectionCSharpExtensions = Find.ById<'TKey, 'TDoc>(tableName, docId, Sql.existingConnection conn) /// Retrieve documents matching JSON field comparisons (->> =, etc.) - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -573,7 +911,7 @@ type NpgsqlConnectionCSharpExtensions = /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
ordered by the given fields in /// the document /// - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -585,7 +923,7 @@ type NpgsqlConnectionCSharpExtensions = tableName, howMatched, queryFields, orderFields, Sql.existingConnection conn) /// Retrieve documents matching a JSON containment query (@>) - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// The document to match with the containment query /// All documents matching the given containment query @@ -597,7 +935,7 @@ type NpgsqlConnectionCSharpExtensions = /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the /// document /// - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// The document to match with the containment query /// Fields by which the results should be ordered @@ -607,7 +945,7 @@ type NpgsqlConnectionCSharpExtensions = Find.ByContainsOrdered<'TDoc>(tableName, criteria, orderFields, Sql.existingConnection conn) /// Retrieve documents matching a JSON Path match query (@?) - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match /// All documents matching the given JSON Path expression @@ -618,7 +956,7 @@ type NpgsqlConnectionCSharpExtensions = /// /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document /// - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match /// Fields by which the results should be ordered @@ -628,7 +966,7 @@ type NpgsqlConnectionCSharpExtensions = Find.ByJsonPathOrdered<'TDoc>(tableName, jsonPath, orderFields, Sql.existingConnection conn) /// Retrieve the first document matching JSON field comparisons (->> =, etc.) - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -642,7 +980,7 @@ type NpgsqlConnectionCSharpExtensions = /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
ordered by the given /// fields in the document /// - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -655,7 +993,7 @@ type NpgsqlConnectionCSharpExtensions = tableName, howMatched, queryFields, orderFields, Sql.existingConnection conn) /// Retrieve the first document matching a JSON containment query (@>) - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query /// The first document, or null if not found @@ -668,7 +1006,7 @@ type NpgsqlConnectionCSharpExtensions = /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in /// the document /// - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query /// Fields by which the results should be ordered @@ -679,7 +1017,7 @@ type NpgsqlConnectionCSharpExtensions = Find.FirstByContainsOrdered<'TDoc>(tableName, criteria, orderFields, Sql.existingConnection conn) /// Retrieve the first document matching a JSON Path match query (@?) - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match /// The first document, or null if not found @@ -691,7 +1029,7 @@ type NpgsqlConnectionCSharpExtensions = /// Retrieve the first document matching a JSON Path match query (@?) 
ordered by the given fields in the /// document /// - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match /// Fields by which the results should be ordered @@ -701,8 +1039,355 @@ type NpgsqlConnectionCSharpExtensions = conn, tableName, jsonPath, orderFields) = Find.FirstByJsonPathOrdered<'TDoc>(tableName, jsonPath, orderFields, Sql.existingConnection conn) + /// Retrieve all documents in the given table as a JSON array + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// All documents from the given table as a JSON array + [] + static member inline JsonAll(conn, tableName) = + Json.all tableName (Sql.existingConnection conn) + + /// Write all documents in the given table to the given StreamWriter + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + [] + static member inline WriteJsonAll(conn, tableName, writer) = + Json.writeAll tableName writer (Sql.existingConnection conn) + + /// + /// Retrieve all documents in the given table as a JSON array, ordered by the given fields in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// All documents from the given table as a JSON array, ordered by the given fields + [] + static member inline JsonAllOrdered(conn, tableName, orderFields) = + Json.allOrdered tableName orderFields (Sql.existingConnection conn) + + /// + /// Write all documents in the given table to the given StreamWriter, ordered by the given fields in the + /// document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Fields by which the results should be ordered + [] + static member inline WriteJsonAllOrdered(conn, tableName, writer, orderFields) = + Json.writeAllOrdered tableName writer orderFields (Sql.existingConnection conn) + + /// Retrieve a JSON document by its ID + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The JSON document if found, an empty JSON document otherwise + [] + static member inline JsonById<'TKey>(conn, tableName, docId: 'TKey) = + Json.byId tableName docId (Sql.existingConnection conn) + + /// Write a JSON document to the given StreamWriter by its ID + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The ID of the document to retrieve + [] + static member inline WriteJsonById<'TKey>(conn, tableName, writer, docId) = + Json.writeById tableName writer docId (Sql.existingConnection conn) + + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) 
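// Usage sketch for the streaming JSON members above: write a whole table as
// a JSON array to an output stream (names are illustrative; the fluent
// writeJsonAll extension is defined earlier in this file).
//
//     let writeAllCustomers (conn: Npgsql.NpgsqlConnection) (output: System.IO.Stream) =
//         use writer = new System.IO.StreamWriter(output)
//         conn.writeJsonAll "customer" writer
//         writer.Flush()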
+ /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// All JSON documents matching the given fields + [] + static member inline JsonByFields(conn, tableName, howMatched, fields) = + Json.byFields tableName howMatched fields (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, + /// etc.) + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + [] + static member inline WriteJsonByFields(conn, tableName, writer, howMatched, fields) = + Json.writeByFields tableName writer howMatched fields (Sql.existingConnection conn) + + /// + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) ordered by the given + /// fields in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// All JSON documents matching the given fields, ordered by the other given fields + [] + static member inline JsonByFieldsOrdered(conn, tableName, howMatched, queryFields, orderFields) = + Json.byFieldsOrdered tableName howMatched queryFields orderFields (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, + /// etc.) 
ordered by the given fields in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + [] + static member inline WriteJsonByFieldsOrdered(conn, tableName, writer, howMatched, queryFields, orderFields) = + Json.writeByFieldsOrdered tableName writer howMatched queryFields orderFields (Sql.existingConnection conn) + + /// Retrieve JSON documents matching a JSON containment query (@>) + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The document to match with the containment query + /// All JSON documents matching the given containment query + [] + static member inline JsonByContains(conn, tableName, criteria: obj) = + Json.byContains tableName criteria (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given StreamWriter matching a JSON containment query (@>) + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The document to match with the containment query + [] + static member inline WriteJsonByContains(conn, tableName, writer, criteria: obj) = + Json.writeByContains tableName writer criteria (Sql.existingConnection conn) + + /// + /// Retrieve JSON documents matching a JSON containment query (@>) ordered by the given fields in the + /// document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The document to match with the containment query + /// Fields by which the results should be ordered + /// All documents matching the given containment query, ordered by the given fields + [] + static member inline JsonByContainsOrdered(conn, tableName, criteria: obj, orderFields) = + Json.byContainsOrdered tableName criteria orderFields (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given StreamWriter matching a JSON containment query (@>) + /// ordered by the given fields in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The document to match with the containment query + /// Fields by which the results should be ordered + [] + static member inline WriteJsonByContainsOrdered(conn, tableName, writer, criteria: obj, orderFields) = + Json.writeByContainsOrdered tableName writer criteria orderFields (Sql.existingConnection conn) + + /// Retrieve JSON documents matching a JSON Path match query (@?) + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The JSON Path expression to match + /// All JSON documents matching the given JSON Path expression + [] + static member inline JsonByJsonPath(conn, tableName, jsonPath) = + Json.byJsonPath tableName jsonPath (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given StreamWriter matching a JSON Path match query (@?) 
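// Usage sketch for the JSON Path members above, reusing the hypothetical
// "customer" table; Field.Named (assumed ordering-field constructor)
// supplies the sort field.
//
//     let westCoastJson (conn: Npgsql.NpgsqlConnection) : string =
//         conn.jsonByJsonPathOrdered "customer" "$.Region ? (@ == \"WA\")" [ Field.Named "Name" ]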
+ /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The JSON Path expression to match + [] + static member inline WriteJsonByJsonPath(conn, tableName, writer, jsonPath) = + Json.writeByJsonPath tableName writer jsonPath (Sql.existingConnection conn) + + /// + /// Retrieve JSON documents matching a JSON Path match query (@?) ordered by the given fields in the + /// document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The JSON Path expression to match + /// Fields by which the results should be ordered + /// All JSON documents matching the given JSON Path expression, ordered by the given fields + [] + static member inline JsonByJsonPathOrdered(conn, tableName, jsonPath, orderFields) = + Json.byJsonPathOrdered tableName jsonPath orderFields (Sql.existingConnection conn) + + /// + /// Write JSON documents to the given StreamWriter matching a JSON Path match query (@?) ordered + /// by the given fields in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The JSON Path expression to match + /// Fields by which the results should be ordered + [] + static member inline WriteJsonByJsonPathOrdered(conn, tableName, writer, jsonPath, orderFields) = + Json.writeByJsonPathOrdered tableName writer jsonPath orderFields (Sql.existingConnection conn) + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) + /// + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The first matching JSON document if found, an empty JSON document otherwise + [] + static member inline JsonFirstByFields(conn, tableName, howMatched, fields) = + Json.firstByFields tableName howMatched fields (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// (->> =, etc.) + /// + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + [] + static member inline WriteJsonFirstByFields(conn, tableName, writer, howMatched, fields) = + Json.writeFirstByFields tableName writer howMatched fields (Sql.existingConnection conn) + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) 
ordered by the + /// given fields in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The first matching JSON document if found, an empty JSON document otherwise + [] + static member inline JsonFirstByFieldsOrdered(conn, tableName, howMatched, queryFields, orderFields) = + Json.firstByFieldsOrdered tableName howMatched queryFields orderFields (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// (->> =, etc.) ordered by the given fields in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + [] + static member inline WriteJsonFirstByFieldsOrdered(conn, tableName, writer, howMatched, queryFields, orderFields) = + Json.writeFirstByFieldsOrdered + tableName writer howMatched queryFields orderFields (Sql.existingConnection conn) + + /// Retrieve the first JSON document matching a JSON containment query (@>) + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The document to match with the containment query + /// The first matching JSON document if found, an empty JSON document otherwise + [] + static member inline JsonFirstByContains(conn, tableName, criteria: obj) = + Json.firstByContains tableName criteria (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given StreamWriter matching a JSON containment query + /// (@>) + /// + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The document to match with the containment query + [] + static member inline WriteJsonFirstByContains(conn, tableName, writer, criteria: obj) = + Json.writeFirstByContains tableName writer criteria (Sql.existingConnection conn) + + /// + /// Retrieve the first JSON document matching a JSON containment query (@>) ordered by the given + /// fields in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The document to match with the containment query + /// Fields by which the results should be ordered + /// The first matching JSON document if found, an empty JSON document otherwise + [] + static member inline JsonFirstByContainsOrdered(conn, tableName, criteria: obj, orderFields) = + Json.firstByContainsOrdered tableName criteria orderFields (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given StreamWriter matching a JSON containment query + /// (@>) ordered by the given fields in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The document to match with the containment query + /// Fields by which the results should be ordered 
+ [] + static member inline WriteJsonFirstByContainsOrdered(conn, tableName, writer, criteria: obj, orderFields) = + Json.writeFirstByContainsOrdered tableName writer criteria orderFields (Sql.existingConnection conn) + + /// Retrieve the first JSON document matching a JSON Path match query (@?) + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The JSON Path expression to match + /// The first matching JSON document if found, an empty JSON document otherwise + [] + static member inline JsonFirstByJsonPath(conn, tableName, jsonPath) = + Json.firstByJsonPath tableName jsonPath (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given StreamWriter matching a JSON Path match query (@?) + /// + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The JSON Path expression to match + [] + static member inline WriteJsonFirstByJsonPath(conn, tableName, writer, jsonPath) = + Json.writeFirstByJsonPath tableName writer jsonPath (Sql.existingConnection conn) + + /// + /// Retrieve the first JSON document matching a JSON Path match query (@?) ordered by the given fields + /// in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The JSON Path expression to match + /// Fields by which the results should be ordered + /// The first matching JSON document if found, an empty JSON document otherwise + [] + static member inline JsonFirstByJsonPathOrdered(conn, tableName, jsonPath, orderFields) = + Json.firstByJsonPathOrdered tableName jsonPath orderFields (Sql.existingConnection conn) + + /// + /// Write the first JSON document to the given StreamWriter matching a JSON Path match query (@?) 
+ /// ordered by the given fields in the document + /// + /// The NpgsqlConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The JSON Path expression to match + /// Fields by which the results should be ordered + [] + static member inline WriteJsonFirstByJsonPathOrdered(conn, tableName, writer, jsonPath, orderFields) = + Json.writeFirstByJsonPathOrdered tableName writer jsonPath orderFields (Sql.existingConnection conn) + /// Update (replace) an entire document by its ID - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which a document should be updated (may include schema) /// The ID of the document to be updated (replaced) /// The new document @@ -713,7 +1398,7 @@ type NpgsqlConnectionCSharpExtensions = /// /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the document /// - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which a document should be updated (may include schema) /// The function to obtain the ID of the document /// The new document @@ -722,7 +1407,7 @@ type NpgsqlConnectionCSharpExtensions = Update.ByFunc(tableName, idFunc, document, Sql.existingConnection conn) /// Patch a document by its ID - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which a document should be patched (may include schema) /// The ID of the document to patch /// The partial document to patch the existing document @@ -733,7 +1418,7 @@ type NpgsqlConnectionCSharpExtensions = /// /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) /// - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which documents should be patched (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -743,7 +1428,7 @@ type NpgsqlConnectionCSharpExtensions = Patch.byFields tableName howMatched fields patch (Sql.existingConnection conn) /// Patch documents using a JSON containment query in the WHERE clause (@>) - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which documents should be patched (may include schema) /// The document to match the containment query /// The partial document to patch the existing document @@ -752,7 +1437,7 @@ type NpgsqlConnectionCSharpExtensions = Patch.byContains tableName criteria patch (Sql.existingConnection conn) /// Patch documents using a JSON Path match query in the WHERE clause (@?) 
- /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which documents should be patched (may include schema) /// The JSON Path expression to match /// The partial document to patch the existing document @@ -761,7 +1446,7 @@ type NpgsqlConnectionCSharpExtensions = Patch.byJsonPath tableName jsonPath patch (Sql.existingConnection conn) /// Remove fields from a document by the document's ID - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which a document should be modified (may include schema) /// The ID of the document to modify /// One or more field names to remove from the document @@ -770,7 +1455,7 @@ type NpgsqlConnectionCSharpExtensions = RemoveFields.byId tableName docId fieldNames (Sql.existingConnection conn) /// Remove fields from documents via a comparison on JSON fields in the document - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which documents should be modified (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -780,7 +1465,7 @@ type NpgsqlConnectionCSharpExtensions = RemoveFields.byFields tableName howMatched fields fieldNames (Sql.existingConnection conn) /// Remove fields from documents via a JSON containment query (@>) - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which documents should be modified (may include schema) /// The document to match the containment query /// One or more field names to remove from the matching documents @@ -789,7 +1474,7 @@ type NpgsqlConnectionCSharpExtensions = RemoveFields.byContains tableName criteria fieldNames (Sql.existingConnection conn) /// Remove fields from documents via a JSON Path match query (@?) - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which documents should be modified (may include schema) /// The JSON Path expression to match /// One or more field names to remove from the matching documents @@ -798,7 +1483,7 @@ type NpgsqlConnectionCSharpExtensions = RemoveFields.byJsonPath tableName jsonPath fieldNames (Sql.existingConnection conn) /// Delete a document by its ID - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which a document should be deleted (may include schema) /// The ID of the document to delete [] @@ -806,7 +1491,7 @@ type NpgsqlConnectionCSharpExtensions = Delete.byId tableName docId (Sql.existingConnection conn) /// Delete documents by matching a JSON field comparison query (->> =, etc.) 
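// Usage sketch for the delete commands these members wrap, called F#-style
// on an existing connection; criteria and path values are illustrative.
//
//     let purge (conn: Npgsql.NpgsqlConnection) = task {
//         do! Delete.byContains "customer" {| IsActive = false |} (Sql.existingConnection conn)
//         do! Delete.byJsonPath "customer" "$.SignedUp ? (@ < \"2020-01-01\")" (Sql.existingConnection conn)
//     }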
- /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which documents should be deleted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -815,7 +1500,7 @@ type NpgsqlConnectionCSharpExtensions = Delete.byFields tableName howMatched fields (Sql.existingConnection conn) /// Delete documents by matching a JSON contains query (@>) - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which documents should be deleted (may include schema) /// The document to match the containment query [] @@ -823,7 +1508,7 @@ type NpgsqlConnectionCSharpExtensions = Delete.byContains tableName criteria (Sql.existingConnection conn) /// Delete documents by matching a JSON Path match query (@?) - /// The NpgsqlConnection on which to run the query + /// The NpgsqlConnection on which to run the query /// The table in which documents should be deleted (may include schema) /// The JSON Path expression to match [] diff --git a/src/Postgres/Functions.fs b/src/Postgres/Functions.fs index fb189e9..910fb90 100644 --- a/src/Postgres/Functions.fs +++ b/src/Postgres/Functions.fs @@ -21,6 +21,40 @@ module Custom = let List<'TDoc>(query, parameters, mapFunc: System.Func) = WithProps.Custom.List<'TDoc>(query, parameters, mapFunc, fromDataSource ()) + /// Execute a query that returns a JSON array of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// A JSON array of results for the given query + [] + let jsonArray query parameters mapFunc = + WithProps.Custom.jsonArray query parameters mapFunc (fromDataSource ()) + + /// Execute a query that returns a JSON array of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// A JSON array of results for the given query + let JsonArray(query, parameters, mapFunc) = + WithProps.Custom.JsonArray(query, parameters, mapFunc, fromDataSource ()) + + /// Execute a query, writing its results to the given StreamWriter + /// The query to retrieve the results + /// Parameters to use for the query + /// The StreamWriter to which the results should be written + /// The mapping function to extract the document + [] + let writeJsonArray query parameters writer mapFunc = + WithProps.Custom.writeJsonArray query parameters writer mapFunc + + /// Execute a query, writing its results to the given StreamWriter + /// The query to retrieve the results + /// Parameters to use for the query + /// The StreamWriter to which the results should be written + /// The mapping function to extract the document + let WriteJsonArray(query, parameters, writer, mapFunc) = + WithProps.Custom.WriteJsonArray(query, parameters, writer, mapFunc, fromDataSource ()) + /// Execute a query that returns one or no results /// The query to retrieve the results /// Parameters to use for the query @@ -39,6 +73,23 @@ module Custom = query, parameters, mapFunc: System.Func) = WithProps.Custom.Single<'TDoc>(query, parameters, mapFunc, fromDataSource ()) + /// Execute a query that returns one or no JSON documents + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The JSON document with the first matching result, or an empty document if not found + [] + let jsonSingle query parameters 
mapFunc = + WithProps.Custom.jsonSingle query parameters mapFunc (fromDataSource ()) + + /// Execute a query that returns one or no JSON documents + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The JSON document with the first matching result, or an empty document if not found + let JsonSingle(query, parameters, mapFunc) = + WithProps.Custom.JsonSingle(query, parameters, mapFunc, fromDataSource ()) + /// Execute a query that returns no results /// The query to retrieve the results /// Parameters to use for the query @@ -184,7 +235,7 @@ module Exists = WithProps.Exists.byJsonPath tableName jsonPath (fromDataSource ()) -/// Commands to retrieve documents +/// Commands to retrieve documents as domain objects [] module Find = @@ -473,6 +524,322 @@ module Find = WithProps.Find.FirstByJsonPathOrdered<'TDoc>(tableName, jsonPath, orderFields, fromDataSource ()) +/// Commands to retrieve documents as JSON +[] +module Json = + + /// Retrieve all documents in the given table as a JSON array + /// The table from which documents should be retrieved (may include schema) + /// All documents from the given table as a JSON array + [] + let all tableName = + WithProps.Json.all tableName (fromDataSource ()) + + /// Write all documents in the given table to the given StreamWriter + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + [] + let writeAll tableName writer = + WithProps.Json.writeAll tableName writer (fromDataSource ()) + + /// + /// Retrieve all documents in the given table as a JSON array, ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// All documents from the given table as a JSON array, ordered by the given fields + [] + let allOrdered tableName orderFields = + WithProps.Json.allOrdered tableName orderFields (fromDataSource ()) + + /// + /// Write all documents in the given table to the given StreamWriter, ordered by the given fields in the + /// document + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Fields by which the results should be ordered + [] + let writeAllOrdered tableName writer orderFields = + WithProps.Json.writeAllOrdered tableName writer orderFields (fromDataSource ()) + + /// Retrieve a JSON document by its ID + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The JSON document if found, an empty JSON document otherwise + [] + let byId<'TKey> tableName (docId: 'TKey) = + WithProps.Json.byId tableName docId (fromDataSource ()) + + /// Write a JSON document to the given StreamWriter by its ID + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The ID of the document to retrieve + [] + let writeById<'TKey> tableName writer (docId: 'TKey) = + WithProps.Json.writeById tableName writer docId (fromDataSource ()) + + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) 
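A short sketch of calling the data-source-backed Json functions defined above; the table name, document ID, and ordering field are illustrative:

    // Sketch (illustrative values): raw-JSON retrieval against the configured data source.
    // Empty results come back as "[]" (arrays) or "{}" (single documents).
    let allJson     = Json.all "my_table"
    let oneJson     = Json.byId<string> "my_table" "one"
    let orderedJson = Json.allOrdered "my_table" [ Field.Named "Id" ]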
+ /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// All JSON documents matching the given fields + [] + let byFields tableName howMatched fields = + WithProps.Json.byFields tableName howMatched fields (fromDataSource ()) + + /// + /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.) + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + [] + let writeByFields tableName writer howMatched fields = + WithProps.Json.writeByFields tableName writer howMatched fields (fromDataSource ()) + + /// + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) ordered by the given fields + /// in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// All JSON documents matching the given fields, ordered by the other given fields + [] + let byFieldsOrdered tableName howMatched queryFields orderFields = + WithProps.Json.byFieldsOrdered tableName howMatched queryFields orderFields (fromDataSource ()) + + /// + /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.) + /// ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + [] + let writeByFieldsOrdered tableName writer howMatched queryFields orderFields = + WithProps.Json.writeByFieldsOrdered tableName writer howMatched queryFields orderFields (fromDataSource ()) + + /// Retrieve JSON documents matching a JSON containment query (@>) + /// The table from which documents should be retrieved (may include schema) + /// The document to match with the containment query + /// All JSON documents matching the given containment query + [] + let byContains tableName (criteria: obj) = + WithProps.Json.byContains tableName criteria (fromDataSource ()) + + /// + /// Write JSON documents to the given StreamWriter matching a JSON containment query (@>) + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The document to match with the containment query + [] + let writeByContains tableName writer (criteria: obj) = + WithProps.Json.writeByContains tableName writer criteria (fromDataSource ()) + + /// + /// Retrieve JSON documents matching a JSON containment query (@>) ordered by the given fields in the + /// document + /// + /// The table from which documents should be retrieved (may include schema) + /// The document to match with the containment query + /// Fields by which the results should be ordered + /// All documents matching the given containment query, ordered by the given fields + [] + let byContainsOrdered tableName (criteria: obj) orderFields = + WithProps.Json.byContainsOrdered tableName criteria orderFields (fromDataSource ()) + + /// + /// Write JSON documents to the given StreamWriter 
matching a JSON containment query (@>) ordered + /// by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The document to match with the containment query + /// Fields by which the results should be ordered + [] + let writeByContainsOrdered tableName writer (criteria: obj) orderFields = + WithProps.Json.writeByContainsOrdered tableName writer criteria orderFields (fromDataSource ()) + + /// Retrieve JSON documents matching a JSON Path match query (@?) + /// The table from which documents should be retrieved (may include schema) + /// The JSON Path expression to match + /// All JSON documents matching the given JSON Path expression + [] + let byJsonPath tableName jsonPath = + WithProps.Json.byJsonPath tableName jsonPath (fromDataSource ()) + + /// + /// Write JSON documents to the given StreamWriter matching a JSON Path match query (@?) + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The JSON Path expression to match + [] + let writeByJsonPath tableName writer jsonPath = + WithProps.Json.writeByJsonPath tableName writer jsonPath (fromDataSource ()) + + /// + /// Retrieve JSON documents matching a JSON Path match query (@?) ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The JSON Path expression to match + /// Fields by which the results should be ordered + /// All JSON documents matching the given JSON Path expression, ordered by the given fields + [] + let byJsonPathOrdered tableName jsonPath orderFields = + WithProps.Json.byJsonPathOrdered tableName jsonPath orderFields (fromDataSource ()) + + /// + /// Write JSON documents to the given StreamWriter matching a JSON Path match query (@?) ordered by + /// the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The JSON Path expression to match + /// Fields by which the results should be ordered + [] + let writeByJsonPathOrdered tableName writer jsonPath orderFields = + WithProps.Json.writeByJsonPathOrdered tableName writer jsonPath orderFields (fromDataSource ()) + + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByFields tableName howMatched fields = + WithProps.Json.firstByFields tableName howMatched fields (fromDataSource ()) + + /// + /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// (->> =, etc.) + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + [] + let writeFirstByFields tableName writer howMatched fields = + WithProps.Json.writeFirstByFields tableName writer howMatched fields (fromDataSource ()) + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) 
ordered by the given + /// fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByFieldsOrdered tableName howMatched queryFields orderFields = + WithProps.Json.firstByFieldsOrdered tableName howMatched queryFields orderFields (fromDataSource ()) + + /// + /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// (->> =, etc.) ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + [] + let writeFirstByFieldsOrdered tableName writer howMatched queryFields orderFields = + WithProps.Json.writeFirstByFieldsOrdered tableName writer howMatched queryFields orderFields (fromDataSource ()) + + /// Retrieve the first JSON document matching a JSON containment query (@>) + /// The table from which a document should be retrieved (may include schema) + /// The document to match with the containment query + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByContains tableName (criteria: obj) = + WithProps.Json.firstByContains tableName criteria (fromDataSource ()) + + /// + /// Write the first JSON document to the given StreamWriter matching a JSON containment query (@>) + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The document to match with the containment query + [] + let writeFirstByContains tableName writer (criteria: obj) = + WithProps.Json.writeFirstByContains tableName writer criteria (fromDataSource ()) + + /// + /// Retrieve the first JSON document matching a JSON containment query (@>) ordered by the given fields in + /// the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The document to match with the containment query + /// Fields by which the results should be ordered + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByContainsOrdered tableName (criteria: obj) orderFields = + WithProps.Json.firstByContainsOrdered tableName criteria orderFields (fromDataSource ()) + + /// + /// Write the first JSON document to the given StreamWriter matching a JSON containment query (@>) + /// ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The document to match with the containment query + /// Fields by which the results should be ordered + [] + let writeFirstByContainsOrdered tableName writer (criteria: obj) orderFields = + WithProps.Json.writeFirstByContainsOrdered tableName writer criteria orderFields (fromDataSource ()) + + /// Retrieve the first JSON document matching a JSON Path match query (@?) 
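Unlike the Find functions, which return an option (or null in C#) when nothing matches, these first-document functions return the JSON itself, with an empty object standing in for "not found"; a small sketch, with illustrative table and criteria:

    // Sketch (illustrative values): "{}" signals that no document matched.
    match Json.firstByContains "my_table" {| Value = "purple" |} with
    | "{}" -> printfn "no matching document"
    | json -> printfn "%s" json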
+ /// The table from which a document should be retrieved (may include schema) + /// The JSON Path expression to match + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByJsonPath tableName jsonPath = + WithProps.Json.firstByJsonPath tableName jsonPath (fromDataSource ()) + + /// + /// Write the first JSON document to the given StreamWriter matching a JSON Path match query (@?) + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The JSON Path expression to match + [] + let writeFirstByJsonPath tableName writer jsonPath = + WithProps.Json.writeFirstByJsonPath tableName writer jsonPath (fromDataSource ()) + + /// + /// Retrieve the first JSON document matching a JSON Path match query (@?) ordered by the given fields in the + /// document + /// + /// The table from which a document should be retrieved (may include schema) + /// The JSON Path expression to match + /// Fields by which the results should be ordered + /// The first matching JSON document if found, an empty JSON document otherwise + [] + let firstByJsonPathOrdered tableName jsonPath orderFields = + WithProps.Json.firstByJsonPathOrdered tableName jsonPath orderFields (fromDataSource ()) + + /// + /// Write the first JSON document to the given StreamWriter matching a JSON Path match query (@?) + /// ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The JSON Path expression to match + /// Fields by which the results should be ordered + [] + let writeFirstByJsonPathOrdered tableName writer jsonPath orderFields = + WithProps.Json.writeFirstByJsonPathOrdered tableName writer jsonPath orderFields (fromDataSource ()) + + /// Commands to update documents [] module Update = diff --git a/src/Postgres/Library.fs b/src/Postgres/Library.fs index 97cb339..4b20b4a 100644 --- a/src/Postgres/Library.fs +++ b/src/Postgres/Library.fs @@ -2,7 +2,6 @@ open System.IO open System.Text -open Npgsql.FSharp /// The type of index to generate for the document [] @@ -362,4 +361,3 @@ module Results = /// The query from which JSON should be extracted let WriteJsonArray(writer, mapFunc: System.Func, sqlProps) = writeJsonArray writer mapFunc.Invoke sqlProps - \ No newline at end of file diff --git a/src/Postgres/WithProps.fs b/src/Postgres/WithProps.fs index fcb7360..249697a 100644 --- a/src/Postgres/WithProps.fs +++ b/src/Postgres/WithProps.fs @@ -37,7 +37,7 @@ module Custom = /// Execute a query that returns a JSON array of results /// The query to retrieve the results /// Parameters to use for the query - /// The mapping function between the document and the domain item + /// The mapping function to extract the document /// The SqlProps to use to execute the query /// A JSON array of results for the given query [] @@ -49,7 +49,7 @@ module Custom = /// Execute a query that returns a JSON array of results /// The query to retrieve the results /// Parameters to use for the query - /// The mapping function between the document and the domain item + /// The mapping function to extract the document /// The SqlProps to use to execute the query /// A JSON array of results for the given query let JsonArray(query, parameters, mapFunc: System.Func, sqlProps) = @@ -59,7 +59,7 @@ module Custom = /// The query to retrieve the results /// Parameters to use for the query /// The 
StreamWriter to which the results should be written - /// The mapping function between the document and the domain item + /// The mapping function to extract the document /// The SqlProps to use to execute the query [] let writeJsonArray query parameters writer (mapFunc: RowReader -> string) sqlProps = @@ -71,7 +71,7 @@ module Custom = /// The query to retrieve the results /// Parameters to use for the query /// The StreamWriter to which the results should be written - /// The mapping function between the document and the domain item + /// The mapping function to extract the document /// The SqlProps to use to execute the query let WriteJsonArray(query, parameters, writer, mapFunc: System.Func, sqlProps) = writeJsonArray query parameters writer mapFunc.Invoke sqlProps @@ -103,7 +103,7 @@ module Custom = /// Execute a query that returns one or no JSON documents /// The query to retrieve the results /// Parameters to use for the query - /// The mapping function between the document and the domain item + /// The mapping function to extract the document /// The SqlProps to use to execute the query /// The JSON document with the first matching result, or an empty document if not found [] @@ -114,7 +114,7 @@ module Custom = /// Execute a query that returns one or no JSON documents /// The query to retrieve the results /// Parameters to use for the query - /// The mapping function between the document and the domain item + /// The mapping function to extract the document /// The SqlProps to use to execute the query /// The JSON document with the first matching result, or an empty document if not found let JsonSingle(query, parameters, mapFunc: System.Func, sqlProps) = @@ -733,7 +733,6 @@ module Json = /// The table from which documents should be retrieved (may include schema) /// The StreamWriter to which the results should be written /// The SqlProps to use to execute the query - /// All documents from the given table as a JSON array [] let writeAll tableName writer sqlProps = Custom.writeJsonArray (Query.find tableName) [] writer jsonFromData sqlProps @@ -757,7 +756,6 @@ module Json = /// The StreamWriter to which the results should be written /// Fields by which the results should be ordered /// The SqlProps to use to execute the query - /// All documents from the given table as a JSON array, ordered by the given fields [] let writeAllOrdered tableName writer orderFields sqlProps = Custom.writeJsonArray @@ -902,7 +900,7 @@ module Json = writer jsonFromData sqlProps - + /// Retrieve JSON documents matching a JSON Path match query (@?) /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match -- 2.47.2 From 0e489617f7b8b0d10cb51525ebf494b71e1409c6 Mon Sep 17 00:00:00 2001 From: "Daniel J. 
Summers" Date: Fri, 4 Apr 2025 23:28:10 -0400 Subject: [PATCH 03/22] Add Custom Json tests --- src/Postgres/Functions.fs | 2 +- src/Postgres/WithProps.fs | 38 ++-- .../BitBadger.Documents.Tests.CSharp.csproj | 1 + src/Tests.CSharp/PostgresCSharpTests.cs | 92 +++++++++- src/Tests/PostgresTests.fs | 165 ++++++++++++++---- 5 files changed, 237 insertions(+), 61 deletions(-) diff --git a/src/Postgres/Functions.fs b/src/Postgres/Functions.fs index 910fb90..7ffca97 100644 --- a/src/Postgres/Functions.fs +++ b/src/Postgres/Functions.fs @@ -45,7 +45,7 @@ module Custom = /// The mapping function to extract the document [] let writeJsonArray query parameters writer mapFunc = - WithProps.Custom.writeJsonArray query parameters writer mapFunc + WithProps.Custom.writeJsonArray query parameters writer mapFunc (fromDataSource ()) /// Execute a query, writing its results to the given StreamWriter /// The query to retrieve the results diff --git a/src/Postgres/WithProps.fs b/src/Postgres/WithProps.fs index 249697a..f0bf7d6 100644 --- a/src/Postgres/WithProps.fs +++ b/src/Postgres/WithProps.fs @@ -109,7 +109,7 @@ module Custom = [] let jsonSingle query parameters mapFunc sqlProps = let results = jsonArray $"%s{query} LIMIT 1" parameters mapFunc sqlProps - if results = "[]" then "{}" else results[1..results.Length - 1] + if results = "[]" then "{}" else results[1..results.Length - 2] /// Execute a query that returns one or no JSON documents /// The query to retrieve the results @@ -545,7 +545,7 @@ module Find = [] let firstByFields<'TDoc> tableName howMatched fields sqlProps = Custom.single<'TDoc> - $"{Query.byFields (Query.find tableName) howMatched fields} LIMIT 1" + (Query.byFields (Query.find tableName) howMatched fields) (addFieldParams fields []) fromData<'TDoc> sqlProps @@ -558,7 +558,7 @@ module Find = /// The first document, or null if not found let FirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, howMatched, fields, sqlProps) = Custom.Single<'TDoc>( - $"{Query.byFields (Query.find tableName) howMatched fields} LIMIT 1", + Query.byFields (Query.find tableName) howMatched fields, addFieldParams fields [], fromData<'TDoc>, sqlProps) @@ -576,7 +576,7 @@ module Find = [] let firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields sqlProps = Custom.single<'TDoc> - $"{Query.byFields (Query.find tableName) howMatched queryFields}{Query.orderBy orderFields PostgreSQL} LIMIT 1" + (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields PostgreSQL) (addFieldParams queryFields []) fromData<'TDoc> sqlProps @@ -594,7 +594,7 @@ module Find = let FirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( tableName, howMatched, queryFields, orderFields, sqlProps) = Custom.Single<'TDoc>( - $"{Query.byFields (Query.find tableName) howMatched queryFields}{Query.orderBy orderFields PostgreSQL} LIMIT 1", + Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields PostgreSQL, addFieldParams queryFields [], fromData<'TDoc>, sqlProps) @@ -607,10 +607,7 @@ module Find = [] let firstByContains<'TDoc> tableName (criteria: obj) sqlProps = Custom.single<'TDoc> - $"{Query.byContains (Query.find tableName)} LIMIT 1" - [ jsonParam "@criteria" criteria ] - fromData<'TDoc> - sqlProps + (Query.byContains (Query.find tableName)) [ jsonParam "@criteria" criteria ] fromData<'TDoc> sqlProps /// Retrieve the first document matching a JSON containment query (@>) /// The table from which a document should be retrieved (may include 
schema) @@ -619,10 +616,7 @@ module Find = /// The first document, or null if not found let FirstByContains<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, criteria: obj, sqlProps) = Custom.Single<'TDoc>( - $"{Query.byContains (Query.find tableName)} LIMIT 1", - [ jsonParam "@criteria" criteria ], - fromData<'TDoc>, - sqlProps) + Query.byContains (Query.find tableName), [ jsonParam "@criteria" criteria ], fromData<'TDoc>, sqlProps) /// /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in the @@ -636,7 +630,7 @@ module Find = [] let firstByContainsOrdered<'TDoc> tableName (criteria: obj) orderFields sqlProps = Custom.single<'TDoc> - $"{Query.byContains (Query.find tableName)}{Query.orderBy orderFields PostgreSQL} LIMIT 1" + (Query.byContains (Query.find tableName) + Query.orderBy orderFields PostgreSQL) [ jsonParam "@criteria" criteria ] fromData<'TDoc> sqlProps @@ -653,7 +647,7 @@ module Find = let FirstByContainsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( tableName, criteria: obj, orderFields, sqlProps) = Custom.Single<'TDoc>( - $"{Query.byContains (Query.find tableName)}{Query.orderBy orderFields PostgreSQL} LIMIT 1", + Query.byContains (Query.find tableName) + Query.orderBy orderFields PostgreSQL, [ jsonParam "@criteria" criteria ], fromData<'TDoc>, sqlProps) @@ -666,7 +660,7 @@ module Find = [] let firstByJsonPath<'TDoc> tableName jsonPath sqlProps = Custom.single<'TDoc> - $"{Query.byPathMatch (Query.find tableName)} LIMIT 1" + (Query.byPathMatch (Query.find tableName)) [ "@path", Sql.string jsonPath ] fromData<'TDoc> sqlProps @@ -678,7 +672,7 @@ module Find = /// The first document, or null if not found let FirstByJsonPath<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, jsonPath, sqlProps) = Custom.Single<'TDoc>( - $"{Query.byPathMatch (Query.find tableName)} LIMIT 1", + Query.byPathMatch (Query.find tableName), [ "@path", Sql.string jsonPath ], fromData<'TDoc>, sqlProps) @@ -695,7 +689,7 @@ module Find = [] let firstByJsonPathOrdered<'TDoc> tableName jsonPath orderFields sqlProps = Custom.single<'TDoc> - $"{Query.byPathMatch (Query.find tableName)}{Query.orderBy orderFields PostgreSQL} LIMIT 1" + (Query.byPathMatch (Query.find tableName) + Query.orderBy orderFields PostgreSQL) [ "@path", Sql.string jsonPath ] fromData<'TDoc> sqlProps @@ -712,7 +706,7 @@ module Find = let FirstByJsonPathOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( tableName, jsonPath, orderFields, sqlProps) = Custom.Single<'TDoc>( - $"{Query.byPathMatch (Query.find tableName)}{Query.orderBy orderFields PostgreSQL} LIMIT 1", + Query.byPathMatch (Query.find tableName) + Query.orderBy orderFields PostgreSQL, [ "@path", Sql.string jsonPath ], fromData<'TDoc>, sqlProps) @@ -994,7 +988,7 @@ module Json = [] let firstByFieldsOrdered tableName howMatched queryFields orderFields sqlProps = Custom.jsonSingle - $"{Query.byFields (Query.find tableName) howMatched queryFields}{Query.orderBy orderFields PostgreSQL}" + (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields PostgreSQL) (addFieldParams queryFields []) jsonFromData sqlProps @@ -1046,7 +1040,7 @@ module Json = [] let firstByContainsOrdered tableName (criteria: obj) orderFields sqlProps = Custom.jsonSingle - $"{Query.byContains (Query.find tableName)}{Query.orderBy orderFields PostgreSQL}" + (Query.byContains (Query.find tableName) + Query.orderBy orderFields PostgreSQL) [ jsonParam "@criteria" criteria ] jsonFromData sqlProps @@ -1097,7 +1091,7 @@ 
module Json = [] let firstByJsonPathOrdered tableName jsonPath orderFields sqlProps = Custom.jsonSingle - $"{Query.byPathMatch (Query.find tableName)}{Query.orderBy orderFields PostgreSQL}" + (Query.byPathMatch (Query.find tableName) + Query.orderBy orderFields PostgreSQL) [ "@path", Sql.string jsonPath ] jsonFromData sqlProps diff --git a/src/Tests.CSharp/BitBadger.Documents.Tests.CSharp.csproj b/src/Tests.CSharp/BitBadger.Documents.Tests.CSharp.csproj index b231560..ae872f4 100644 --- a/src/Tests.CSharp/BitBadger.Documents.Tests.CSharp.csproj +++ b/src/Tests.CSharp/BitBadger.Documents.Tests.CSharp.csproj @@ -4,6 +4,7 @@ enable enable latest + 1591 diff --git a/src/Tests.CSharp/PostgresCSharpTests.cs b/src/Tests.CSharp/PostgresCSharpTests.cs index 3a95a2c..5e38dc6 100644 --- a/src/Tests.CSharp/PostgresCSharpTests.cs +++ b/src/Tests.CSharp/PostgresCSharpTests.cs @@ -319,7 +319,7 @@ public static class PostgresCSharpTests "By-JSON Path query not correct"); }) ]); - + /// /// Add the test documents to the database /// @@ -328,6 +328,22 @@ public static class PostgresCSharpTests foreach (var doc in JsonDocument.TestDocuments) await Document.Insert(SqliteDb.TableName, doc); } + /// Set up a stream writer for a test + internal static StreamWriter WriteStream(Stream stream) + { + StreamWriter writer = new(stream); + writer.AutoFlush = true; + return writer; + } + + /// Get the text of the given stream + internal static string StreamText(Stream stream) + { + stream.Position = 0L; + using StreamReader reader = new(stream); + return reader.ReadToEnd(); + } + /// /// Integration tests for the Configuration module of the PostgreSQL library /// @@ -389,6 +405,57 @@ public static class PostgresCSharpTests Expect.isEmpty(docs, "There should have been no documents returned"); }) ]), + TestList("JsonArray", + [ + TestCase("succeeds when data is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + var docs = Custom.JsonArray(Query.Find(PostgresDb.TableName), Parameters.None, Results.JsonFromData); + Expect.stringStarts(docs, "[", "The JSON array should have started with `[`"); + Expect.hasLength(docs.Split("{\"Id\":"), 6, "There should have been 5 documents returned"); + Expect.stringEnds(docs, "]", "The JSON array should have ended with `]`"); + }), + TestCase("succeeds when data is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + var docs = Custom.JsonArray($"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath", + [Tuple.Create("@path", Sql.@string("$.NumValue ?
(@ > 100)"))], Results.JsonFromData); + Expect.equal(docs, "[]", "There should have been no documents returned"); + }) + ]), + TestList("WriteJsonArray", + [ + TestCase("succeeds when data is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + Custom.WriteJsonArray(Query.Find(PostgresDb.TableName), Parameters.None, writer, Results.JsonFromData); + + var docs = StreamText(stream); + Expect.stringStarts(docs, "[", "The JSON array should have started with `[`"); + Expect.hasLength(docs.Split("{\"Id\":"), 6, "There should have been 5 documents returned"); + Expect.stringEnds(docs, "]", "The JSON array should have ended with `]`"); + }), + TestCase("succeeds when data is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + Custom.WriteJsonArray($"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath", + [Tuple.Create("@path", Sql.@string("$.NumValue ? (@ > 100)"))], writer, Results.JsonFromData); + + Expect.equal(StreamText(stream), "[]", "There should have been no documents returned"); + }) + ]), TestList("Single", [ TestCase("succeeds when a row is found", async () => @@ -411,6 +478,29 @@ public static class PostgresCSharpTests Expect.isNull(doc, "There should not have been a document returned"); }) ]), + TestList("JsonSingle", + [ + TestCase("succeeds when a row is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + var doc = Custom.JsonSingle($"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id", + [Tuple.Create("@id", Sql.@string("one"))], Results.JsonFromData); + Expect.stringStarts(doc, "{", "The document should have started with an open brace"); + Expect.stringContains(doc, "\"Id\": \"one\"", "An incorrect document was returned"); + Expect.stringEnds(doc, "}", "The document should have ended with a closing brace"); + }), + TestCase("succeeds when a row is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + var doc = Custom.JsonSingle($"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id", + [Tuple.Create("@id", Sql.@string("eighty"))], Results.JsonFromData); + Expect.equal(doc, "{}", "There should not have been a document returned"); + }) + ]), TestList("NonQuery", [ TestCase("succeeds when operating on data", async () => diff --git a/src/Tests/PostgresTests.fs b/src/Tests/PostgresTests.fs index 6043d9c..9bde8d3 100644 --- a/src/Tests/PostgresTests.fs +++ b/src/Tests/PostgresTests.fs @@ -1,5 +1,6 @@ module PostgresTests +open System.IO open Expecto open BitBadger.Documents open BitBadger.Documents.Postgres @@ -68,7 +69,7 @@ let parametersTests = testList "Parameters" [ Expect.equal (idParam "99") ("@id", Sql.string "99") "String ID parameter not constructed correctly" } test "succeeds for non-numeric non-string ID" { - let target = { new obj() with override _.ToString() = "ToString was called" } + let target = { new obj() with override _.ToString() = "ToString was called" } Expect.equal (idParam target) ("@id", Sql.string "ToString was called") @@ -275,6 +276,19 @@ let loadDocs () = backgroundTask { for doc in testDocuments do do!
insert PostgresDb.TableName doc } +/// Set up a stream writer for a test +let writeStream (stream: Stream) = + let writer = new StreamWriter(stream) + writer.AutoFlush <- true + writer + +/// Get the text of the given stream +let streamText (stream: Stream) = + stream.Position <- 0L + use reader = new StreamReader(stream) + reader.ReadToEnd() + + /// Integration tests for the Configuration module of the PostgreSQL library let configurationTests = testList "Configuration" [ test "useDataSource disposes existing source" { @@ -317,6 +331,57 @@ let customTests = testList "Custom" [ Expect.isEmpty docs "There should have been no documents returned" } ] + testList "jsonArray" [ + testTask "succeeds when data is found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + let docs = Custom.jsonArray (Query.find PostgresDb.TableName) [] jsonFromData + Expect.stringStarts docs "[" "The JSON array should have started with `[`" + Expect.hasLength (docs.Split "{\"Id\":") 6 "There should have been 5 documents returned" + Expect.stringEnds docs "]" "The JSON array should have ended with `]`" + } + testTask "succeeds when data is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + let docs = + Custom.jsonArray + $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath" + [ "@path", Sql.string "$.NumValue ? (@ > 100)" ] + jsonFromData + Expect.equal docs "[]" "There should have been no documents returned" + } + ] + testList "writeJsonArray" [ + testTask "succeeds when data is found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + Custom.writeJsonArray (Query.find PostgresDb.TableName) [] writer jsonFromData + + let docs = streamText stream + Expect.stringStarts docs "[" "The JSON array should have started with `[`" + Expect.hasLength (docs.Split "{\"Id\":") 6 "There should have been 5 documents returned" + Expect.stringEnds docs "]" "The JSON array should have ended with `]`" + } + testTask "succeeds when data is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + Custom.writeJsonArray + $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath" + [ "@path", Sql.string "$.NumValue ? (@ > 100)" ] + writer + jsonFromData + + Expect.equal (streamText stream) "[]" "There should have been no documents returned" + } + ] testList "single" [ testTask "succeeds when a row is found" { use db = PostgresDb.BuildDb() @@ -342,6 +407,32 @@ let customTests = testList "Custom" [ Expect.isNone doc "There should not have been a document returned" } ] + testList "jsonSingle" [ + testTask "succeeds when a row is found" { + use db = PostgresDb.BuildDb() + do!
loadDocs () + + let doc = + Custom.jsonSingle + $"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id" + [ "@id", Sql.string "eighty" ] + jsonFromData + Expect.equal doc "{}" "There should not have been a document returned" + } + ] testList "nonQuery" [ testTask "succeeds when operating on data" { use db = PostgresDb.BuildDb() @@ -380,7 +471,7 @@ let definitionTests = testList "Definition" [ let keyExists () = Custom.scalar "SELECT EXISTS (SELECT 1 FROM pg_class WHERE relname = 'idx_ensured_key') AS it" [] toExists - + let! exists = tableExists () let! alsoExists = keyExists () Expect.isFalse exists "The table should not exist already" @@ -397,7 +488,7 @@ let definitionTests = testList "Definition" [ let indexExists () = Custom.scalar "SELECT EXISTS (SELECT 1 FROM pg_class WHERE relname = 'idx_ensured_document') AS it" [] toExists - + let! exists = indexExists () Expect.isFalse exists "The index should not exist already" @@ -410,7 +501,7 @@ let definitionTests = testList "Definition" [ use db = PostgresDb.BuildDb() let indexExists () = Custom.scalar "SELECT EXISTS (SELECT 1 FROM pg_class WHERE relname = 'idx_ensured_test') AS it" [] toExists - + let! exists = indexExists () Expect.isFalse exists "The index should not exist already" @@ -451,12 +542,12 @@ let documentTests = testList "Document" [ use db = PostgresDb.BuildDb() let! before = Count.all PostgresDb.TableName Expect.equal before 0 "There should be no documents in the table" - + do! insert PostgresDb.TableName { Key = 0; Text = "one" } do! insert PostgresDb.TableName { Key = 0; Text = "two" } do! insert PostgresDb.TableName { Key = 77; Text = "three" } do! insert PostgresDb.TableName { Key = 0; Text = "four" } - + let! after = Find.allOrdered PostgresDb.TableName [ Field.Named "n:Key" ] Expect.hasLength after 4 "There should have been 4 documents returned" Expect.equal (after |> List.map _.Key) [ 1; 2; 77; 78 ] "The IDs were not generated correctly" @@ -470,12 +561,12 @@ let documentTests = testList "Document" [ use db = PostgresDb.BuildDb() let! before = Count.all PostgresDb.TableName Expect.equal before 0 "There should be no documents in the table" - + do! insert PostgresDb.TableName { emptyDoc with Value = "one" } do! insert PostgresDb.TableName { emptyDoc with Value = "two" } do! insert PostgresDb.TableName { emptyDoc with Id = "abc123"; Value = "three" } do! insert PostgresDb.TableName { emptyDoc with Value = "four" } - + let! after = Find.all PostgresDb.TableName Expect.hasLength after 4 "There should have been 4 documents returned" Expect.hasCountOf after 3u (fun doc -> doc.Id.Length = 32) "Three of the IDs should have been GUIDs" @@ -490,12 +581,12 @@ let documentTests = testList "Document" [ use db = PostgresDb.BuildDb() let! before = Count.all PostgresDb.TableName Expect.equal before 0 "There should be no documents in the table" - + do! insert PostgresDb.TableName { emptyDoc with Value = "one" } do! insert PostgresDb.TableName { emptyDoc with Value = "two" } do! insert PostgresDb.TableName { emptyDoc with Id = "abc123"; Value = "three" } do! insert PostgresDb.TableName { emptyDoc with Value = "four" } - + let! after = Find.all PostgresDb.TableName Expect.hasLength after 4 "There should have been 4 documents returned" Expect.hasCountOf @@ -549,7 +640,7 @@ let countTests = testList "Count" [ testTask "succeeds when items are found" { use db = PostgresDb.BuildDb() do! loadDocs () - + let! 
theCount = Count.byFields PostgresDb.TableName Any [ Field.Between "NumValue" 15 20; Field.Equal "NumValue" 0 ] Expect.equal theCount 3 "There should have been 3 matching documents" @@ -557,7 +648,7 @@ let countTests = testList "Count" [ testTask "succeeds when items are not found" { use db = PostgresDb.BuildDb() do! loadDocs () - + let! theCount = Count.byFields PostgresDb.TableName All [ Field.Exists "Sub"; Field.Greater "NumValue" 100 ] Expect.equal theCount 0 "There should have been no matching documents" } @@ -672,7 +763,7 @@ let findTests = testList "Find" [ testTask "succeeds when ordering numerically" { use db = PostgresDb.BuildDb() do! loadDocs () - + let! results = Find.allOrdered PostgresDb.TableName [ Field.Named "n:NumValue" ] Expect.hasLength results 5 "There should have been 5 documents returned" Expect.equal @@ -683,7 +774,7 @@ let findTests = testList "Find" [ testTask "succeeds when ordering numerically descending" { use db = PostgresDb.BuildDb() do! loadDocs () - + let! results = Find.allOrdered PostgresDb.TableName [ Field.Named "n:NumValue DESC" ] Expect.hasLength results 5 "There should have been 5 documents returned" Expect.equal @@ -694,7 +785,7 @@ let findTests = testList "Find" [ testTask "succeeds when ordering alphabetically" { use db = PostgresDb.BuildDb() do! loadDocs () - + let! results = Find.allOrdered PostgresDb.TableName [ Field.Named "Id DESC" ] Expect.hasLength results 5 "There should have been 5 documents returned" Expect.equal @@ -750,7 +841,7 @@ let findTests = testList "Find" [ use db = PostgresDb.BuildDb() do! Definition.ensureTable PostgresDb.TableName for doc in ArrayDocument.TestDocuments do do! insert PostgresDb.TableName doc - + let! docs = Find.byFields PostgresDb.TableName All [ Field.InArray "Values" PostgresDb.TableName [ "c" ] ] @@ -760,7 +851,7 @@ let findTests = testList "Find" [ use db = PostgresDb.BuildDb() do! Definition.ensureTable PostgresDb.TableName for doc in ArrayDocument.TestDocuments do do! insert PostgresDb.TableName doc - + let! docs = Find.byFields PostgresDb.TableName All [ Field.InArray "Values" PostgresDb.TableName [ "j" ] ] @@ -1034,7 +1125,7 @@ let updateTests = testList "Update" [ let! before = Count.all PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! Update.byId PostgresDb.TableName "test" { emptyDoc with Id = "x"; Sub = Some { Foo = "blue"; Bar = "red" } } @@ -1045,7 +1136,7 @@ let updateTests = testList "Update" [ use db = PostgresDb.BuildDb() do! loadDocs () - do! Update.byFunc PostgresDb.TableName (_.Id) { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } + do! Update.byFunc PostgresDb.TableName _.Id { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } let! after = Find.byId PostgresDb.TableName "one" Expect.isSome after "There should have been a document returned post-update" Expect.equal @@ -1058,9 +1149,9 @@ let updateTests = testList "Update" [ let! before = Count.all PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test - do! Update.byFunc PostgresDb.TableName (_.Id) { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } + do! Update.byFunc PostgresDb.TableName _.Id { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } } ] ] @@ -1071,7 +1162,7 @@ let patchTests = testList "Patch" [ testTask "succeeds when a document is updated" { use db = PostgresDb.BuildDb() do! loadDocs () - + do! 
Patch.byId PostgresDb.TableName "one" {| NumValue = 44 |} let! after = Find.byId PostgresDb.TableName "one" Expect.isSome after "There should have been a document returned post-update" @@ -1082,7 +1173,7 @@ let patchTests = testList "Patch" [ let! before = Count.all PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! Patch.byId PostgresDb.TableName "test" {| Foo = "green" |} } @@ -1091,7 +1182,7 @@ let patchTests = testList "Patch" [ testTask "succeeds when a document is updated" { use db = PostgresDb.BuildDb() do! loadDocs () - + do! Patch.byFields PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] {| NumValue = 77 |} let! after = Count.byFields PostgresDb.TableName Any [ Field.Equal "NumValue" 77 ] Expect.equal after 2 "There should have been 2 documents returned" @@ -1101,7 +1192,7 @@ let patchTests = testList "Patch" [ let! before = Count.all PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! Patch.byFields PostgresDb.TableName Any [ Field.Equal "Value" "burgundy" ] {| Foo = "green" |} } @@ -1110,7 +1201,7 @@ let patchTests = testList "Patch" [ testTask "succeeds when a document is updated" { use db = PostgresDb.BuildDb() do! loadDocs () - + do! Patch.byContains PostgresDb.TableName {| Value = "purple" |} {| NumValue = 77 |} let! after = Count.byContains PostgresDb.TableName {| NumValue = 77 |} Expect.equal after 2 "There should have been 2 documents returned" @@ -1120,7 +1211,7 @@ let patchTests = testList "Patch" [ let! before = Count.all PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! Patch.byContains PostgresDb.TableName {| Value = "burgundy" |} {| Foo = "green" |} } @@ -1129,7 +1220,7 @@ let patchTests = testList "Patch" [ testTask "succeeds when a document is updated" { use db = PostgresDb.BuildDb() do! loadDocs () - + do! Patch.byJsonPath PostgresDb.TableName "$.NumValue ? (@ > 10)" {| NumValue = 1000 |} let! after = Count.byJsonPath PostgresDb.TableName "$.NumValue ? (@ > 999)" Expect.equal after 2 "There should have been 2 documents returned" @@ -1139,7 +1230,7 @@ let patchTests = testList "Patch" [ let! before = Count.all PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! Patch.byJsonPath PostgresDb.TableName "$.NumValue ? (@ < 0)" {| Foo = "green" |} } @@ -1172,13 +1263,13 @@ let removeFieldsTests = testList "RemoveFields" [ testTask "succeeds when a field is not removed" { use db = PostgresDb.BuildDb() do! loadDocs () - + // This not raising an exception is the test do! RemoveFields.byId PostgresDb.TableName "two" [ "AFieldThatIsNotThere" ] } testTask "succeeds when no document is matched" { use db = PostgresDb.BuildDb() - + // This not raising an exception is the test do! RemoveFields.byId PostgresDb.TableName "two" [ "Value" ] } @@ -1207,13 +1298,13 @@ let removeFieldsTests = testList "RemoveFields" [ testTask "succeeds when a field is not removed" { use db = PostgresDb.BuildDb() do! loadDocs () - + // This not raising an exception is the test do! RemoveFields.byFields PostgresDb.TableName Any [ Field.Equal "NumValue" 17 ] [ "Nothing" ] } testTask "succeeds when no document is matched" { use db = PostgresDb.BuildDb() - + // This not raising an exception is the test do! 
RemoveFields.byFields PostgresDb.TableName Any [ Field.NotEqual "Abracadabra" "apple" ] [ "Value" ] } @@ -1242,13 +1333,13 @@ let removeFieldsTests = testList "RemoveFields" [ testTask "succeeds when a field is not removed" { use db = PostgresDb.BuildDb() do! loadDocs () - + // This not raising an exception is the test do! RemoveFields.byContains PostgresDb.TableName {| NumValue = 17 |} [ "Nothing" ] } testTask "succeeds when no document is matched" { use db = PostgresDb.BuildDb() - + // This not raising an exception is the test do! RemoveFields.byContains PostgresDb.TableName {| Abracadabra = "apple" |} [ "Value" ] } @@ -1277,13 +1368,13 @@ let removeFieldsTests = testList "RemoveFields" [ testTask "succeeds when a field is not removed" { use db = PostgresDb.BuildDb() do! loadDocs () - + // This not raising an exception is the test do! RemoveFields.byJsonPath PostgresDb.TableName "$.NumValue ? (@ == 17)" [ "Nothing" ] } testTask "succeeds when no document is matched" { use db = PostgresDb.BuildDb() - + // This not raising an exception is the test do! RemoveFields.byJsonPath PostgresDb.TableName "$.Abracadabra ? (@ == \"apple\")" [ "Value" ] } -- 2.47.2 From 8c1d534d7a2c0c5bc1743a81d8669e04115c576b Mon Sep 17 00:00:00 2001 From: "Daniel J. Summers" <daniel@bitbadger.solutions> Date: Sat, 5 Apr 2025 16:05:06 -0400 Subject: [PATCH 04/22] Add F# string-returning Json tests --- src/Tests/PostgresTests.fs | 321 +++++++++++++++++++++++++++++++++++++ 1 file changed, 321 insertions(+) diff --git a/src/Tests/PostgresTests.fs b/src/Tests/PostgresTests.fs index 9bde8d3..46d9a2d 100644 --- a/src/Tests/PostgresTests.fs +++ b/src/Tests/PostgresTests.fs @@ -1107,6 +1107,326 @@ let findTests = testList "Find" [ ] ] +/// Verify a JSON array begins with "[" and ends with "]" +let private verifyBeginEnd json = + Expect.stringStarts json "[" "The array should have started with `[`" + Expect.stringEnds json "]" "The array should have ended with `]`" + +/// Verify the presence of a document by its ID +let private verifyDocById docId json = + Expect.stringContains json $"{{\"Id\": \"%s{docId}\"," $"Document `{docId}` not present" + +/// Verify a JSON array containing a single document with the given ID +let private verifySingleById docId json = + verifyBeginEnd json + Expect.stringContains json $"{{\"Id\": \"%s{docId}\"," $"Document `{docId}` not present" + +/// Verify the presence of any of the given document IDs in the given JSON +let private verifyAnyById (docIds: string list) (json: string) = + match docIds |> List.tryFind (fun it -> json.Contains $"{{\"Id\": \"{it}\"") with + | Some _ -> () + | None -> + let ids = docIds |> String.concat ", " + Expect.isTrue false $"Could not find any of IDs {ids} in {json}" + +/// Verify the JSON for `all` returning data +let private verifyAllData json = + verifyBeginEnd json + for docId in [ "one"; "two"; "three"; "four"; "five" ] do verifyDocById docId json + +/// Verify an empty JSON array +let private verifyEmpty json = + Expect.equal json "[]" "There should be no documents returned" + +/// Verify an empty JSON document +let private verifyNoDoc json = + Expect.equal json "{}" "There should be no document returned" + +/// Verify the JSON for an ordered query +let private verifyExpectedOrder idFirst idSecond idThird idFourth idFifth (json: string) = + let firstIdx = json.IndexOf $"{{\"Id\": \"%s{idFirst}\"," + let secondIdx = json.IndexOf $"{{\"Id\": \"%s{idSecond}\"," + verifyBeginEnd json + Expect.isGreaterThan secondIdx firstIdx $"`{idSecond}` should have been after `{idFirst}`" + match idThird with + | Some id3 ->
let thirdIdx = json.IndexOf $"{{\"Id\": \"%s{id3}\"," + Expect.isGreaterThan thirdIdx secondIdx $"`{id3}` should have been after `{idSecond}`" + match idFourth with + | Some id4 -> + let fourthIdx = json.IndexOf $"{{\"Id\": \"%s{id4}\"," + Expect.isGreaterThan fourthIdx thirdIdx $"`{id4}` should have been after `{id3}`" + match idFifth with + | Some id5 -> + let fifthIdx = json.IndexOf $"{{\"Id\": \"%s{id5}\"," + Expect.isGreaterThan fifthIdx fourthIdx $"`{id5}` should have been after `{id4}`" + | None -> () + | None -> () + | None -> () + Expect.stringEnds json "]" "The array should have ended with `]`" + +/// Integration tests for the Json module of the PostgreSQL library +let jsonTests = testList "Json" [ + testList "all" [ + testTask "succeeds when there is data" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.all PostgresDb.TableName |> verifyAllData + } + testTask "succeeds when there is no data" { + use db = PostgresDb.BuildDb() + Json.all PostgresDb.TableName |> verifyEmpty + } + ] + testList "allOrdered" [ + testTask "succeeds when ordering numerically" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.allOrdered PostgresDb.TableName [ Field.Named "n:NumValue" ] + |> verifyExpectedOrder "one" "three" (Some "two") (Some "four") (Some "five") + } + testTask "succeeds when ordering numerically descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.allOrdered PostgresDb.TableName [ Field.Named "n:NumValue DESC" ] + |> verifyExpectedOrder "five" "four" (Some "two") (Some "three") (Some "one") + } + testTask "succeeds when ordering alphabetically" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.allOrdered PostgresDb.TableName [ Field.Named "Id DESC" ] + |> verifyExpectedOrder "two" "three" (Some "one") (Some "four") (Some "five") + } + ] + testList "byId" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + let json = Json.byId PostgresDb.TableName "two" + Expect.stringStarts json """{"Id": "two",""" "An incorrect document was returned" + Expect.stringEnds json "}" "JSON should have ended with this document" + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.byId PostgresDb.TableName "three hundred eighty-seven" |> verifyNoDoc + } + ] + testList "byFields" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.byFields PostgresDb.TableName All [ Field.In "Value" [ "purple"; "blue" ]; Field.Exists "Sub" ] + |> verifySingleById "four" + } + testTask "succeeds when documents are found using IN with numeric field" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.byFields PostgresDb.TableName All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] |> verifySingleById "three" + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.byFields PostgresDb.TableName All [ Field.Equal "Value" "mauve"; Field.NotEqual "NumValue" 40 ] + |> verifyEmpty + } + testTask "succeeds for InArray when matching documents exist" { + use db = PostgresDb.BuildDb() + do! Definition.ensureTable PostgresDb.TableName + for doc in ArrayDocument.TestDocuments do do! 
insert PostgresDb.TableName doc + + let json = Json.byFields PostgresDb.TableName All [ Field.InArray "Values" PostgresDb.TableName [ "c" ] ] + verifyBeginEnd json + verifyDocById "first" json + verifyDocById "second" json + } + testTask "succeeds for InArray when no matching documents exist" { + use db = PostgresDb.BuildDb() + do! Definition.ensureTable PostgresDb.TableName + for doc in ArrayDocument.TestDocuments do do! insert PostgresDb.TableName doc + Json.byFields PostgresDb.TableName All [ Field.InArray "Values" PostgresDb.TableName [ "j" ] ] + |> verifyEmpty + } + ] + testList "byFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.byFieldsOrdered PostgresDb.TableName All [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + |> verifyExpectedOrder "five" "four" None None None + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.byFieldsOrdered PostgresDb.TableName All [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + |> verifyExpectedOrder "four" "five" None None None + } + ] + testList "byContains" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + let json = Json.byContains PostgresDb.TableName {| Sub = {| Foo = "green" |} |} + verifyBeginEnd json + verifyDocById "two" json + verifyDocById "four" json + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.byContains PostgresDb.TableName {| Value = "mauve" |} |> verifyEmpty + } + ] + testList "byContainsOrdered" [ + // Id = two, Sub.Bar = blue; Id = four, Sub.Bar = red + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.byContainsOrdered PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar" ] + |> verifyExpectedOrder "two" "four" None None None + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.byContainsOrdered PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar DESC" ] + |> verifyExpectedOrder "four" "two" None None None + } + ] + testList "byJsonPath" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + let json = Json.byJsonPath PostgresDb.TableName "$.NumValue ? (@ < 15)" + verifyBeginEnd json + verifyDocById "one" json + verifyDocById "two" json + verifyDocById "three" json + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.byJsonPath PostgresDb.TableName "$.NumValue ? (@ < 0)" |> verifyEmpty + } + ] + testList "byJsonPathOrdered" [ + // Id = one, NumValue = 0; Id = two, NumValue = 10; Id = three, NumValue = 4 + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.byJsonPathOrdered PostgresDb.TableName "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue" ] + |> verifyExpectedOrder "one" "three" (Some "two") None None + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.byJsonPathOrdered PostgresDb.TableName "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue DESC" ] + |> verifyExpectedOrder "two" "three" (Some "one") None None + } + ] + testList "firstByFields" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + do! 
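            // Unlike the array-returning Json functions, the firstBy* family yields a
            // single JSON object, and an empty object ("{}") when nothing matches.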
loadDocs () + Json.firstByFields PostgresDb.TableName Any [ Field.Equal "Value" "another" ] + |> verifyDocById "two" + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.firstByFields PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] + |> verifyAnyById [ "five"; "four" ] + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.firstByFields PostgresDb.TableName Any [ Field.Equal "Value" "absent" ] |> verifyNoDoc + } + ] + testList "firstByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.firstByFieldsOrdered PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + |> verifyDocById "five" + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.firstByFieldsOrdered + PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + |> verifyDocById "four" + } + ] + testList "firstByContains" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.firstByContains PostgresDb.TableName {| Value = "another" |} |> verifyDocById "two" + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.firstByContains PostgresDb.TableName {| Sub = {| Foo = "green" |} |} |> verifyAnyById [ "two"; "four" ] + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.firstByContains PostgresDb.TableName {| Value = "absent" |} |> verifyNoDoc + } + ] + testList "firstByContainsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.firstByContainsOrdered PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Value" ] + |> verifyDocById "two" + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.firstByContainsOrdered + PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Value DESC" ] + |> verifyDocById "four" + } + ] + testList "firstByJsonPath" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.firstByJsonPath PostgresDb.TableName """$.Value ? (@ == "FIRST!")""" |> verifyDocById "one" + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.firstByJsonPath PostgresDb.TableName """$.Sub.Foo ? (@ == "green")""" + |> verifyAnyById [ "two"; "four" ] + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.firstByJsonPath PostgresDb.TableName """$.Id ? (@ == "nope")""" |> verifyNoDoc + } + ] + testList "firstByJsonPathOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.firstByJsonPathOrdered PostgresDb.TableName """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar" ] + |> verifyDocById "two" + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + Json.firstByJsonPathOrdered + PostgresDb.TableName """$.Sub.Foo ? 
(@ == "green")""" [ Field.Named "Sub.Bar DESC" ] + |> verifyDocById "four" + } + ] +] + /// Integration tests for the Update module of the PostgreSQL library let updateTests = testList "Update" [ testList "byId" [ @@ -1468,6 +1788,7 @@ let all = testList "Postgres" [ countTests existsTests findTests + jsonTests updateTests patchTests removeFieldsTests -- 2.47.2 From 74961c4ba77c703ebb129d713817c516863ae49a Mon Sep 17 00:00:00 2001 From: "Daniel J. Summers" Date: Sat, 5 Apr 2025 18:43:25 -0400 Subject: [PATCH 05/22] Add C# string-returning Json tests --- src/Tests.CSharp/PostgresCSharpTests.cs | 418 ++++++++++++++++++++++++ src/Tests/PostgresTests.fs | 1 - 2 files changed, 418 insertions(+), 1 deletion(-) diff --git a/src/Tests.CSharp/PostgresCSharpTests.cs b/src/Tests.CSharp/PostgresCSharpTests.cs index 5e38dc6..8977851 100644 --- a/src/Tests.CSharp/PostgresCSharpTests.cs +++ b/src/Tests.CSharp/PostgresCSharpTests.cs @@ -1288,6 +1288,423 @@ public static class PostgresCSharpTests ]) ]); + /// Verify a JSON array begins with "[" and ends with "]" + private static void VerifyBeginEnd(string json) + { + Expect.stringStarts(json, "[", "The array should have started with `[`"); + Expect.stringEnds(json, "]", "The array should have ended with `]`"); + } + + /// Verify the presence of a document by its ID + private static void VerifyDocById(string json, string docId) + { + Expect.stringContains(json, $"{{\"Id\": \"{docId}\",", $"Document `{docId}` not present"); + } + + /// Verify the presence of a document by its ID + private static void VerifySingleById(string json, string docId) + { + VerifyBeginEnd(json); + Expect.stringContains(json, $"{{\"Id\": \"{docId}\",", $"Document `{docId}` not present"); + } + + /// Verify the presence of any of the given document IDs in the given JSON + private static void VerifyAnyById(string json, IEnumerable docIds) + { + var theIds = docIds.ToList(); + if (theIds.Any(it => json.Contains($"{{\"Id\": \"{it}\""))) return; + var ids = string.Join(", ", theIds); + Expect.isTrue(false, $"Could not find any of IDs {ids} in {json}"); + } + + /// Verify the JSON for `all` returning data + private static void VerifyAllData(string json) + { + VerifyBeginEnd(json); + IEnumerable ids = ["one", "two", "three", "four", "five"]; + foreach (var docId in ids) VerifyDocById(json, docId); + } + + /// Verify an empty JSON array + private static void VerifyEmpty(string json) + { + Expect.equal(json, "[]", "There should be no documents returned"); + } + + /// Verify an empty JSON document + private static void VerifyNoDoc(string json) + { + Expect.equal(json, "{}", "There should be no document returned"); + } + + /// Verify the JSON for an ordered query + private static void VerifyExpectedOrder(string json, string idFirst, string idSecond, string? idThird = null, + string? idFourth = null, string? 
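        // idThird through idFifth are optional; each one supplied must appear later in
        // the JSON text than its predecessor (the C# analog of verifyExpectedOrder in
        // the F# test suite).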
idFifth = null) + { + var firstIdx = json.IndexOf($"{{\"Id\": \"{idFirst}\",", StringComparison.Ordinal); + var secondIdx = json.IndexOf($"{{\"Id\": \"{idSecond}\",", StringComparison.Ordinal); + VerifyBeginEnd(json); + Expect.isGreaterThan(secondIdx, firstIdx, $"`{idSecond}` should have been after `{idFirst}`"); + if (idThird is null) return; + + var thirdIdx = json.IndexOf($"{{\"Id\": \"{idThird}\",", StringComparison.Ordinal); + Expect.isGreaterThan(thirdIdx, secondIdx, $"`{idThird}` should have been after `{idSecond}`"); + if (idFourth is null) return; + + var fourthIdx = json.IndexOf($"{{\"Id\": \"{idFourth}\",", StringComparison.Ordinal); + Expect.isGreaterThan(fourthIdx, thirdIdx, $"`{idFourth}` should have been after `{idThird}`"); + if (idFifth is null) return; + + var fifthIdx = json.IndexOf($"{{\"Id\": \"{idFifth}\",", StringComparison.Ordinal); + Expect.isGreaterThan(fifthIdx, fourthIdx, $"`{idFifth}` should have been after `{idFourth}`"); + } + + /// + /// Integration tests for the Json module of the PostgreSQL library + /// + private static readonly Test JsonTests = TestList("Json", + [ + TestList("All", + [ + TestCase("succeeds when there is data", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyAllData(Json.All(PostgresDb.TableName)); + }), + TestCase("succeeds when there is no data", async () => + { + await using var db = PostgresDb.BuildDb(); + VerifyEmpty(Json.All(PostgresDb.TableName)); + }) + ]), + TestList("AllOrdered", + [ + TestCase("succeeds when ordering numerically", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyExpectedOrder(Json.AllOrdered(PostgresDb.TableName, [Field.Named("n:NumValue")]), + "one", "three", "two", "four", "five"); + }), + TestCase("succeeds when ordering numerically descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyExpectedOrder(Json.AllOrdered(PostgresDb.TableName, [Field.Named("n:NumValue DESC")]), + "five", "four", "two", "three", "one"); + }), + TestCase("succeeds when ordering alphabetically", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyExpectedOrder(Json.AllOrdered(PostgresDb.TableName, [Field.Named("Id DESC")]), + "two", "three", "one", "four", "five"); + }) + ]), + TestList("ById", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + var json = Json.ById(PostgresDb.TableName, "two"); + Expect.stringStarts(json, """{"Id": "two",""", "An incorrect document was returned"); + Expect.stringEnds(json, "}", "JSON should have ended with this document"); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyNoDoc(Json.ById(PostgresDb.TableName, "three hundred eighty-seven")); + }) + ]), + TestList("ByFields", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifySingleById( + Json.ByFields(PostgresDb.TableName, FieldMatch.All, + [Field.In("Value", ["purple", "blue"]), Field.Exists("Sub")]), + "four"); + }), + TestCase("succeeds when documents are found using IN with numeric field", async() => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifySingleById( + Json.ByFields(PostgresDb.TableName, FieldMatch.All, [Field.In("NumValue", [2, 4, 6, 8])]), + "three"); + }), + 
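            // FieldMatch.All requires every condition to hold for the same document; the
            // next case pairs conditions that no single document satisfies and expects "[]".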
TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyEmpty(Json.ByFields(PostgresDb.TableName, FieldMatch.All, + [Field.Equal("Value", "mauve"), Field.NotEqual("NumValue", 40)])); + }), + TestCase("succeeds for InArray when matching documents exist", async () => + { + await using var db = PostgresDb.BuildDb(); + await Definition.EnsureTable(PostgresDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(PostgresDb.TableName, doc); + + var json = Json.ByFields(PostgresDb.TableName, FieldMatch.All, + [Field.InArray("Values", PostgresDb.TableName, ["c"])]); + VerifyBeginEnd(json); + VerifyDocById(json, "first"); + VerifyDocById(json, "second"); + }), + TestCase("succeeds for InArray when no matching documents exist", async () => + { + await using var db = PostgresDb.BuildDb(); + await Definition.EnsureTable(PostgresDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(PostgresDb.TableName, doc); + VerifyEmpty(Json.ByFields(PostgresDb.TableName, FieldMatch.All, + [Field.InArray("Values", PostgresDb.TableName, ["j"])])); + }) + ]), + TestList("ByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyExpectedOrder( + Json.ByFieldsOrdered(PostgresDb.TableName, FieldMatch.All, [Field.Equal("Value", "purple")], + [Field.Named("Id")]), + "five", "four"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyExpectedOrder( + Json.ByFieldsOrdered(PostgresDb.TableName, FieldMatch.All, [Field.Equal("Value", "purple")], + [Field.Named("Id DESC")]), + "four", "five"); + }) + ]), + TestList("ByContains", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + var json = Json.ByContains(PostgresDb.TableName, new { Sub = new { Foo = "green" } }); + VerifyBeginEnd(json); + VerifyDocById(json, "two"); + VerifyDocById(json, "four"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyEmpty(Json.ByContains(PostgresDb.TableName, new { Value = "mauve" })); + }) + ]), + TestList("ByContainsOrdered", + [ + // Id = two, Sub.Bar = blue; Id = four, Sub.Bar = red + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyExpectedOrder( + Json.ByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, + [Field.Named("Sub.Bar")]), + "two", "four"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyExpectedOrder( + Json.ByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, + [Field.Named("Sub.Bar DESC")]), + "four", "two"); + }) + ]), + TestList("ByJsonPath", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + var json = Json.ByJsonPath(PostgresDb.TableName, "$.NumValue ? 
(@ < 15)"); + VerifyBeginEnd(json); + VerifyDocById(json, "one"); + VerifyDocById(json, "two"); + VerifyDocById(json, "three"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyEmpty(Json.ByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ < 0)")); + }) + ]), + TestList("ByJsonPathOrdered", + [ + // Id = one, NumValue = 0; Id = two, NumValue = 10; Id = three, NumValue = 4 + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyExpectedOrder( + Json.ByJsonPathOrdered(PostgresDb.TableName, "$.NumValue ? (@ < 15)", + [Field.Named("n:NumValue")]), + "one", "three", "two"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyExpectedOrder( + Json.ByJsonPathOrdered(PostgresDb.TableName, "$.NumValue ? (@ < 15)", + [Field.Named("n:NumValue DESC")]), + "two", "three", "one"); + }) + ]), + TestList("FirstByFields", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyDocById( + Json.FirstByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "another")]), + "two"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyAnyById( + Json.FirstByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")]), + ["five", "four"]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyNoDoc(Json.FirstByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "absent")])); + }) + ]), + TestList("FirstByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyDocById( + Json.FirstByFieldsOrdered(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")], + [Field.Named("Id")]), + "five"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyDocById( + Json.FirstByFieldsOrdered(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")], + [Field.Named("Id DESC")]), + "four"); + }) + ]), + TestList("FirstByContains", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyDocById(Json.FirstByContains(PostgresDb.TableName, new { Value = "another" }), "two"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyAnyById(Json.FirstByContains(PostgresDb.TableName, new { Sub = new { Foo = "green" } }), + ["two", "four"]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyNoDoc(Json.FirstByContains(PostgresDb.TableName, new { Value = "absent" })); + }) + ]), + TestList("FirstByContainsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyDocById( + Json.FirstByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, + [Field.Named("Value")]), + "two"); 
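                // Document "two" has Value "another" and document "four" has Value "purple",
                // so an ascending sort on Value yields "two" first; the DESC case below
                // yields "four".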
+ }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyDocById( + Json.FirstByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, + [Field.Named("Value DESC")]), + "four"); + }) + ]), + TestList("FirstByJsonPath", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyDocById(Json.FirstByJsonPath(PostgresDb.TableName, """$.Value ? (@ == "FIRST!")"""), "one"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyAnyById(Json.FirstByJsonPath(PostgresDb.TableName, """$.Sub.Foo ? (@ == "green")"""), + ["two", "four"]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyNoDoc(Json.FirstByJsonPath(PostgresDb.TableName, """$.Id ? (@ == "nope")""")); + }) + ]), + TestList("FirstByJsonPathOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyDocById( + Json.FirstByJsonPathOrdered(PostgresDb.TableName, """$.Sub.Foo ? (@ == "green")""", + [Field.Named("Sub.Bar")]), + "two"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + VerifyDocById( + Json.FirstByJsonPathOrdered(PostgresDb.TableName, """$.Sub.Foo ? (@ == "green")""", + [Field.Named("Sub.Bar DESC")]), + "four"); + }) + ]) + ]); + /// /// Integration tests for the Update module of the PostgreSQL library /// @@ -1729,6 +2146,7 @@ public static class PostgresCSharpTests CountTests, ExistsTests, FindTests, + JsonTests, UpdateTests, PatchTests, RemoveFieldsTests, diff --git a/src/Tests/PostgresTests.fs b/src/Tests/PostgresTests.fs index 46d9a2d..a5fd012 100644 --- a/src/Tests/PostgresTests.fs +++ b/src/Tests/PostgresTests.fs @@ -1163,7 +1163,6 @@ let private verifyExpectedOrder idFirst idSecond idThird idFourth idFifth (json: | None -> () | None -> () | None -> () - Expect.stringEnds json "]" "The array should have ended with `]`" /// Integration tests for the Json module of the PostgreSQL library let jsonTests = testList "Json" [ -- 2.47.2 From 120a59ff7febda88cb85b31c43b66237c42e0bf9 Mon Sep 17 00:00:00 2001 From: "Daniel J. Summers" Date: Sat, 5 Apr 2025 19:33:47 -0400 Subject: [PATCH 06/22] Make Json calls and stream writes async --- src/Postgres/Library.fs | 28 ++-- src/Postgres/WithProps.fs | 41 +++-- src/Tests.CSharp/PostgresCSharpTests.cs | 94 ++++++------ src/Tests/PostgresTests.fs | 195 ++++++++++++++---------- 4 files changed, 203 insertions(+), 155 deletions(-) diff --git a/src/Postgres/Library.fs b/src/Postgres/Library.fs index 4b20b4a..62e38ae 100644 --- a/src/Postgres/Library.fs +++ b/src/Postgres/Library.fs @@ -326,13 +326,14 @@ module Results = /// The query from which JSON should be extracted /// A JSON array as a string; no results will produce an empty array ("[]") [] - let toJsonArray (mapFunc: RowReader -> string) sqlProps = + let toJsonArray (mapFunc: RowReader -> string) sqlProps = backgroundTask { let output = StringBuilder("[") - sqlProps - |> Sql.iter (fun it -> - if output.Length > 2 then ignore (output.Append ",") - mapFunc it |> output.Append |> ignore) - output.Append("]").ToString() + do! 
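        // The builder starts as "[" (length 1); once the first document (at minimum
        // "{}") has been appended, its length exceeds 2, so a comma separator is
        // emitted before every subsequent document.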
sqlProps
+            |> Sql.iterAsync (fun it ->
+                if output.Length > 2 then ignore (output.Append ",")
+                mapFunc it |> output.Append |> ignore)
+        return output.Append("]").ToString()
+    }
 
     /// Create a JSON array of items for the results of a query
     /// The mapping function to extract JSON from the query's results
@@ -346,14 +347,15 @@ module Results =
     /// The mapping function to extract JSON from the query's results
     /// The query from which JSON should be extracted
     [<CompiledName "FSharpWriteJsonArray">]
-    let writeJsonArray (writer: StreamWriter) (mapFunc: RowReader -> string) sqlProps =
-        writer.Write "["
+    let writeJsonArray (writer: StreamWriter) (mapFunc: RowReader -> string) sqlProps = backgroundTask {
+        do! writer.WriteAsync "["
         let mutable isFirst = true
-        sqlProps
-        |> Sql.iter (fun it ->
-            if isFirst then isFirst <- false else writer.Write ","
-            mapFunc it |> writer.Write)
-        writer.Write "]"
+        do! sqlProps
+            |> Sql.iterAsync (fun it ->
+                if isFirst then isFirst <- false else writer.Write ","
+                writer.WriteAsync(mapFunc it).ConfigureAwait(false).GetAwaiter().GetResult())
+        do! writer.WriteAsync "]"
+    }
 
     /// Write a JSON array of items for the results of a query to the given StreamWriter
     /// The StreamWriter to which results should be written
diff --git a/src/Postgres/WithProps.fs b/src/Postgres/WithProps.fs
index f0bf7d6..2acf943 100644
--- a/src/Postgres/WithProps.fs
+++ b/src/Postgres/WithProps.fs
@@ -107,9 +107,10 @@ module Custom =
     /// The SqlProps to use to execute the query
     /// The JSON document with the first matching result, or an empty document if not found
     [<CompiledName "FSharpJsonSingle">]
-    let jsonSingle query parameters mapFunc sqlProps =
-        let results = jsonArray $"%s{query} LIMIT 1" parameters mapFunc sqlProps
-        if results = "[]" then "{}" else results[1..results.Length - 2]
+    let jsonSingle query parameters mapFunc sqlProps = backgroundTask {
+        let! results = jsonArray $"%s{query} LIMIT 1" parameters mapFunc sqlProps
+        return if results = "[]" then "{}" else results[1..results.Length - 2]
+    }
 
     /// Execute a query that returns one or no JSON documents
     /// The query to retrieve the results
@@ -972,8 +973,10 @@ module Json =
     /// The field conditions to match
     /// The SqlProps to use to execute the query
     [<CompiledName "WriteFirstByFields">]
-    let writeFirstByFields tableName (writer: StreamWriter) howMatched fields sqlProps =
-        firstByFields tableName howMatched fields sqlProps |> writer.Write
+    let writeFirstByFields tableName (writer: StreamWriter) howMatched fields sqlProps = backgroundTask {
+        let! json = firstByFields tableName howMatched fields sqlProps
+        do! writer.WriteAsync json
+    }
 
     /// <summary>
     /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) ordered by the given
@@ -1005,7 +1008,10 @@ module Json =
     /// The SqlProps to use to execute the query
     [<CompiledName "WriteFirstByFieldsOrdered">]
     let writeFirstByFieldsOrdered tableName (writer: StreamWriter) howMatched queryFields orderFields sqlProps =
-        firstByFieldsOrdered tableName howMatched queryFields orderFields sqlProps |> writer.Write
+        backgroundTask {
+            let! json = firstByFieldsOrdered tableName howMatched queryFields orderFields sqlProps
+            do!
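            // Each write* function awaits its first* counterpart and streams the
            // resulting JSON (a document, or "{}" when no match) to the caller's
            // StreamWriter instead of returning a string.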
writer.WriteAsync json + } /// Retrieve the first JSON document matching a JSON containment query (@>) /// The table from which a document should be retrieved (may include schema) @@ -1025,8 +1031,10 @@ module Json = /// The document to match with the containment query /// The SqlProps to use to execute the query [] - let writeFirstByContains tableName (writer: StreamWriter) (criteria: obj) sqlProps = - firstByContains tableName criteria sqlProps |> writer.Write + let writeFirstByContains tableName (writer: StreamWriter) (criteria: obj) sqlProps = backgroundTask { + let! json = firstByContains tableName criteria sqlProps + do! writer.WriteAsync json + } /// /// Retrieve the first JSON document matching a JSON containment query (@>) ordered by the given fields in @@ -1056,7 +1064,10 @@ module Json = /// The SqlProps to use to execute the query [] let writeFirstByContainsOrdered tableName (writer: StreamWriter) (criteria: obj) orderFields sqlProps = - firstByContainsOrdered tableName criteria orderFields sqlProps |> writer.Write + backgroundTask { + let! json = firstByContainsOrdered tableName criteria orderFields sqlProps + do! writer.WriteAsync json + } /// Retrieve the first JSON document matching a JSON Path match query (@?) /// The table from which a document should be retrieved (may include schema) @@ -1076,8 +1087,10 @@ module Json = /// The JSON Path expression to match /// The SqlProps to use to execute the query [] - let writeFirstByJsonPath tableName (writer: StreamWriter) jsonPath sqlProps = - firstByJsonPath tableName jsonPath sqlProps |> writer.Write + let writeFirstByJsonPath tableName (writer: StreamWriter) jsonPath sqlProps = backgroundTask { + let! json = firstByJsonPath tableName jsonPath sqlProps + do! writer.WriteAsync json + } /// /// Retrieve the first JSON document matching a JSON Path match query (@?) ordered by the given fields in the @@ -1106,8 +1119,10 @@ module Json = /// Fields by which the results should be ordered /// The SqlProps to use to execute the query [] - let writeFirstByJsonPathOrdered tableName (writer: StreamWriter) jsonPath orderFields sqlProps = - firstByJsonPathOrdered tableName jsonPath orderFields sqlProps |> writer.Write + let writeFirstByJsonPathOrdered tableName (writer: StreamWriter) jsonPath orderFields sqlProps = backgroundTask { + let! json = firstByJsonPathOrdered tableName jsonPath orderFields sqlProps + do! writer.WriteAsync json + } /// Commands to update documents [] diff --git a/src/Tests.CSharp/PostgresCSharpTests.cs b/src/Tests.CSharp/PostgresCSharpTests.cs index 8977851..521676b 100644 --- a/src/Tests.CSharp/PostgresCSharpTests.cs +++ b/src/Tests.CSharp/PostgresCSharpTests.cs @@ -412,7 +412,8 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); - var docs = Custom.JsonArray(Query.Find(PostgresDb.TableName), Parameters.None, Results.JsonFromData); + var docs = await Custom.JsonArray(Query.Find(PostgresDb.TableName), Parameters.None, + Results.JsonFromData); Expect.stringStarts(docs, "[", "The JSON array should have started with `[`"); Expect.hasLength(docs.Split("{\"Id\":"), 6, "There should have been 5 documents returned"); Expect.stringEnds(docs, "]", "The JSON array should have ended with `[`"); @@ -422,7 +423,8 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); - var docs = Custom.JsonArray($"SELECT data FROM {PostgresDb.TableName} WHERE data @? 
@path::jsonpath", + var docs = await Custom.JsonArray( + $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath", [Tuple.Create("@path", Sql.@string("$.NumValue ? (@ > 100)"))], Results.JsonFromData); Expect.equal(docs, "[]", "There should have been no documents returned"); }) @@ -436,7 +438,8 @@ public static class PostgresCSharpTests await using MemoryStream stream = new(); await using var writer = WriteStream(stream); - Custom.WriteJsonArray(Query.Find(PostgresDb.TableName), Parameters.None, writer, Results.JsonFromData); + await Custom.WriteJsonArray(Query.Find(PostgresDb.TableName), Parameters.None, writer, + Results.JsonFromData); var docs = StreamText(stream); Expect.stringStarts(docs, "[", "The JSON array should have started with `[`"); @@ -450,7 +453,7 @@ public static class PostgresCSharpTests await using MemoryStream stream = new(); await using var writer = WriteStream(stream); - Custom.WriteJsonArray($"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath", + await Custom.WriteJsonArray($"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath", [Tuple.Create("@path", Sql.@string("$.NumValue ? (@ > 100)"))], writer, Results.JsonFromData); Expect.equal(StreamText(stream), "[]", "There should have been no documents returned"); @@ -485,7 +488,7 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); - var doc = Custom.JsonSingle($"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id", + var doc = await Custom.JsonSingle($"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id", [Tuple.Create("@id", Sql.@string("one"))], Results.JsonFromData); Expect.stringStarts(doc, "{", "The document should have started with an open brace"); Expect.stringContains(doc, "\"Id\": \"one\"", "An incorrect document was returned"); @@ -496,7 +499,7 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); - var doc = Custom.JsonSingle($"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id", + var doc = await Custom.JsonSingle($"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id", [Tuple.Create("@id", Sql.@string("eighty"))], Results.JsonFromData); Expect.equal(doc, "{}", "There should not have been a document returned"); }) @@ -1370,12 +1373,12 @@ public static class PostgresCSharpTests { await using var db = PostgresDb.BuildDb(); await LoadDocs(); - VerifyAllData(Json.All(PostgresDb.TableName)); + VerifyAllData(await Json.All(PostgresDb.TableName)); }), TestCase("succeeds when there is no data", async () => { await using var db = PostgresDb.BuildDb(); - VerifyEmpty(Json.All(PostgresDb.TableName)); + VerifyEmpty(await Json.All(PostgresDb.TableName)); }) ]), TestList("AllOrdered", @@ -1384,21 +1387,21 @@ public static class PostgresCSharpTests { await using var db = PostgresDb.BuildDb(); await LoadDocs(); - VerifyExpectedOrder(Json.AllOrdered(PostgresDb.TableName, [Field.Named("n:NumValue")]), + VerifyExpectedOrder(await Json.AllOrdered(PostgresDb.TableName, [Field.Named("n:NumValue")]), "one", "three", "two", "four", "five"); }), TestCase("succeeds when ordering numerically descending", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); - VerifyExpectedOrder(Json.AllOrdered(PostgresDb.TableName, [Field.Named("n:NumValue DESC")]), + VerifyExpectedOrder(await Json.AllOrdered(PostgresDb.TableName, [Field.Named("n:NumValue DESC")]), "five", "four", "two", "three", "one"); }), 
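            // The "n:" prefix on Field.Named requests numeric ordering of the field's
            // value; the alphabetic case below omits it, so "Id" sorts as text.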
TestCase("succeeds when ordering alphabetically", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); - VerifyExpectedOrder(Json.AllOrdered(PostgresDb.TableName, [Field.Named("Id DESC")]), + VerifyExpectedOrder(await Json.AllOrdered(PostgresDb.TableName, [Field.Named("Id DESC")]), "two", "three", "one", "four", "five"); }) ]), @@ -1409,7 +1412,7 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); - var json = Json.ById(PostgresDb.TableName, "two"); + var json = await Json.ById(PostgresDb.TableName, "two"); Expect.stringStarts(json, """{"Id": "two",""", "An incorrect document was returned"); Expect.stringEnds(json, "}", "JSON should have ended with this document"); }), @@ -1417,7 +1420,7 @@ public static class PostgresCSharpTests { await using var db = PostgresDb.BuildDb(); await LoadDocs(); - VerifyNoDoc(Json.ById(PostgresDb.TableName, "three hundred eighty-seven")); + VerifyNoDoc(await Json.ById(PostgresDb.TableName, "three hundred eighty-seven")); }) ]), TestList("ByFields", @@ -1427,7 +1430,7 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); VerifySingleById( - Json.ByFields(PostgresDb.TableName, FieldMatch.All, + await Json.ByFields(PostgresDb.TableName, FieldMatch.All, [Field.In("Value", ["purple", "blue"]), Field.Exists("Sub")]), "four"); }), @@ -1436,14 +1439,14 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); VerifySingleById( - Json.ByFields(PostgresDb.TableName, FieldMatch.All, [Field.In("NumValue", [2, 4, 6, 8])]), + await Json.ByFields(PostgresDb.TableName, FieldMatch.All, [Field.In("NumValue", [2, 4, 6, 8])]), "three"); }), TestCase("succeeds when documents are not found", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); - VerifyEmpty(Json.ByFields(PostgresDb.TableName, FieldMatch.All, + VerifyEmpty(await Json.ByFields(PostgresDb.TableName, FieldMatch.All, [Field.Equal("Value", "mauve"), Field.NotEqual("NumValue", 40)])); }), TestCase("succeeds for InArray when matching documents exist", async () => @@ -1452,7 +1455,7 @@ public static class PostgresCSharpTests await Definition.EnsureTable(PostgresDb.TableName); foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(PostgresDb.TableName, doc); - var json = Json.ByFields(PostgresDb.TableName, FieldMatch.All, + var json = await Json.ByFields(PostgresDb.TableName, FieldMatch.All, [Field.InArray("Values", PostgresDb.TableName, ["c"])]); VerifyBeginEnd(json); VerifyDocById(json, "first"); @@ -1463,7 +1466,7 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await Definition.EnsureTable(PostgresDb.TableName); foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(PostgresDb.TableName, doc); - VerifyEmpty(Json.ByFields(PostgresDb.TableName, FieldMatch.All, + VerifyEmpty(await Json.ByFields(PostgresDb.TableName, FieldMatch.All, [Field.InArray("Values", PostgresDb.TableName, ["j"])])); }) ]), @@ -1474,7 +1477,7 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); VerifyExpectedOrder( - Json.ByFieldsOrdered(PostgresDb.TableName, FieldMatch.All, [Field.Equal("Value", "purple")], + await Json.ByFieldsOrdered(PostgresDb.TableName, FieldMatch.All, [Field.Equal("Value", "purple")], [Field.Named("Id")]), "five", "four"); }), @@ -1483,7 +1486,7 @@ public static class PostgresCSharpTests await using var db = 
PostgresDb.BuildDb(); await LoadDocs(); VerifyExpectedOrder( - Json.ByFieldsOrdered(PostgresDb.TableName, FieldMatch.All, [Field.Equal("Value", "purple")], + await Json.ByFieldsOrdered(PostgresDb.TableName, FieldMatch.All, [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]), "four", "five"); }) @@ -1495,7 +1498,7 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); - var json = Json.ByContains(PostgresDb.TableName, new { Sub = new { Foo = "green" } }); + var json = await Json.ByContains(PostgresDb.TableName, new { Sub = new { Foo = "green" } }); VerifyBeginEnd(json); VerifyDocById(json, "two"); VerifyDocById(json, "four"); @@ -1504,7 +1507,7 @@ public static class PostgresCSharpTests { await using var db = PostgresDb.BuildDb(); await LoadDocs(); - VerifyEmpty(Json.ByContains(PostgresDb.TableName, new { Value = "mauve" })); + VerifyEmpty(await Json.ByContains(PostgresDb.TableName, new { Value = "mauve" })); }) ]), TestList("ByContainsOrdered", @@ -1515,7 +1518,7 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); VerifyExpectedOrder( - Json.ByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, + await Json.ByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, [Field.Named("Sub.Bar")]), "two", "four"); }), @@ -1524,7 +1527,7 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); VerifyExpectedOrder( - Json.ByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, + await Json.ByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, [Field.Named("Sub.Bar DESC")]), "four", "two"); }) @@ -1536,7 +1539,7 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); - var json = Json.ByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ < 15)"); + var json = await Json.ByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ < 15)"); VerifyBeginEnd(json); VerifyDocById(json, "one"); VerifyDocById(json, "two"); @@ -1546,7 +1549,7 @@ public static class PostgresCSharpTests { await using var db = PostgresDb.BuildDb(); await LoadDocs(); - VerifyEmpty(Json.ByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ < 0)")); + VerifyEmpty(await Json.ByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ < 0)")); }) ]), TestList("ByJsonPathOrdered", @@ -1557,7 +1560,7 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); VerifyExpectedOrder( - Json.ByJsonPathOrdered(PostgresDb.TableName, "$.NumValue ? (@ < 15)", + await Json.ByJsonPathOrdered(PostgresDb.TableName, "$.NumValue ? (@ < 15)", [Field.Named("n:NumValue")]), "one", "three", "two"); }), @@ -1566,7 +1569,7 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); VerifyExpectedOrder( - Json.ByJsonPathOrdered(PostgresDb.TableName, "$.NumValue ? (@ < 15)", + await Json.ByJsonPathOrdered(PostgresDb.TableName, "$.NumValue ? 
(@ < 15)", [Field.Named("n:NumValue DESC")]), "two", "three", "one"); }) @@ -1578,7 +1581,7 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); VerifyDocById( - Json.FirstByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "another")]), + await Json.FirstByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "another")]), "two"); }), TestCase("succeeds when multiple documents are found", async () => @@ -1586,14 +1589,15 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); VerifyAnyById( - Json.FirstByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")]), + await Json.FirstByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")]), ["five", "four"]); }), TestCase("succeeds when a document is not found", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); - VerifyNoDoc(Json.FirstByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "absent")])); + VerifyNoDoc(await Json.FirstByFields(PostgresDb.TableName, FieldMatch.Any, + [Field.Equal("Value", "absent")])); }) ]), TestList("FirstByFieldsOrdered", @@ -1603,8 +1607,8 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); VerifyDocById( - Json.FirstByFieldsOrdered(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")], - [Field.Named("Id")]), + await Json.FirstByFieldsOrdered(PostgresDb.TableName, FieldMatch.Any, + [Field.Equal("Value", "purple")], [Field.Named("Id")]), "five"); }), TestCase("succeeds when sorting descending", async () => @@ -1612,8 +1616,8 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); VerifyDocById( - Json.FirstByFieldsOrdered(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")], - [Field.Named("Id DESC")]), + await Json.FirstByFieldsOrdered(PostgresDb.TableName, FieldMatch.Any, + [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]), "four"); }) ]), @@ -1623,20 +1627,20 @@ public static class PostgresCSharpTests { await using var db = PostgresDb.BuildDb(); await LoadDocs(); - VerifyDocById(Json.FirstByContains(PostgresDb.TableName, new { Value = "another" }), "two"); + VerifyDocById(await Json.FirstByContains(PostgresDb.TableName, new { Value = "another" }), "two"); }), TestCase("succeeds when multiple documents are found", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); - VerifyAnyById(Json.FirstByContains(PostgresDb.TableName, new { Sub = new { Foo = "green" } }), + VerifyAnyById(await Json.FirstByContains(PostgresDb.TableName, new { Sub = new { Foo = "green" } }), ["two", "four"]); }), TestCase("succeeds when a document is not found", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); - VerifyNoDoc(Json.FirstByContains(PostgresDb.TableName, new { Value = "absent" })); + VerifyNoDoc(await Json.FirstByContains(PostgresDb.TableName, new { Value = "absent" })); }) ]), TestList("FirstByContainsOrdered", @@ -1646,7 +1650,7 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); VerifyDocById( - Json.FirstByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, + await Json.FirstByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, [Field.Named("Value")]), "two"); }), @@ -1655,7 +1659,7 @@ public static class PostgresCSharpTests 
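                // Both "five" and "four" carry Value "purple"; adding the Id sort makes
                // the result deterministic, unlike the unordered FirstByFields cases above.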
await using var db = PostgresDb.BuildDb(); await LoadDocs(); VerifyDocById( - Json.FirstByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, + await Json.FirstByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, [Field.Named("Value DESC")]), "four"); }) @@ -1666,20 +1670,20 @@ public static class PostgresCSharpTests { await using var db = PostgresDb.BuildDb(); await LoadDocs(); - VerifyDocById(Json.FirstByJsonPath(PostgresDb.TableName, """$.Value ? (@ == "FIRST!")"""), "one"); + VerifyDocById(await Json.FirstByJsonPath(PostgresDb.TableName, """$.Value ? (@ == "FIRST!")"""), "one"); }), TestCase("succeeds when multiple documents are found", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); - VerifyAnyById(Json.FirstByJsonPath(PostgresDb.TableName, """$.Sub.Foo ? (@ == "green")"""), + VerifyAnyById(await Json.FirstByJsonPath(PostgresDb.TableName, """$.Sub.Foo ? (@ == "green")"""), ["two", "four"]); }), TestCase("succeeds when a document is not found", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); - VerifyNoDoc(Json.FirstByJsonPath(PostgresDb.TableName, """$.Id ? (@ == "nope")""")); + VerifyNoDoc(await Json.FirstByJsonPath(PostgresDb.TableName, """$.Id ? (@ == "nope")""")); }) ]), TestList("FirstByJsonPathOrdered", @@ -1689,7 +1693,7 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); VerifyDocById( - Json.FirstByJsonPathOrdered(PostgresDb.TableName, """$.Sub.Foo ? (@ == "green")""", + await Json.FirstByJsonPathOrdered(PostgresDb.TableName, """$.Sub.Foo ? (@ == "green")""", [Field.Named("Sub.Bar")]), "two"); }), @@ -1698,7 +1702,7 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); VerifyDocById( - Json.FirstByJsonPathOrdered(PostgresDb.TableName, """$.Sub.Foo ? (@ == "green")""", + await Json.FirstByJsonPathOrdered(PostgresDb.TableName, """$.Sub.Foo ? (@ == "green")""", [Field.Named("Sub.Bar DESC")]), "four"); }) diff --git a/src/Tests/PostgresTests.fs b/src/Tests/PostgresTests.fs index a5fd012..8cd730b 100644 --- a/src/Tests/PostgresTests.fs +++ b/src/Tests/PostgresTests.fs @@ -336,16 +336,16 @@ let customTests = testList "Custom" [ use db = PostgresDb.BuildDb() do! loadDocs () - let docs = Custom.jsonArray (Query.find PostgresDb.TableName) [] jsonFromData + let! docs = Custom.jsonArray (Query.find PostgresDb.TableName) [] jsonFromData Expect.stringStarts docs "[" "The JSON array should have started with `[`" - Expect.hasLength (docs.Split "{\"Id\":") 6 "There should have been 5 documents returned" + Expect.hasLength ((string docs).Split "{\"Id\":") 6 "There should have been 5 documents returned" Expect.stringEnds docs "]" "The JSON array should have ended with `[`" } testTask "succeeds when data is not found" { use db = PostgresDb.BuildDb() do! loadDocs () - let docs = + let! docs = Custom.jsonArray $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath" [ "@path", Sql.string "$.NumValue ? (@ > 100)" ] @@ -360,7 +360,7 @@ let customTests = testList "Custom" [ use stream = new MemoryStream() use writer = writeStream stream - Custom.writeJsonArray (Query.find PostgresDb.TableName) [] writer jsonFromData + do! 
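            // The stream-writing variant is now awaited; it writes the JSON array to the
            // StreamWriter wrapping the MemoryStream, and streamText reads it back for
            // the assertions that follow.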
Custom.writeJsonArray (Query.find PostgresDb.TableName) [] writer jsonFromData let docs = streamText stream Expect.stringStarts docs "[" "The JSON array should have started with `[`" @@ -373,11 +373,11 @@ let customTests = testList "Custom" [ use stream = new MemoryStream() use writer = writeStream stream - Custom.writeJsonArray - $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath" - [ "@path", Sql.string "$.NumValue ? (@ > 100)" ] - writer - jsonFromData + do! Custom.writeJsonArray + $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath" + [ "@path", Sql.string "$.NumValue ? (@ > 100)" ] + writer + jsonFromData Expect.equal (streamText stream) "[]" "There should have been no documents returned" } @@ -412,7 +412,7 @@ let customTests = testList "Custom" [ use db = PostgresDb.BuildDb() do! loadDocs () - let doc = + let! doc = Custom.jsonSingle $"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id" [ "@id", Sql.string "one"] @@ -425,7 +425,7 @@ let customTests = testList "Custom" [ use db = PostgresDb.BuildDb() do! loadDocs () - let doc = + let! doc = Custom.jsonSingle $"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id" [ "@id", Sql.string "eighty" ] @@ -1113,16 +1113,16 @@ let private verifyBeginEnd json = Expect.stringEnds json "]" "The array should have ended with `]`" /// Verify the presence of a document by its ID -let private verifyDocById docId json = +let private verifyDocById json docId = Expect.stringContains json $"{{\"Id\": \"%s{docId}\"," $"Document `{docId}` not present" /// Verify the presence of a document by its ID -let private verifySingleById docId json = +let private verifySingleById json docId = verifyBeginEnd json Expect.stringContains json $"{{\"Id\": \"%s{docId}\"," $"Document `{docId}` not present" /// Verify the presence of any of the given document IDs in the given JSON -let private verifyAnyById (docIds: string list) (json: string) = +let private verifyAnyById (json: string) (docIds: string list) = match docIds |> List.tryFind (fun it -> json.Contains $"{{\"Id\": \"{it}\"") with | Some _ -> () | None -> @@ -1132,7 +1132,7 @@ let private verifyAnyById (docIds: string list) (json: string) = /// Verify the JSON for `all` returning data let private verifyAllData json = verifyBeginEnd json - for docId in [ "one"; "two"; "three"; "four"; "five" ] do verifyDocById docId json + [ "one"; "two"; "three"; "four"; "five" ] |> List.iter (verifyDocById json) /// Verify an empty JSON array let private verifyEmpty json = @@ -1143,7 +1143,7 @@ let private verifyNoDoc json = Expect.equal json "{}" "There should be no document returned" /// Verify the JSON for an ordered query -let private verifyExpectedOrder idFirst idSecond idThird idFourth idFifth (json: string) = +let private verifyExpectedOrder (json: string) idFirst idSecond idThird idFourth idFifth = let firstIdx = json.IndexOf $"{{\"Id\": \"%s{idFirst}\"," let secondIdx = json.IndexOf $"{{\"Id\": \"%s{idSecond}\"," verifyBeginEnd json @@ -1170,31 +1170,33 @@ let jsonTests = testList "Json" [ testTask "succeeds when there is data" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.all PostgresDb.TableName |> verifyAllData + let! json = Json.all PostgresDb.TableName + verifyAllData json } testTask "succeeds when there is no data" { use db = PostgresDb.BuildDb() - Json.all PostgresDb.TableName |> verifyEmpty + let! 
json = Json.all PostgresDb.TableName + verifyEmpty json } ] testList "allOrdered" [ testTask "succeeds when ordering numerically" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.allOrdered PostgresDb.TableName [ Field.Named "n:NumValue" ] - |> verifyExpectedOrder "one" "three" (Some "two") (Some "four") (Some "five") + let! json = Json.allOrdered PostgresDb.TableName [ Field.Named "n:NumValue" ] + verifyExpectedOrder json "one" "three" (Some "two") (Some "four") (Some "five") } testTask "succeeds when ordering numerically descending" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.allOrdered PostgresDb.TableName [ Field.Named "n:NumValue DESC" ] - |> verifyExpectedOrder "five" "four" (Some "two") (Some "three") (Some "one") + let! json = Json.allOrdered PostgresDb.TableName [ Field.Named "n:NumValue DESC" ] + verifyExpectedOrder json "five" "four" (Some "two") (Some "three") (Some "one") } testTask "succeeds when ordering alphabetically" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.allOrdered PostgresDb.TableName [ Field.Named "Id DESC" ] - |> verifyExpectedOrder "two" "three" (Some "one") (Some "four") (Some "five") + let! json = Json.allOrdered PostgresDb.TableName [ Field.Named "Id DESC" ] + verifyExpectedOrder json "two" "three" (Some "one") (Some "four") (Some "five") } ] testList "byId" [ @@ -1202,64 +1204,70 @@ let jsonTests = testList "Json" [ use db = PostgresDb.BuildDb() do! loadDocs () - let json = Json.byId PostgresDb.TableName "two" + let! json = Json.byId PostgresDb.TableName "two" Expect.stringStarts json """{"Id": "two",""" "An incorrect document was returned" Expect.stringEnds json "}" "JSON should have ended with this document" } testTask "succeeds when a document is not found" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.byId PostgresDb.TableName "three hundred eighty-seven" |> verifyNoDoc + let! json = Json.byId PostgresDb.TableName "three hundred eighty-seven" + verifyNoDoc json } ] testList "byFields" [ testTask "succeeds when documents are found" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.byFields PostgresDb.TableName All [ Field.In "Value" [ "purple"; "blue" ]; Field.Exists "Sub" ] - |> verifySingleById "four" + let! json = + Json.byFields PostgresDb.TableName All [ Field.In "Value" [ "purple"; "blue" ]; Field.Exists "Sub" ] + verifySingleById json "four" } testTask "succeeds when documents are found using IN with numeric field" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.byFields PostgresDb.TableName All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] |> verifySingleById "three" + let! json = Json.byFields PostgresDb.TableName All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] + verifySingleById json "three" } testTask "succeeds when documents are not found" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.byFields PostgresDb.TableName All [ Field.Equal "Value" "mauve"; Field.NotEqual "NumValue" 40 ] - |> verifyEmpty + let! json = + Json.byFields PostgresDb.TableName All [ Field.Equal "Value" "mauve"; Field.NotEqual "NumValue" 40 ] + verifyEmpty json } testTask "succeeds for InArray when matching documents exist" { use db = PostgresDb.BuildDb() do! Definition.ensureTable PostgresDb.TableName for doc in ArrayDocument.TestDocuments do do! insert PostgresDb.TableName doc - let json = Json.byFields PostgresDb.TableName All [ Field.InArray "Values" PostgresDb.TableName [ "c" ] ] + let! 
json = Json.byFields PostgresDb.TableName All [ Field.InArray "Values" PostgresDb.TableName [ "c" ] ] verifyBeginEnd json - verifyDocById "first" json - verifyDocById "second" json + verifyDocById json "first" + verifyDocById json "second" } testTask "succeeds for InArray when no matching documents exist" { use db = PostgresDb.BuildDb() do! Definition.ensureTable PostgresDb.TableName for doc in ArrayDocument.TestDocuments do do! insert PostgresDb.TableName doc - Json.byFields PostgresDb.TableName All [ Field.InArray "Values" PostgresDb.TableName [ "j" ] ] - |> verifyEmpty + let! json = Json.byFields PostgresDb.TableName All [ Field.InArray "Values" PostgresDb.TableName [ "j" ] ] + verifyEmpty json } ] testList "byFieldsOrdered" [ testTask "succeeds when sorting ascending" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.byFieldsOrdered PostgresDb.TableName All [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] - |> verifyExpectedOrder "five" "four" None None None + let! json = + Json.byFieldsOrdered PostgresDb.TableName All [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + verifyExpectedOrder json "five" "four" None None None } testTask "succeeds when sorting descending" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.byFieldsOrdered PostgresDb.TableName All [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] - |> verifyExpectedOrder "four" "five" None None None + let! json = + Json.byFieldsOrdered PostgresDb.TableName All [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + verifyExpectedOrder json "four" "five" None None None } ] testList "byContains" [ @@ -1267,15 +1275,16 @@ let jsonTests = testList "Json" [ use db = PostgresDb.BuildDb() do! loadDocs () - let json = Json.byContains PostgresDb.TableName {| Sub = {| Foo = "green" |} |} + let! json = Json.byContains PostgresDb.TableName {| Sub = {| Foo = "green" |} |} verifyBeginEnd json - verifyDocById "two" json - verifyDocById "four" json + verifyDocById json "two" + verifyDocById json "four" } testTask "succeeds when documents are not found" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.byContains PostgresDb.TableName {| Value = "mauve" |} |> verifyEmpty + let! json = Json.byContains PostgresDb.TableName {| Value = "mauve" |} + verifyEmpty json } ] testList "byContainsOrdered" [ @@ -1283,14 +1292,17 @@ let jsonTests = testList "Json" [ testTask "succeeds when sorting ascending" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.byContainsOrdered PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar" ] - |> verifyExpectedOrder "two" "four" None None None + let! json = + Json.byContainsOrdered PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar" ] + verifyExpectedOrder json "two" "four" None None None } testTask "succeeds when sorting descending" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.byContainsOrdered PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar DESC" ] - |> verifyExpectedOrder "four" "two" None None None + let! json = + Json.byContainsOrdered + PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar DESC" ] + verifyExpectedOrder json "four" "two" None None None } ] testList "byJsonPath" [ @@ -1298,16 +1310,17 @@ let jsonTests = testList "Json" [ use db = PostgresDb.BuildDb() do! loadDocs () - let json = Json.byJsonPath PostgresDb.TableName "$.NumValue ? (@ < 15)" + let! json = Json.byJsonPath PostgresDb.TableName "$.NumValue ? 
(@ < 15)" verifyBeginEnd json - verifyDocById "one" json - verifyDocById "two" json - verifyDocById "three" json + verifyDocById json "one" + verifyDocById json "two" + verifyDocById json "three" } testTask "succeeds when documents are not found" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.byJsonPath PostgresDb.TableName "$.NumValue ? (@ < 0)" |> verifyEmpty + let! json = Json.byJsonPath PostgresDb.TableName "$.NumValue ? (@ < 0)" + verifyEmpty json } ] testList "byJsonPathOrdered" [ @@ -1315,113 +1328,127 @@ let jsonTests = testList "Json" [ testTask "succeeds when sorting ascending" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.byJsonPathOrdered PostgresDb.TableName "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue" ] - |> verifyExpectedOrder "one" "three" (Some "two") None None + let! json = Json.byJsonPathOrdered PostgresDb.TableName "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue" ] + verifyExpectedOrder json "one" "three" (Some "two") None None } testTask "succeeds when sorting descending" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.byJsonPathOrdered PostgresDb.TableName "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue DESC" ] - |> verifyExpectedOrder "two" "three" (Some "one") None None + let! json = + Json.byJsonPathOrdered PostgresDb.TableName "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue DESC" ] + verifyExpectedOrder json "two" "three" (Some "one") None None } ] testList "firstByFields" [ testTask "succeeds when a document is found" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.firstByFields PostgresDb.TableName Any [ Field.Equal "Value" "another" ] - |> verifyDocById "two" + let! json = Json.firstByFields PostgresDb.TableName Any [ Field.Equal "Value" "another" ] + verifyDocById json "two" } testTask "succeeds when multiple documents are found" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.firstByFields PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] - |> verifyAnyById [ "five"; "four" ] + let! json = Json.firstByFields PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] + verifyAnyById json [ "five"; "four" ] } testTask "succeeds when a document is not found" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.firstByFields PostgresDb.TableName Any [ Field.Equal "Value" "absent" ] |> verifyNoDoc + let! json = Json.firstByFields PostgresDb.TableName Any [ Field.Equal "Value" "absent" ] + verifyNoDoc json } ] testList "firstByFieldsOrdered" [ testTask "succeeds when sorting ascending" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.firstByFieldsOrdered PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] - |> verifyDocById "five" + let! json = + Json.firstByFieldsOrdered PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + verifyDocById json "five" } testTask "succeeds when sorting descending" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.firstByFieldsOrdered - PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] - |> verifyDocById "four" + let! json = + Json.firstByFieldsOrdered + PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + verifyDocById json "four" } ] testList "firstByContains" [ testTask "succeeds when a document is found" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.firstByContains PostgresDb.TableName {| Value = "another" |} |> verifyDocById "two" + let! 
json = Json.firstByContains PostgresDb.TableName {| Value = "another" |} + verifyDocById json "two" } testTask "succeeds when multiple documents are found" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.firstByContains PostgresDb.TableName {| Sub = {| Foo = "green" |} |} |> verifyAnyById [ "two"; "four" ] + let! json = Json.firstByContains PostgresDb.TableName {| Sub = {| Foo = "green" |} |} + verifyAnyById json [ "two"; "four" ] } testTask "succeeds when a document is not found" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.firstByContains PostgresDb.TableName {| Value = "absent" |} |> verifyNoDoc + let! json = Json.firstByContains PostgresDb.TableName {| Value = "absent" |} + verifyNoDoc json } ] testList "firstByContainsOrdered" [ testTask "succeeds when sorting ascending" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.firstByContainsOrdered PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Value" ] - |> verifyDocById "two" + let! json = + Json.firstByContainsOrdered PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Value" ] + verifyDocById json "two" } testTask "succeeds when sorting descending" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.firstByContainsOrdered - PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Value DESC" ] - |> verifyDocById "four" + let! json = + Json.firstByContainsOrdered + PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Value DESC" ] + verifyDocById json "four" } ] testList "firstByJsonPath" [ testTask "succeeds when a document is found" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.firstByJsonPath PostgresDb.TableName """$.Value ? (@ == "FIRST!")""" |> verifyDocById "one" + let! json = Json.firstByJsonPath PostgresDb.TableName """$.Value ? (@ == "FIRST!")""" + verifyDocById json "one" } testTask "succeeds when multiple documents are found" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.firstByJsonPath PostgresDb.TableName """$.Sub.Foo ? (@ == "green")""" - |> verifyAnyById [ "two"; "four" ] + let! json = Json.firstByJsonPath PostgresDb.TableName """$.Sub.Foo ? (@ == "green")""" + verifyAnyById json [ "two"; "four" ] } testTask "succeeds when a document is not found" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.firstByJsonPath PostgresDb.TableName """$.Id ? (@ == "nope")""" |> verifyNoDoc + let! json = Json.firstByJsonPath PostgresDb.TableName """$.Id ? (@ == "nope")""" + verifyNoDoc json } ] testList "firstByJsonPathOrdered" [ testTask "succeeds when sorting ascending" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.firstByJsonPathOrdered PostgresDb.TableName """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar" ] - |> verifyDocById "two" + let! json = + Json.firstByJsonPathOrdered + PostgresDb.TableName """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar" ] + verifyDocById json "two" } testTask "succeeds when sorting descending" { use db = PostgresDb.BuildDb() do! loadDocs () - Json.firstByJsonPathOrdered - PostgresDb.TableName """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar DESC" ] - |> verifyDocById "four" + let! json = + Json.firstByJsonPathOrdered + PostgresDb.TableName """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar DESC" ] + verifyDocById json "four" } ] ] -- 2.47.2 From 5e5dbd3b80b20464cc021c13e76453d71faeb4f7 Mon Sep 17 00:00:00 2001 From: "Daniel J. 
Summers" Date: Sat, 5 Apr 2025 21:55:40 -0400 Subject: [PATCH 07/22] Add Json write Postgres tests --- src/Postgres/WithProps.fs | 6 +- src/Tests.CSharp/PostgresCSharpTests.cs | 417 ++++++++++++++++++++++++ src/Tests/PostgresTests.fs | 393 ++++++++++++++++++++++ 3 files changed, 814 insertions(+), 2 deletions(-) diff --git a/src/Postgres/WithProps.fs b/src/Postgres/WithProps.fs index 2acf943..f0f7c39 100644 --- a/src/Postgres/WithProps.fs +++ b/src/Postgres/WithProps.fs @@ -771,8 +771,10 @@ module Json = /// The ID of the document to retrieve /// The SqlProps to use to execute the query [] - let writeById<'TKey> tableName (writer: StreamWriter) (docId: 'TKey) sqlProps = - byId tableName docId sqlProps |> writer.Write + let writeById<'TKey> tableName (writer: StreamWriter) (docId: 'TKey) sqlProps = backgroundTask { + let! json = byId tableName docId sqlProps + do! writer.WriteAsync json + } /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) /// The table from which documents should be retrieved (may include schema) diff --git a/src/Tests.CSharp/PostgresCSharpTests.cs b/src/Tests.CSharp/PostgresCSharpTests.cs index 521676b..efc7b90 100644 --- a/src/Tests.CSharp/PostgresCSharpTests.cs +++ b/src/Tests.CSharp/PostgresCSharpTests.cs @@ -1706,6 +1706,423 @@ public static class PostgresCSharpTests [Field.Named("Sub.Bar DESC")]), "four"); }) + ]), + TestList("WriteAll", + [ + TestCase("succeeds when there is data", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteAll(PostgresDb.TableName, writer); + VerifyAllData(StreamText(stream)); + }), + TestCase("succeeds when there is no data", async () => + { + await using var db = PostgresDb.BuildDb(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteAll(PostgresDb.TableName, writer); + VerifyEmpty(StreamText(stream)); + }) + ]), + TestList("WriteAllOrdered", + [ + TestCase("succeeds when ordering numerically", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteAllOrdered(PostgresDb.TableName, writer, [Field.Named("n:NumValue")]); + VerifyExpectedOrder(StreamText(stream), "one", "three", "two", "four", "five"); + }), + TestCase("succeeds when ordering numerically descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteAllOrdered(PostgresDb.TableName, writer, [Field.Named("n:NumValue DESC")]); + VerifyExpectedOrder(StreamText(stream), "five", "four", "two", "three", "one"); + }), + TestCase("succeeds when ordering alphabetically", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteAllOrdered(PostgresDb.TableName, writer, [Field.Named("Id DESC")]); + VerifyExpectedOrder(StreamText(stream), "two", "three", "one", "four", "five"); + }) + ]), + TestList("WriteById", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await 
Json.WriteById(PostgresDb.TableName, writer, "two"); + var json = StreamText(stream); + Expect.stringStarts(json, """{"Id": "two",""", "An incorrect document was returned"); + Expect.stringEnds(json, "}", "JSON should have ended with this document"); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteById(PostgresDb.TableName, writer, "three hundred eighty-seven"); + VerifyNoDoc(StreamText(stream)); + }) + ]), + TestList("WriteByFields", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.In("Value", ["purple", "blue"]), Field.Exists("Sub")]); + VerifySingleById(StreamText(stream), "four"); + }), + TestCase("succeeds when documents are found using IN with numeric field", async() => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.In("NumValue", [2, 4, 6, 8])]); + VerifySingleById(StreamText(stream), "three"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.Equal("Value", "mauve"), Field.NotEqual("NumValue", 40)]); + VerifyEmpty(StreamText(stream)); + }), + TestCase("succeeds for InArray when matching documents exist", async () => + { + await using var db = PostgresDb.BuildDb(); + await Definition.EnsureTable(PostgresDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(PostgresDb.TableName, doc); + + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", PostgresDb.TableName, ["c"])]); + var json = StreamText(stream); + VerifyBeginEnd(json); + VerifyDocById(json, "first"); + VerifyDocById(json, "second"); + }), + TestCase("succeeds for InArray when no matching documents exist", async () => + { + await using var db = PostgresDb.BuildDb(); + await Definition.EnsureTable(PostgresDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(PostgresDb.TableName, doc); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", PostgresDb.TableName, ["j"])]); + VerifyEmpty(StreamText(stream)); + }) + ]), + TestList("WriteByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.All, + [Field.Equal("Value", "purple")], [Field.Named("Id")]); + VerifyExpectedOrder(StreamText(stream), "five", "four"); + }), + TestCase("succeeds when 
sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.All, + [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]); + VerifyExpectedOrder(StreamText(stream), "four", "five"); + }) + ]), + TestList("WriteByContains", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByContains(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }); + var json = StreamText(stream); + VerifyBeginEnd(json); + VerifyDocById(json, "two"); + VerifyDocById(json, "four"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByContains(PostgresDb.TableName, writer, new { Value = "mauve" }); + VerifyEmpty(StreamText(stream)); + }) + ]), + TestList("WriteByContainsOrdered", + [ + // Id = two, Sub.Bar = blue; Id = four, Sub.Bar = red + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByContainsOrdered(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }, + [Field.Named("Sub.Bar")]); + VerifyExpectedOrder(StreamText(stream), "two", "four"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByContainsOrdered(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }, + [Field.Named("Sub.Bar DESC")]); + VerifyExpectedOrder(StreamText(stream), "four", "two"); + }) + ]), + TestList("WriteByJsonPath", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByJsonPath(PostgresDb.TableName, writer, "$.NumValue ? (@ < 15)"); + var json = StreamText(stream); + VerifyBeginEnd(json); + VerifyDocById(json, "one"); + VerifyDocById(json, "two"); + VerifyDocById(json, "three"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByJsonPath(PostgresDb.TableName, writer, "$.NumValue ? (@ < 0)"); + VerifyEmpty(StreamText(stream)); + }) + ]), + TestList("WriteByJsonPathOrdered", + [ + // Id = one, NumValue = 0; Id = two, NumValue = 10; Id = three, NumValue = 4 + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByJsonPathOrdered(PostgresDb.TableName, writer, "$.NumValue ? 
(@ < 15)", + [Field.Named("n:NumValue")]); + VerifyExpectedOrder(StreamText(stream), "one", "three", "two"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByJsonPathOrdered(PostgresDb.TableName, writer, "$.NumValue ? (@ < 15)", + [Field.Named("n:NumValue DESC")]); + VerifyExpectedOrder(StreamText(stream), "two", "three", "one"); + }) + ]), + TestList("WriteFirstByFields", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "another")]); + VerifyDocById(StreamText(stream), "two"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "purple")]); + VerifyAnyById(StreamText(stream), ["five", "four"]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "absent")]); + VerifyNoDoc(StreamText(stream)); + }) + ]), + TestList("WriteFirstByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteFirstByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "purple")], [Field.Named("Id")]); + VerifyDocById(StreamText(stream), "five"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteFirstByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]); + VerifyDocById(StreamText(stream), "four"); + }) + ]), + TestList("WriteFirstByContains", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteFirstByContains(PostgresDb.TableName, writer, new { Value = "another" }); + VerifyDocById(StreamText(stream), "two"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteFirstByContains(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }); + VerifyAnyById(StreamText(stream), ["two", "four"]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await 
LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteFirstByContains(PostgresDb.TableName, writer, new { Value = "absent" }); + VerifyNoDoc(StreamText(stream)); + }) + ]), + TestList("WriteFirstByContainsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteFirstByContainsOrdered(PostgresDb.TableName, writer, + new { Sub = new { Foo = "green" } }, [Field.Named("Value")]); + VerifyDocById(StreamText(stream), "two"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteFirstByContainsOrdered(PostgresDb.TableName, writer, + new { Sub = new { Foo = "green" } }, [Field.Named("Value DESC")]); + VerifyDocById(StreamText(stream), "four"); + }) + ]), + TestList("WriteFirstByJsonPath", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteFirstByJsonPath(PostgresDb.TableName, writer, """$.Value ? (@ == "FIRST!")"""); + VerifyDocById(StreamText(stream), "one"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteFirstByJsonPath(PostgresDb.TableName, writer, """$.Sub.Foo ? (@ == "green")"""); + VerifyAnyById(StreamText(stream), ["two", "four"]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteFirstByJsonPath(PostgresDb.TableName, writer, """$.Id ? (@ == "nope")"""); + VerifyNoDoc(StreamText(stream)); + }) + ]), + TestList("WriteFirstByJsonPathOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteFirstByJsonPathOrdered(PostgresDb.TableName, writer, """$.Sub.Foo ? (@ == "green")""", + [Field.Named("Sub.Bar")]); + VerifyDocById(StreamText(stream), "two"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await LoadDocs(); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteFirstByJsonPathOrdered(PostgresDb.TableName, writer, """$.Sub.Foo ? (@ == "green")""", + [Field.Named("Sub.Bar DESC")]); + VerifyDocById(StreamText(stream), "four"); + }) ]) ]); diff --git a/src/Tests/PostgresTests.fs b/src/Tests/PostgresTests.fs index 8cd730b..d166cbf 100644 --- a/src/Tests/PostgresTests.fs +++ b/src/Tests/PostgresTests.fs @@ -1451,6 +1451,399 @@ let jsonTests = testList "Json" [ verifyDocById json "four" } ] + testList "writeAll" [ + testTask "succeeds when there is data" { + use db = PostgresDb.BuildDb() + do! 
loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeAll PostgresDb.TableName writer + verifyAllData (streamText stream) + } + testTask "succeeds when there is no data" { + use db = PostgresDb.BuildDb() + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeAll PostgresDb.TableName writer + verifyEmpty (streamText stream) + } + ] + testList "writeAllOrdered" [ + testTask "succeeds when ordering numerically" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeAllOrdered PostgresDb.TableName writer [ Field.Named "n:NumValue" ] + verifyExpectedOrder (streamText stream) "one" "three" (Some "two") (Some "four") (Some "five") + } + testTask "succeeds when ordering numerically descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeAllOrdered PostgresDb.TableName writer [ Field.Named "n:NumValue DESC" ] + verifyExpectedOrder (streamText stream) "five" "four" (Some "two") (Some "three") (Some "one") + } + testTask "succeeds when ordering alphabetically" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeAllOrdered PostgresDb.TableName writer [ Field.Named "Id DESC" ] + verifyExpectedOrder (streamText stream) "two" "three" (Some "one") (Some "four") (Some "five") + } + ] + testList "writeById" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeById PostgresDb.TableName writer "two" + let json = streamText stream + Expect.stringStarts json """{"Id": "two",""" "An incorrect document was returned" + Expect.stringEnds json "}" "JSON should have ended with this document" + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeById PostgresDb.TableName writer "three hundred eighty-seven" + verifyNoDoc (streamText stream) + } + ] + testList "writeByFields" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByFields + PostgresDb.TableName writer All [ Field.In "Value" [ "purple"; "blue" ]; Field.Exists "Sub" ] + verifySingleById (streamText stream) "four" + } + testTask "succeeds when documents are found using IN with numeric field" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByFields PostgresDb.TableName writer All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] + verifySingleById (streamText stream) "three" + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByFields + PostgresDb.TableName writer All [ Field.Equal "Value" "mauve"; Field.NotEqual "NumValue" 40 ] + verifyEmpty (streamText stream) + } + testTask "succeeds for InArray when matching documents exist" { + use db = PostgresDb.BuildDb() + do! Definition.ensureTable PostgresDb.TableName + for doc in ArrayDocument.TestDocuments do do! 
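These F# tests lean on two suite helpers, writeStream and streamText, whose definitions are outside this patch. One plausible shape for them, stated as an assumption rather than the suite's actual code:

    open System.IO
    open System.Text

    // Wrap a stream in a writer that flushes eagerly, so the MemoryStream
    // already holds the bytes when streamText reads it back.
    let writeStream (stream: Stream) =
        new StreamWriter(stream, AutoFlush = true)

    // Rewind the stream and return everything written to it as a string.
    let streamText (stream: Stream) =
        stream.Position <- 0L
        use reader = new StreamReader(stream, Encoding.UTF8)
        reader.ReadToEnd()
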
insert PostgresDb.TableName doc + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByFields + PostgresDb.TableName writer All [ Field.InArray "Values" PostgresDb.TableName [ "c" ] ] + let json = streamText stream + verifyBeginEnd json + verifyDocById json "first" + verifyDocById json "second" + } + testTask "succeeds for InArray when no matching documents exist" { + use db = PostgresDb.BuildDb() + do! Definition.ensureTable PostgresDb.TableName + for doc in ArrayDocument.TestDocuments do do! insert PostgresDb.TableName doc + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByFields + PostgresDb.TableName writer All [ Field.InArray "Values" PostgresDb.TableName [ "j" ] ] + verifyEmpty (streamText stream) + } + ] + testList "writeByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByFieldsOrdered + PostgresDb.TableName writer All [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + verifyExpectedOrder (streamText stream) "five" "four" None None None + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByFieldsOrdered + PostgresDb.TableName writer All [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + verifyExpectedOrder (streamText stream) "four" "five" None None None + } + ] + testList "writeByContains" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByContains PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} + let json = streamText stream + verifyBeginEnd json + verifyDocById json "two" + verifyDocById json "four" + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByContains PostgresDb.TableName writer {| Value = "mauve" |} + verifyEmpty (streamText stream) + } + ] + testList "writeByContainsOrdered" [ + // Id = two, Sub.Bar = blue; Id = four, Sub.Bar = red + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar" ] + verifyExpectedOrder (streamText stream) "two" "four" None None None + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar DESC" ] + verifyExpectedOrder (streamText stream) "four" "two" None None None + } + ] + testList "writeByJsonPath" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByJsonPath PostgresDb.TableName writer "$.NumValue ? 
(@ < 15)" + let json = streamText stream + verifyBeginEnd json + verifyDocById json "one" + verifyDocById json "two" + verifyDocById json "three" + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByJsonPath PostgresDb.TableName writer "$.NumValue ? (@ < 0)" + verifyEmpty (streamText stream) + } + ] + testList "writeByJsonPathOrdered" [ + // Id = one, NumValue = 0; Id = two, NumValue = 10; Id = three, NumValue = 4 + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByJsonPathOrdered + PostgresDb.TableName writer "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue" ] + verifyExpectedOrder (streamText stream) "one" "three" (Some "two") None None + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByJsonPathOrdered + PostgresDb.TableName writer "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue DESC" ] + verifyExpectedOrder (streamText stream) "two" "three" (Some "one") None None + } + ] + testList "writeFirstByFields" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "another" ] + verifyDocById (streamText stream) "two" + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] + verifyAnyById (streamText stream) [ "five"; "four" ] + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "absent" ] + verifyNoDoc (streamText stream) + } + ] + testList "writeFirstByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeFirstByFieldsOrdered + PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + verifyDocById (streamText stream) "five" + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeFirstByFieldsOrdered + PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + verifyDocById (streamText stream) "four" + } + ] + testList "writeFirstByContains" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeFirstByContains PostgresDb.TableName writer {| Value = "another" |} + verifyDocById (streamText stream) "two" + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! 
Json.writeFirstByContains PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} + verifyAnyById (streamText stream) [ "two"; "four" ] + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeFirstByContains PostgresDb.TableName writer {| Value = "absent" |} + verifyNoDoc (streamText stream) + } + ] + testList "writeFirstByContainsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeFirstByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Value" ] + verifyDocById (streamText stream) "two" + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeFirstByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Value DESC" ] + verifyDocById (streamText stream) "four" + } + ] + testList "writeFirstByJsonPath" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeFirstByJsonPath PostgresDb.TableName writer """$.Value ? (@ == "FIRST!")""" + verifyDocById (streamText stream) "one" + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeFirstByJsonPath PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" + verifyAnyById (streamText stream) [ "two"; "four" ] + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeFirstByJsonPath PostgresDb.TableName writer """$.Id ? (@ == "nope")""" + verifyNoDoc (streamText stream) + } + ] + testList "writeFirstByJsonPathOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeFirstByJsonPathOrdered + PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar" ] + verifyDocById (streamText stream) "two" + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeFirstByJsonPathOrdered + PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar DESC" ] + verifyDocById (streamText stream) "four" + } + ] ] /// Integration tests for the Update module of the PostgreSQL library -- 2.47.2 From 1dcc35d0f033cf10fa38940b0abc724c63c1190f Mon Sep 17 00:00:00 2001 From: "Daniel J. 
Summers" Date: Sun, 6 Apr 2025 14:10:12 -0400 Subject: [PATCH 08/22] Complete Json impl for Postgres --- src/Postgres/Extensions.fs | 190 ++- src/Postgres/Functions.fs | 122 +- src/Postgres/Library.fs | 38 +- src/Postgres/WithProps.fs | 2 +- .../PostgresCSharpExtensionTests.cs | 1207 +++++++++++++++-- src/Tests.CSharp/PostgresCSharpTests.cs | 24 +- src/Tests/PostgresExtensionTests.fs | 970 ++++++++++++- 7 files changed, 2231 insertions(+), 322 deletions(-) diff --git a/src/Postgres/Extensions.fs b/src/Postgres/Extensions.fs index e53536c..da0d24d 100644 --- a/src/Postgres/Extensions.fs +++ b/src/Postgres/Extensions.fs @@ -38,7 +38,7 @@ module Extensions = /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// Some with the first matching result, or None if not found + /// Some with the first matching result, or None if not found member conn.customSingle<'TDoc> query parameters (mapFunc: RowReader -> 'TDoc) = Custom.single<'TDoc> query parameters mapFunc (Sql.existingConnection conn) @@ -102,7 +102,7 @@ module Extensions = member conn.countAll tableName = Count.all tableName (Sql.existingConnection conn) - /// Count matching documents using JSON field comparisons (->> =, etc.) + /// Count matching documents using JSON field comparisons (->> =, etc.) /// The table in which documents should be counted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -110,14 +110,14 @@ module Extensions = member conn.countByFields tableName howMatched fields = Count.byFields tableName howMatched fields (Sql.existingConnection conn) - /// Count matching documents using a JSON containment query (@>) + /// Count matching documents using a JSON containment query (@>) /// The table in which documents should be counted (may include schema) /// The document to match with the containment query /// The count of the documents in the table member conn.countByContains tableName criteria = Count.byContains tableName criteria (Sql.existingConnection conn) - /// Count matching documents using a JSON Path match query (@?) + /// Count matching documents using a JSON Path match query (@?) /// The table in which documents should be counted (may include schema) /// The JSON Path expression to be matched /// The count of the documents in the table @@ -131,7 +131,7 @@ module Extensions = member conn.existsById tableName docId = Exists.byId tableName docId (Sql.existingConnection conn) - /// Determine if a document exists using JSON field comparisons (->> =, etc.) + /// Determine if a document exists using JSON field comparisons (->> =, etc.) /// The table in which existence should be checked (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -139,14 +139,14 @@ module Extensions = member conn.existsByFields tableName howMatched fields = Exists.byFields tableName howMatched fields (Sql.existingConnection conn) - /// Determine if a document exists using a JSON containment query (@>) + /// Determine if a document exists using a JSON containment query (@>) /// The table in which existence should be checked (may include schema) /// The document to match with the containment query /// True if any matching documents exist, false if not member conn.existsByContains tableName criteria = Exists.byContains tableName criteria (Sql.existingConnection conn) - /// Determine if a document exists using a JSON Path match query (@?) 
+ /// Determine if a document exists using a JSON Path match query (@?) /// The table in which existence should be checked (may include schema) /// The JSON Path expression to be matched /// True if any matching documents exist, false if not @@ -169,11 +169,11 @@ module Extensions = /// Retrieve a document by its ID /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve - /// Some with the document if found, None otherwise + /// Some with the document if found, None otherwise member conn.findById<'TKey, 'TDoc> tableName docId = Find.byId<'TKey, 'TDoc> tableName docId (Sql.existingConnection conn) - /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// Retrieve documents matching JSON field comparisons (->> =, etc.) /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -182,8 +182,8 @@ module Extensions = Find.byFields<'TDoc> tableName howMatched fields (Sql.existingConnection conn) /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields - /// in the document + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in + /// the document /// /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions @@ -194,7 +194,7 @@ module Extensions = Find.byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields (Sql.existingConnection conn) - /// Retrieve documents matching a JSON containment query (@>) + /// Retrieve documents matching a JSON containment query (@>) /// The table from which documents should be retrieved (may include schema) /// The document to match with the containment query /// All documents matching the given containment query @@ -202,7 +202,7 @@ module Extensions = Find.byContains<'TDoc> tableName criteria (Sql.existingConnection conn) /// - /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the + /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the /// document /// /// The table from which documents should be retrieved (may include schema) @@ -212,7 +212,7 @@ module Extensions = member conn.findByContainsOrdered<'TDoc> tableName (criteria: obj) orderFields = Find.byContainsOrdered<'TDoc> tableName criteria orderFields (Sql.existingConnection conn) - /// Retrieve documents matching a JSON Path match query (@?) + /// Retrieve documents matching a JSON Path match query (@?) /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match /// All documents matching the given JSON Path expression @@ -220,8 +220,7 @@ module Extensions = Find.byJsonPath<'TDoc> tableName jsonPath (Sql.existingConnection conn) /// - /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the - /// document + /// Retrieve documents matching a JSON Path match query (@?) 
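Each of these members simply forwards to the corresponding WithProps function with Sql.existingConnection, which makes them the natural entry point when the caller manages its own NpgsqlConnection (for transactions, pooling control, and the like). A usage sketch; the connection string is illustrative, and opening the Extensions module may be required depending on its attributes:

    open Npgsql
    open BitBadger.Documents.Postgres

    // Count documents matching a containment (@>) query on a caller-managed
    // connection rather than the library-wide data source.
    let countGreens () = backgroundTask {
        use conn = new NpgsqlConnection "Host=localhost;Database=docs"
        do! conn.OpenAsync()
        let! count = conn.countByContains "my_table" {| Sub = {| Foo = "green" |} |}
        printfn "%d matching document(s)" count
    }
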
ordered by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match @@ -230,65 +229,65 @@ module Extensions = member conn.findByJsonPathOrdered<'TDoc> tableName jsonPath orderFields = Find.byJsonPathOrdered<'TDoc> tableName jsonPath orderFields (Sql.existingConnection conn) - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// Some with the first document, or None if not found + /// Some with the first document, or None if not found member conn.findFirstByFields<'TDoc> tableName howMatched fields = Find.firstByFields<'TDoc> tableName howMatched fields (Sql.existingConnection conn) /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the - /// given fields in the document + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given + /// fields in the document /// /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered /// - /// Some with the first document ordered by the given fields, or None if not found + /// Some with the first document ordered by the given fields, or None if not found /// member conn.findFirstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields = Find.firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields (Sql.existingConnection conn) - /// Retrieve the first document matching a JSON containment query (@>) + /// Retrieve the first document matching a JSON containment query (@>) /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query - /// Some with the first document, or None if not found + /// Some with the first document, or None if not found member conn.findFirstByContains<'TDoc> tableName (criteria: obj) = Find.firstByContains<'TDoc> tableName criteria (Sql.existingConnection conn) /// - /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields - /// in the document + /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in + /// the document /// /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query /// Fields by which the results should be ordered /// - /// Some with the first document ordered by the given fields, or None if not found + /// Some with the first document ordered by the given fields, or None if not found /// member conn.findFirstByContainsOrdered<'TDoc> tableName (criteria: obj) orderFields = Find.firstByContainsOrdered<'TDoc> tableName criteria orderFields (Sql.existingConnection conn) - /// Retrieve the first document matching a JSON Path match query (@?) + /// Retrieve the first document matching a JSON Path match query (@?) 
/// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match - /// Some with the first document, or None if not found + /// Some with the first document, or None if not found member conn.findFirstByJsonPath<'TDoc> tableName jsonPath = Find.firstByJsonPath<'TDoc> tableName jsonPath (Sql.existingConnection conn) /// - /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in - /// the document + /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the + /// document /// /// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match /// Fields by which the results should be ordered /// - /// Some with the first document ordered by the given fields, or None if not found + /// Some with the first document ordered by the given fields, or None if not found /// member conn.findFirstByJsonPathOrdered<'TDoc> tableName jsonPath orderFields = Find.firstByJsonPathOrdered<'TDoc> tableName jsonPath orderFields (Sql.existingConnection conn) @@ -605,7 +604,7 @@ module Extensions = Patch.byId tableName docId patch (Sql.existingConnection conn) /// - /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, + /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, /// etc.) /// /// The table in which documents should be patched (may include schema) @@ -616,7 +615,7 @@ module Extensions = Patch.byFields tableName howMatched fields patch (Sql.existingConnection conn) /// - /// Patch documents using a JSON containment query in the WHERE clause (@>) + /// Patch documents using a JSON containment query in the WHERE clause (@>) /// /// The table in which documents should be patched (may include schema) /// The document to match the containment query @@ -624,7 +623,7 @@ module Extensions = member conn.patchByContains tableName (criteria: 'TCriteria) (patch: 'TPatch) = Patch.byContains tableName criteria patch (Sql.existingConnection conn) - /// Patch documents using a JSON Path match query in the WHERE clause (@?) + /// Patch documents using a JSON Path match query in the WHERE clause (@?) /// The table in which documents should be patched (may include schema) /// The JSON Path expression to match /// The partial document to patch the existing document @@ -646,14 +645,14 @@ module Extensions = member conn.removeFieldsByFields tableName howMatched fields fieldNames = RemoveFields.byFields tableName howMatched fields fieldNames (Sql.existingConnection conn) - /// Remove fields from documents via a JSON containment query (@>) + /// Remove fields from documents via a JSON containment query (@>) /// The table in which documents should be modified (may include schema) /// The document to match the containment query /// One or more field names to remove from the matching documents member conn.removeFieldsByContains tableName (criteria: 'TContains) fieldNames = RemoveFields.byContains tableName criteria fieldNames (Sql.existingConnection conn) - /// Remove fields from documents via a JSON Path match query (@?) + /// Remove fields from documents via a JSON Path match query (@?) 
/// The table in which documents should be modified (may include schema) /// The JSON Path expression to match /// One or more field names to remove from the matching documents @@ -666,14 +665,14 @@ module Extensions = member conn.deleteById tableName (docId: 'TKey) = Delete.byId tableName docId (Sql.existingConnection conn) - /// Delete documents by matching a JSON field comparison query (->> =, etc.) + /// Delete documents by matching a JSON field comparison query (->> =, etc.) /// The table in which documents should be deleted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match member conn.deleteByFields tableName howMatched fields = Delete.byFields tableName howMatched fields (Sql.existingConnection conn) - /// Delete documents by matching a JSON contains query (@>) + /// Delete documents by matching a JSON contains query (@>) /// The table in which documents should be deleted (may include schema) /// The document to match the containment query member conn.deleteByContains tableName (criteria: 'TContains) = @@ -707,6 +706,7 @@ type NpgsqlConnectionCSharpExtensions = /// Parameters to use for the query /// The mapping function to extract the document /// A JSON array of results for the given query + [] static member inline CustomJsonArray(conn, query, parameters, mapFunc) = Custom.JsonArray(query, parameters, mapFunc, Sql.existingConnection conn) @@ -716,6 +716,7 @@ type NpgsqlConnectionCSharpExtensions = /// Parameters to use for the query /// The StreamWriter to which the results should be written /// The mapping function to extract the document + [] static member inline WriteCustomJsonArray(conn, query, parameters, writer, mapFunc) = Custom.WriteJsonArray(query, parameters, writer, mapFunc, Sql.existingConnection conn) @@ -724,7 +725,7 @@ type NpgsqlConnectionCSharpExtensions = /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// The first matching result, or null if not found + /// The first matching result, or null if not found [] static member inline CustomSingle<'TDoc when 'TDoc: null and 'TDoc: not struct>( conn, query, parameters, mapFunc: System.Func) = @@ -736,6 +737,7 @@ type NpgsqlConnectionCSharpExtensions = /// Parameters to use for the query /// The mapping function to extract the document /// The JSON document with the first matching result, or an empty document if not found + [] static member inline CustomJsonSingle(conn, query, parameters, mapFunc) = Custom.JsonSingle(query, parameters, mapFunc, Sql.existingConnection conn) @@ -806,7 +808,7 @@ type NpgsqlConnectionCSharpExtensions = static member inline CountAll(conn, tableName) = Count.all tableName (Sql.existingConnection conn) - /// Count matching documents using JSON field comparisons (->> =, etc.) + /// Count matching documents using JSON field comparisons (->> =, etc.) 
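The patch, remove-fields, and delete members follow the same forwarding pattern, so criteria objects serialize to JSON for the @> containment match just as they do in the query functions. A small combined sketch (connection assumed open, as in the earlier example; table and field values are illustrative):

    open Npgsql
    open BitBadger.Documents.Postgres

    // Flag matching documents with a patch, then delete everything flagged.
    let retireMauve (conn: NpgsqlConnection) = backgroundTask {
        do! conn.patchByContains "my_table" {| Value = "mauve" |} {| Retired = true |}
        do! conn.deleteByContains "my_table" {| Retired = true |}
    }
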
/// The NpgsqlConnection on which to run the query /// The table in which documents should be counted (may include schema) /// Whether to match any or all of the field conditions @@ -816,7 +818,7 @@ type NpgsqlConnectionCSharpExtensions = static member inline CountByFields(conn, tableName, howMatched, fields) = Count.byFields tableName howMatched fields (Sql.existingConnection conn) - /// Count matching documents using a JSON containment query (@>) + /// Count matching documents using a JSON containment query (@>) /// The NpgsqlConnection on which to run the query /// The table in which documents should be counted (may include schema) /// The document to match with the containment query @@ -825,7 +827,7 @@ type NpgsqlConnectionCSharpExtensions = static member inline CountByContains(conn, tableName, criteria: 'TCriteria) = Count.byContains tableName criteria (Sql.existingConnection conn) - /// Count matching documents using a JSON Path match query (@?) + /// Count matching documents using a JSON Path match query (@?) /// The NpgsqlConnection on which to run the query /// The table in which documents should be counted (may include schema) /// The JSON Path expression to be matched @@ -843,7 +845,7 @@ type NpgsqlConnectionCSharpExtensions = static member inline ExistsById(conn, tableName, docId) = Exists.byId tableName docId (Sql.existingConnection conn) - /// Determine if a document exists using JSON field comparisons (->> =, etc.) + /// Determine if a document exists using JSON field comparisons (->> =, etc.) /// The NpgsqlConnection on which to run the query /// The table in which existence should be checked (may include schema) /// Whether to match any or all of the field conditions @@ -853,7 +855,7 @@ type NpgsqlConnectionCSharpExtensions = static member inline ExistsByFields(conn, tableName, howMatched, fields) = Exists.byFields tableName howMatched fields (Sql.existingConnection conn) - /// Determine if a document exists using a JSON containment query (@>) + /// Determine if a document exists using a JSON containment query (@>) /// The NpgsqlConnection on which to run the query /// The table in which existence should be checked (may include schema) /// The document to match with the containment query @@ -862,7 +864,7 @@ type NpgsqlConnectionCSharpExtensions = static member inline ExistsByContains(conn, tableName, criteria: 'TCriteria) = Exists.byContains tableName criteria (Sql.existingConnection conn) - /// Determine if a document exists using a JSON Path match query (@?) + /// Determine if a document exists using a JSON Path match query (@?) /// The NpgsqlConnection on which to run the query /// The table in which existence should be checked (may include schema) /// The JSON Path expression to be matched @@ -892,12 +894,12 @@ type NpgsqlConnectionCSharpExtensions = /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve - /// The document if found, null otherwise + /// The document if found, null otherwise [] static member inline FindById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(conn, tableName, docId: 'TKey) = Find.ById<'TKey, 'TDoc>(tableName, docId, Sql.existingConnection conn) - /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
/// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions @@ -908,8 +910,8 @@ type NpgsqlConnectionCSharpExtensions = Find.ByFields<'TDoc>(tableName, howMatched, fields, Sql.existingConnection conn) /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in - /// the document + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in the + /// document /// /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) @@ -922,7 +924,7 @@ type NpgsqlConnectionCSharpExtensions = Find.ByFieldsOrdered<'TDoc>( tableName, howMatched, queryFields, orderFields, Sql.existingConnection conn) - /// Retrieve documents matching a JSON containment query (@>) + /// Retrieve documents matching a JSON containment query (@>) /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// The document to match with the containment query @@ -932,8 +934,7 @@ type NpgsqlConnectionCSharpExtensions = Find.ByContains<'TDoc>(tableName, criteria, Sql.existingConnection conn) /// - /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the - /// document + /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the document /// /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) @@ -944,7 +945,7 @@ type NpgsqlConnectionCSharpExtensions = static member inline FindByContainsOrdered<'TDoc>(conn, tableName, criteria: obj, orderFields) = Find.ByContainsOrdered<'TDoc>(tableName, criteria, orderFields, Sql.existingConnection conn) - /// Retrieve documents matching a JSON Path match query (@?) + /// Retrieve documents matching a JSON Path match query (@?) /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match @@ -954,7 +955,7 @@ type NpgsqlConnectionCSharpExtensions = Find.ByJsonPath<'TDoc>(tableName, jsonPath, Sql.existingConnection conn) /// - /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document + /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document /// /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) @@ -965,19 +966,19 @@ type NpgsqlConnectionCSharpExtensions = static member inline FindByJsonPathOrdered<'TDoc>(conn, tableName, jsonPath, orderFields) = Find.ByJsonPathOrdered<'TDoc>(tableName, jsonPath, orderFields, Sql.existingConnection conn) - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
/// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The first document, or null if not found + /// The first document, or null if not found [] static member inline FindFirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>( conn, tableName, howMatched, fields) = Find.FirstByFields<'TDoc>(tableName, howMatched, fields, Sql.existingConnection conn) /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given /// fields in the document /// /// The NpgsqlConnection on which to run the query @@ -985,55 +986,55 @@ type NpgsqlConnectionCSharpExtensions = /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// The first document ordered by the given fields, or null if not found + /// The first document ordered by the given fields, or null if not found [] static member inline FindFirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( conn, tableName, howMatched, queryFields, orderFields) = Find.FirstByFieldsOrdered<'TDoc>( tableName, howMatched, queryFields, orderFields, Sql.existingConnection conn) - /// Retrieve the first document matching a JSON containment query (@>) + /// Retrieve the first document matching a JSON containment query (@>) /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query - /// The first document, or null if not found + /// The first document, or null if not found [] static member inline FindFirstByContains<'TDoc when 'TDoc: null and 'TDoc: not struct>( conn, tableName, criteria: obj) = Find.FirstByContains<'TDoc>(tableName, criteria, Sql.existingConnection conn) /// - /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in - /// the document + /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in the + /// document /// /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query /// Fields by which the results should be ordered - /// The first document ordered by the given fields, or null if not found + /// The first document ordered by the given fields, or null if not found [] static member inline FindFirstByContainsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( conn, tableName, criteria: obj, orderFields) = Find.FirstByContainsOrdered<'TDoc>(tableName, criteria, orderFields, Sql.existingConnection conn) - /// Retrieve the first document matching a JSON Path match query (@?) + /// Retrieve the first document matching a JSON Path match query (@?) 
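As a usage sketch for the first-match members (the people table and Person type here are assumptions, not part of the patch):

    // First matching document with an ordering applied; these members return
    // null when no document matches, per the 'TDoc: null constraint.
    var person = await conn.FindFirstByFieldsOrdered<Person>(
        "people", FieldMatch.Any,
        [Field.Equal("Status", "active")],  // WHERE data ->> 'Status' = 'active'
        [Field.Named("LastName")]);         // ORDER BY data ->> 'LastName'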
/// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match - /// The first document, or null if not found + /// The first document, or null if not found [] static member inline FindFirstByJsonPath<'TDoc when 'TDoc: null and 'TDoc: not struct>(conn, tableName, jsonPath) = Find.FirstByJsonPath<'TDoc>(tableName, jsonPath, Sql.existingConnection conn) /// - /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the + /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the /// document /// /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match /// Fields by which the results should be ordered - /// The first document ordered by the given fields, or null if not found + /// The first document ordered by the given fields, or null if not found [] static member inline FindFirstByJsonPathOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( conn, tableName, jsonPath, orderFields) = @@ -1107,8 +1108,7 @@ type NpgsqlConnectionCSharpExtensions = Json.byFields tableName howMatched fields (Sql.existingConnection conn) /// - /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, - /// etc.) + /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.) /// /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) @@ -1120,8 +1120,8 @@ type NpgsqlConnectionCSharpExtensions = Json.writeByFields tableName writer howMatched fields (Sql.existingConnection conn) /// - /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) ordered by the given - /// fields in the document + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) ordered by the given fields + /// in the document /// /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) @@ -1134,8 +1134,8 @@ type NpgsqlConnectionCSharpExtensions = Json.byFieldsOrdered tableName howMatched queryFields orderFields (Sql.existingConnection conn) /// - /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, - /// etc.) ordered by the given fields in the document + /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.) 
+ /// ordered by the given fields in the document /// /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) @@ -1181,8 +1181,8 @@ type NpgsqlConnectionCSharpExtensions = Json.byContainsOrdered tableName criteria orderFields (Sql.existingConnection conn) /// - /// Write JSON documents to the given StreamWriter matching a JSON containment query (@>) - /// ordered by the given fields in the document + /// Write JSON documents to the given StreamWriter matching a JSON containment query (@>) ordered + /// by the given fields in the document /// /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) @@ -1214,8 +1214,7 @@ type NpgsqlConnectionCSharpExtensions = Json.writeByJsonPath tableName writer jsonPath (Sql.existingConnection conn) /// - /// Retrieve JSON documents matching a JSON Path match query (@?) ordered by the given fields in the - /// document + /// Retrieve JSON documents matching a JSON Path match query (@?) ordered by the given fields in the document /// /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) @@ -1227,8 +1226,8 @@ type NpgsqlConnectionCSharpExtensions = Json.byJsonPathOrdered tableName jsonPath orderFields (Sql.existingConnection conn) /// - /// Write JSON documents to the given StreamWriter matching a JSON Path match query (@?) ordered - /// by the given fields in the document + /// Write JSON documents to the given StreamWriter matching a JSON Path match query (@?) ordered by + /// the given fields in the document /// /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) @@ -1239,9 +1238,7 @@ type NpgsqlConnectionCSharpExtensions = static member inline WriteJsonByJsonPathOrdered(conn, tableName, writer, jsonPath, orderFields) = Json.writeByJsonPathOrdered tableName writer jsonPath orderFields (Sql.existingConnection conn) - /// - /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) - /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) 
/// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions @@ -1303,8 +1300,7 @@ type NpgsqlConnectionCSharpExtensions = Json.firstByContains tableName criteria (Sql.existingConnection conn) /// - /// Write the first JSON document to the given StreamWriter matching a JSON containment query - /// (@>) + /// Write the first JSON document to the given StreamWriter matching a JSON containment query (@>) /// /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) @@ -1315,8 +1311,8 @@ type NpgsqlConnectionCSharpExtensions = Json.writeFirstByContains tableName writer criteria (Sql.existingConnection conn) /// - /// Retrieve the first JSON document matching a JSON containment query (@>) ordered by the given - /// fields in the document + /// Retrieve the first JSON document matching a JSON containment query (@>) ordered by the given fields in + /// the document /// /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) @@ -1328,8 +1324,8 @@ type NpgsqlConnectionCSharpExtensions = Json.firstByContainsOrdered tableName criteria orderFields (Sql.existingConnection conn) /// - /// Write the first JSON document to the given StreamWriter matching a JSON containment query - /// (@>) ordered by the given fields in the document + /// Write the first JSON document to the given StreamWriter matching a JSON containment query (@>) + /// ordered by the given fields in the document /// /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) @@ -1361,8 +1357,8 @@ type NpgsqlConnectionCSharpExtensions = Json.writeFirstByJsonPath tableName writer jsonPath (Sql.existingConnection conn) /// - /// Retrieve the first JSON document matching a JSON Path match query (@?) ordered by the given fields - /// in the document + /// Retrieve the first JSON document matching a JSON Path match query (@?) ordered by the given fields in the + /// document /// /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) @@ -1416,7 +1412,7 @@ type NpgsqlConnectionCSharpExtensions = Patch.byId tableName docId patch (Sql.existingConnection conn) /// - /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) + /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) 
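A short sketch of the writer-based members, which exist so large result sets can be streamed rather than buffered in memory; outputStream stands in for any writable Stream and is an assumption:

    // Write the JSON array for all matching documents straight to the stream.
    await using var writer = new StreamWriter(outputStream);
    await conn.WriteJsonByFields(
        "people", writer, FieldMatch.All,
        [Field.Equal("City", "Chicago")]);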
/// /// The NpgsqlConnection on which to run the query /// The table in which documents should be patched (may include schema) @@ -1427,7 +1423,7 @@ type NpgsqlConnectionCSharpExtensions = static member inline PatchByFields(conn, tableName, howMatched, fields, patch: 'TPatch) = Patch.byFields tableName howMatched fields patch (Sql.existingConnection conn) - /// Patch documents using a JSON containment query in the WHERE clause (@>) + /// Patch documents using a JSON containment query in the WHERE clause (@>) /// The NpgsqlConnection on which to run the query /// The table in which documents should be patched (may include schema) /// The document to match the containment query @@ -1436,7 +1432,7 @@ type NpgsqlConnectionCSharpExtensions = static member inline PatchByContains(conn, tableName, criteria: 'TCriteria, patch: 'TPatch) = Patch.byContains tableName criteria patch (Sql.existingConnection conn) - /// Patch documents using a JSON Path match query in the WHERE clause (@?) + /// Patch documents using a JSON Path match query in the WHERE clause (@?) /// The NpgsqlConnection on which to run the query /// The table in which documents should be patched (may include schema) /// The JSON Path expression to match @@ -1464,7 +1460,7 @@ type NpgsqlConnectionCSharpExtensions = static member inline RemoveFieldsByFields(conn, tableName, howMatched, fields, fieldNames) = RemoveFields.byFields tableName howMatched fields fieldNames (Sql.existingConnection conn) - /// Remove fields from documents via a JSON containment query (@>) + /// Remove fields from documents via a JSON containment query (@>) /// The NpgsqlConnection on which to run the query /// The table in which documents should be modified (may include schema) /// The document to match the containment query @@ -1473,7 +1469,7 @@ type NpgsqlConnectionCSharpExtensions = static member inline RemoveFieldsByContains(conn, tableName, criteria: 'TContains, fieldNames) = RemoveFields.byContains tableName criteria fieldNames (Sql.existingConnection conn) - /// Remove fields from documents via a JSON Path match query (@?) + /// Remove fields from documents via a JSON Path match query (@?) /// The NpgsqlConnection on which to run the query /// The table in which documents should be modified (may include schema) /// The JSON Path expression to match @@ -1490,7 +1486,7 @@ type NpgsqlConnectionCSharpExtensions = static member inline DeleteById(conn, tableName, docId: 'TKey) = Delete.byId tableName docId (Sql.existingConnection conn) - /// Delete documents by matching a JSON field comparison query (->> =, etc.) + /// Delete documents by matching a JSON field comparison query (->> =, etc.) 
/// The NpgsqlConnection on which to run the query /// The table in which documents should be deleted (may include schema) /// Whether to match any or all of the field conditions @@ -1499,7 +1495,7 @@ type NpgsqlConnectionCSharpExtensions = static member inline DeleteByFields(conn, tableName, howMatched, fields) = Delete.byFields tableName howMatched fields (Sql.existingConnection conn) - /// Delete documents by matching a JSON contains query (@>) + /// Delete documents by matching a JSON contains query (@>) /// The NpgsqlConnection on which to run the query /// The table in which documents should be deleted (may include schema) /// The document to match the containment query diff --git a/src/Postgres/Functions.fs b/src/Postgres/Functions.fs index 7ffca97..f9f0eae 100644 --- a/src/Postgres/Functions.fs +++ b/src/Postgres/Functions.fs @@ -59,7 +59,7 @@ module Custom = /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// Some with the first matching result, or None if not found + /// Some with the first matching result, or None if not found [] let single<'TDoc> query parameters (mapFunc: RowReader -> 'TDoc) = WithProps.Custom.single<'TDoc> query parameters mapFunc (fromDataSource ()) @@ -68,7 +68,7 @@ module Custom = /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// The first matching result, or null if not found + /// The first matching result, or null if not found let Single<'TDoc when 'TDoc: null and 'TDoc: not struct>( query, parameters, mapFunc: System.Func) = WithProps.Custom.Single<'TDoc>(query, parameters, mapFunc, fromDataSource ()) @@ -171,7 +171,7 @@ module Count = let all tableName = WithProps.Count.all tableName (fromDataSource ()) - /// Count matching documents using JSON field comparisons (->> =, etc.) + /// Count matching documents using JSON field comparisons (->> =, etc.) /// The table in which documents should be counted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -180,7 +180,7 @@ module Count = let byFields tableName howMatched fields = WithProps.Count.byFields tableName howMatched fields (fromDataSource ()) - /// Count matching documents using a JSON containment query (@>) + /// Count matching documents using a JSON containment query (@>) /// The table in which documents should be counted (may include schema) /// The document to match with the containment query /// The count of the documents in the table @@ -188,7 +188,7 @@ module Count = let byContains tableName criteria = WithProps.Count.byContains tableName criteria (fromDataSource ()) - /// Count matching documents using a JSON Path match query (@?) + /// Count matching documents using a JSON Path match query (@?) /// The table in which documents should be counted (may include schema) /// The JSON Path expression to be matched /// The count of the documents in the table @@ -209,7 +209,7 @@ module Exists = let byId tableName docId = WithProps.Exists.byId tableName docId (fromDataSource ()) - /// Determine if a document exists using JSON field comparisons (->> =, etc.) + /// Determine if a document exists using JSON field comparisons (->> =, etc.) 
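A hedged sketch of the same checks through the data-source-backed Functions modules, which take no connection because they draw from the configured data source (the PascalCase names assume the modules' CompiledName wrappers; the table is an assumption):

    // Count and existence checks without an explicit connection.
    var active = await Count.ByFields("people", FieldMatch.Any,
        [Field.Equal("Status", "active")]);
    var any = await Exists.ByContains("people", new { City = "Chicago" });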
/// The table in which existence should be checked (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -218,7 +218,7 @@ module Exists = let byFields tableName howMatched fields = WithProps.Exists.byFields tableName howMatched fields (fromDataSource ()) - /// Determine if a document exists using a JSON containment query (@>) + /// Determine if a document exists using a JSON containment query (@>) /// The table in which existence should be checked (may include schema) /// The document to match with the containment query /// True if any matching documents exist, false if not @@ -226,7 +226,7 @@ module Exists = let byContains tableName criteria = WithProps.Exists.byContains tableName criteria (fromDataSource ()) - /// Determine if a document exists using a JSON Path match query (@?) + /// Determine if a document exists using a JSON Path match query (@?) /// The table in which existence should be checked (may include schema) /// The JSON Path expression to be matched /// True if any matching documents exist, false if not @@ -270,7 +270,7 @@ module Find = /// Retrieve a document by its ID /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve - /// Some with the document if found, None otherwise + /// Some with the document if found, None otherwise [] let byId<'TKey, 'TDoc> tableName docId = WithProps.Find.byId<'TKey, 'TDoc> tableName docId (fromDataSource ()) @@ -278,11 +278,11 @@ module Find = /// Retrieve a document by its ID /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve - /// The document if found, null otherwise + /// The document if found, null otherwise let ById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, docId: 'TKey) = WithProps.Find.ById<'TKey, 'TDoc>(tableName, docId, fromDataSource ()) - /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// Retrieve documents matching JSON field comparisons (->> =, etc.) /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -291,7 +291,7 @@ module Find = let byFields<'TDoc> tableName howMatched fields = WithProps.Find.byFields<'TDoc> tableName howMatched fields (fromDataSource ()) - /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// Retrieve documents matching JSON field comparisons (->> =, etc.) /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -300,8 +300,8 @@ module Find = WithProps.Find.ByFields<'TDoc>(tableName, howMatched, fields, fromDataSource ()) /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in - /// the document + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in the + /// document /// /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions @@ -313,8 +313,8 @@ module Find = WithProps.Find.byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields (fromDataSource ()) /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in - /// the document + /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
ordered by the given fields in the + /// document /// /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions @@ -324,7 +324,7 @@ module Find = let ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields) = WithProps.Find.ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, fromDataSource ()) - /// Retrieve documents matching a JSON containment query (@>) + /// Retrieve documents matching a JSON containment query (@>) /// The table from which documents should be retrieved (may include schema) /// The document to match with the containment query /// All documents matching the given containment query @@ -332,7 +332,7 @@ module Find = let byContains<'TDoc> tableName (criteria: obj) = WithProps.Find.byContains<'TDoc> tableName criteria (fromDataSource ()) - /// Retrieve documents matching a JSON containment query (@>) + /// Retrieve documents matching a JSON containment query (@>) /// The table from which documents should be retrieved (may include schema) /// The document to match with the containment query /// All documents matching the given containment query @@ -340,8 +340,7 @@ module Find = WithProps.Find.ByContains<'TDoc>(tableName, criteria, fromDataSource ()) /// - /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the - /// document + /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) /// The document to match with the containment query @@ -352,8 +351,7 @@ module Find = WithProps.Find.byContainsOrdered<'TDoc> tableName criteria orderFields (fromDataSource ()) /// - /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the - /// document + /// Retrieve documents matching a JSON containment query (@>) ordered by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) /// The document to match with the containment query @@ -362,7 +360,7 @@ module Find = let ByContainsOrdered<'TDoc>(tableName, criteria: obj, orderFields) = WithProps.Find.ByContainsOrdered<'TDoc>(tableName, criteria, orderFields, fromDataSource ()) - /// Retrieve documents matching a JSON Path match query (@?) + /// Retrieve documents matching a JSON Path match query (@?) /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match /// All documents matching the given JSON Path expression @@ -370,7 +368,7 @@ module Find = let byJsonPath<'TDoc> tableName jsonPath = WithProps.Find.byJsonPath<'TDoc> tableName jsonPath (fromDataSource ()) - /// Retrieve documents matching a JSON Path match query (@?) + /// Retrieve documents matching a JSON Path match query (@?) /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match /// All documents matching the given JSON Path expression @@ -378,7 +376,7 @@ module Find = WithProps.Find.ByJsonPath<'TDoc>(tableName, jsonPath, fromDataSource ()) /// - /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document + /// Retrieve documents matching a JSON Path match query (@?) 
ordered by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match @@ -389,7 +387,7 @@ module Find = WithProps.Find.byJsonPathOrdered<'TDoc> tableName jsonPath orderFields (fromDataSource ()) /// - /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document + /// Retrieve documents matching a JSON Path match query (@?) ordered by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) /// The JSON Path expression to match @@ -398,128 +396,122 @@ module Find = let ByJsonPathOrdered<'TDoc>(tableName, jsonPath, orderFields) = WithProps.Find.ByJsonPathOrdered<'TDoc>(tableName, jsonPath, orderFields, fromDataSource ()) - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// Some with the first document, or None if not found + /// Some with the first document, or None if not found [] let firstByFields<'TDoc> tableName howMatched fields = WithProps.Find.firstByFields<'TDoc> tableName howMatched fields (fromDataSource ()) - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The first document, or null if not found + /// The first document, or null if not found let FirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, howMatched, fields) = WithProps.Find.FirstByFields<'TDoc>(tableName, howMatched, fields, fromDataSource ()) /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given /// fields in the document /// /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// - /// Some with the first document ordered by the given fields, or None if not found - /// + /// Some with the first document ordered by the given fields, or None if not found [] let firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields = WithProps.Find.firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields (fromDataSource ()) /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
ordered by the given /// fields in the document /// /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// The first document ordered by the given fields, or null if not found + /// The first document ordered by the given fields, or null if not found let FirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( tableName, howMatched, queryFields, orderFields) = WithProps.Find.FirstByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, fromDataSource ()) - /// Retrieve the first document matching a JSON containment query (@>) + /// Retrieve the first document matching a JSON containment query (@>) /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query - /// Some with the first document, or None if not found + /// Some with the first document, or None if not found [] let firstByContains<'TDoc> tableName (criteria: obj) = WithProps.Find.firstByContains<'TDoc> tableName criteria (fromDataSource ()) - /// Retrieve the first document matching a JSON containment query (@>) + /// Retrieve the first document matching a JSON containment query (@>) /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query - /// The first document, or null if not found + /// The first document, or null if not found let FirstByContains<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, criteria: obj) = WithProps.Find.FirstByContains<'TDoc>(tableName, criteria, fromDataSource ()) /// - /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in - /// the document + /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in the + /// document /// /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query /// Fields by which the results should be ordered - /// - /// Some with the first document ordered by the given fields, or None if not found - /// + /// Some with the first document ordered by the given fields, or None if not found [] let firstByContainsOrdered<'TDoc> tableName (criteria: obj) orderFields = WithProps.Find.firstByContainsOrdered<'TDoc> tableName criteria orderFields (fromDataSource ()) /// - /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in - /// the document + /// Retrieve the first document matching a JSON containment query (@>) ordered by the given fields in the + /// document /// /// The table from which a document should be retrieved (may include schema) /// The document to match with the containment query /// Fields by which the results should be ordered - /// The first document ordered by the given fields, or null if not found + /// The first document ordered by the given fields, or null if not found let FirstByContainsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, criteria: obj, orderFields) = WithProps.Find.FirstByContainsOrdered<'TDoc>(tableName, criteria, orderFields, fromDataSource ()) - /// Retrieve the first document matching a JSON Path match query (@?) + /// Retrieve the first document matching a JSON Path match query (@?) 
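A sketch of the first-match JSON Path and ordered containment forms against the configured data source (table and document type are assumptions):

    // First document whose Age satisfies the JSON Path predicate, and the
    // first containment match under an explicit ordering.
    var adult = await Find.FirstByJsonPath<Person>("people", "$.Age ? (@ >= 21)");
    var first = await Find.FirstByContainsOrdered<Person>(
        "people", new { City = "Chicago" }, [Field.Named("LastName")]);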
/// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match - /// Some with the first document, or None if not found + /// Some with the first document, or None if not found [] let firstByJsonPath<'TDoc> tableName jsonPath = WithProps.Find.firstByJsonPath<'TDoc> tableName jsonPath (fromDataSource ()) - /// Retrieve the first document matching a JSON Path match query (@?) + /// Retrieve the first document matching a JSON Path match query (@?) /// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match - /// The first document, or null if not found + /// The first document, or null if not found let FirstByJsonPath<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, jsonPath) = WithProps.Find.FirstByJsonPath<'TDoc>(tableName, jsonPath, fromDataSource ()) /// - /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the + /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the /// document /// /// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match /// Fields by which the results should be ordered - /// - /// Some with the first document ordered by the given fields, or None if not found - /// + /// Some with the first document ordered by the given fields, or None if not found [] let firstByJsonPathOrdered<'TDoc> tableName jsonPath orderFields = WithProps.Find.firstByJsonPathOrdered<'TDoc> tableName jsonPath orderFields (fromDataSource ()) /// - /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the + /// Retrieve the first document matching a JSON Path match query (@?) ordered by the given fields in the /// document /// /// The table from which a document should be retrieved (may include schema) /// The JSON Path expression to match /// Fields by which the results should be ordered - /// The first document ordered by the given fields, or null if not found + /// The first document ordered by the given fields, or null if not found let FirstByJsonPathOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, jsonPath, orderFields) = WithProps.Find.FirstByJsonPathOrdered<'TDoc>(tableName, jsonPath, orderFields, fromDataSource ()) @@ -885,7 +877,7 @@ module Patch = WithProps.Patch.byId tableName docId patch (fromDataSource ()) /// - /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) + /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) 
/// /// The table in which documents should be patched (may include schema) /// Whether to match any or all of the field conditions @@ -895,7 +887,7 @@ module Patch = let byFields tableName howMatched fields (patch: 'TPatch) = WithProps.Patch.byFields tableName howMatched fields patch (fromDataSource ()) - /// Patch documents using a JSON containment query in the WHERE clause (@>) + /// Patch documents using a JSON containment query in the WHERE clause (@>) /// The table in which documents should be patched (may include schema) /// The document to match the containment query /// The partial document to patch the existing document @@ -903,7 +895,7 @@ module Patch = let byContains tableName (criteria: 'TCriteria) (patch: 'TPatch) = WithProps.Patch.byContains tableName criteria patch (fromDataSource ()) - /// Patch documents using a JSON Path match query in the WHERE clause (@?) + /// Patch documents using a JSON Path match query in the WHERE clause (@?) /// The table in which documents should be patched (may include schema) /// The JSON Path expression to match /// The partial document to patch the existing document @@ -933,7 +925,7 @@ module RemoveFields = let byFields tableName howMatched fields fieldNames = WithProps.RemoveFields.byFields tableName howMatched fields fieldNames (fromDataSource ()) - /// Remove fields from documents via a JSON containment query (@>) + /// Remove fields from documents via a JSON containment query (@>) /// The table in which documents should be modified (may include schema) /// The document to match the containment query /// One or more field names to remove from the matching documents @@ -941,7 +933,7 @@ module RemoveFields = let byContains tableName (criteria: 'TContains) fieldNames = WithProps.RemoveFields.byContains tableName criteria fieldNames (fromDataSource ()) - /// Remove fields from documents via a JSON Path match query (@?) + /// Remove fields from documents via a JSON Path match query (@?) /// The table in which documents should be modified (may include schema) /// The JSON Path expression to match /// One or more field names to remove from the matching documents @@ -961,7 +953,7 @@ module Delete = let byId tableName (docId: 'TKey) = WithProps.Delete.byId tableName docId (fromDataSource ()) - /// Delete documents by matching a JSON field comparison query (->> =, etc.) + /// Delete documents by matching a JSON field comparison query (->> =, etc.) 
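For the in-place update functions, a brief sketch (table and document shapes assumed): the byContains patch merges the given document over each match with the JSONB || operator shown in Query.patch, while the remove-fields functions strip the named fields:

    // Merge { Status = "loyal" } into every document containing the criteria.
    await Patch.ByContains("people", new { City = "Chicago" }, new { Status = "loyal" });
    // Remove the Age field wherever the JSON Path predicate matches.
    await RemoveFields.ByJsonPath("people", "$.Age ? (@ < 0)", ["Age"]);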
/// The table in which documents should be deleted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -969,7 +961,7 @@ module Delete = let byFields tableName howMatched fields = WithProps.Delete.byFields tableName howMatched fields (fromDataSource ()) - /// Delete documents by matching a JSON contains query (@>) + /// Delete documents by matching a JSON contains query (@>) /// The table in which documents should be deleted (may include schema) /// The document to match the containment query [] diff --git a/src/Postgres/Library.fs b/src/Postgres/Library.fs index 62e38ae..beb9542 100644 --- a/src/Postgres/Library.fs +++ b/src/Postgres/Library.fs @@ -7,11 +7,11 @@ open System.Text [] type DocumentIndex = - /// A GIN index with standard operations (all operators supported) + /// A GIN index with standard operations (all operators supported) | Full /// - /// A GIN index with JSONPath operations (optimized for @>, @?, @@ operators) + /// A GIN index with JSON Path operations (optimized for @>, @?, @@ operators) /// | Optimized @@ -97,7 +97,7 @@ module Parameters = name, Sql.jsonb (Configuration.serializer().Serialize it) /// Create JSON field parameters - /// The Fields to convert to parameters + /// The Fields to convert to parameters /// The current parameters for the query /// A unified sequence of parameter names and values [] @@ -132,7 +132,7 @@ module Parameters = /// Append JSON field name parameters for the given field names to the given parameters /// The names of fields to be addressed - /// The name (@name) and parameter value for the field names + /// The name (@name) and parameter value for the field names [] let fieldNameParams (fieldNames: string seq) = if Seq.length fieldNames = 1 then "@name", Sql.string (Seq.head fieldNames) @@ -148,12 +148,10 @@ module Parameters = [] module Query = - /// - /// Create a WHERE clause fragment to implement a comparison on fields in a JSON document - /// + /// Create a WHERE clause fragment to implement a comparison on fields in a JSON document /// How the fields should be matched /// The fields for the comparisons - /// A WHERE clause implementing the comparisons for the given fields + /// A WHERE clause implementing the comparisons for the given fields [] let whereByFields (howMatched: FieldMatch) fields = let name = ParameterName() @@ -182,9 +180,9 @@ module Query = else $"{it.Path PostgreSQL AsSql} {it.Comparison.OpSql} {param}") |> String.concat $" {howMatched} " - /// Create a WHERE clause fragment to implement an ID-based query + /// Create a WHERE clause fragment to implement an ID-based query /// The ID of the document - /// A WHERE clause fragment identifying a document by its ID + /// A WHERE clause fragment identifying a document by its ID [] let whereById<'TKey> (docId: 'TKey) = whereByFields Any [ { Field.Equal (Configuration.idField ()) docId with ParameterName = Some "@id" } ] @@ -209,32 +207,28 @@ module Query = let tableName = name.Split '.' 
|> Array.last
         $"CREATE INDEX IF NOT EXISTS idx_{tableName}_document ON {name} USING GIN (data{extraOps})"
 
-    /// 
-    /// Create a WHERE clause fragment to implement a @> (JSON contains) condition
-    /// 
+    /// Create a WHERE clause fragment to implement a @> (JSON contains) condition
     /// The parameter name for the query
-    /// A WHERE clause fragment for the contains condition
+    /// A WHERE clause fragment for the contains condition
     []
     let whereDataContains paramName =
         $"data @> %s{paramName}"
 
-    /// 
-    /// Create a WHERE clause fragment to implement a @? (JSON Path match) condition
-    /// 
+    /// Create a WHERE clause fragment to implement a @? (JSON Path match) condition
     /// The parameter name for the query
-    /// A WHERE clause fragment for the JSON Path match condition
+    /// A WHERE clause fragment for the JSON Path match condition
     []
     let whereJsonPathMatches paramName =
         $"data @? %s{paramName}::jsonpath"
 
-    /// Create an UPDATE statement to patch documents
+    /// Create an UPDATE statement to patch documents
     /// The table to be updated
     /// A query to patch documents
     []
     let patch tableName =
         $"UPDATE %s{tableName} SET data = data || @data"
 
-    /// Create an UPDATE statement to remove fields from documents
+    /// Create an UPDATE statement to remove fields from documents
     /// The table to be updated
     /// A query to remove fields from documents
     []
@@ -292,14 +286,14 @@ module Results =
     let fromData<'T> row : 'T =
         fromDocument "data" row
 
-    /// Extract a count from the column it
+    /// Extract a count from the column it
     /// A row reader set to the row with the count to retrieve
     /// The count from the row
     []
     let toCount (row: RowReader) =
         row.int "it"
 
-    /// Extract a true/false value from the column it
+    /// Extract a true/false value from the column it
     /// A row reader set to the row with the true/false value to retrieve
     /// The true/false value from the row
     []
diff --git a/src/Postgres/WithProps.fs b/src/Postgres/WithProps.fs
index f0f7c39..2a06c96 100644
--- a/src/Postgres/WithProps.fs
+++ b/src/Postgres/WithProps.fs
@@ -1,4 +1,4 @@
-/// Versions of queries that accept SqlProps as the last parameter
+/// Versions of queries that accept SqlProps as the last parameter
 module BitBadger.Documents.Postgres.WithProps
 
 open System.IO
diff --git a/src/Tests.CSharp/PostgresCSharpExtensionTests.cs b/src/Tests.CSharp/PostgresCSharpExtensionTests.cs
index 65b0743..c0398ed 100644
--- a/src/Tests.CSharp/PostgresCSharpExtensionTests.cs
+++ b/src/Tests.CSharp/PostgresCSharpExtensionTests.cs
@@ -9,11 +9,14 @@ using static CommonExtensionsAndTypesForNpgsqlFSharp;
 using static Runner;
 
 /// 
-/// C# tests for the extensions on the NpgsqlConnection type
+/// C# tests for the extensions on the NpgsqlConnection type
 /// 
 public class PostgresCSharpExtensionTests
 {
-    private static Task LoadDocs() => PostgresCSharpTests.LoadDocs();
+    private static async Task LoadDocs(NpgsqlConnection conn)
+    {
+        foreach (var doc in JsonDocument.TestDocuments) await conn.Insert(PostgresDb.TableName, doc);
+    }
 
     /// 
     /// Create a connection to the throwaway database
@@ -27,6 +30,93 @@ public class PostgresCSharpExtensionTests
         return conn;
     }
 
+    /// Set up a stream writer for a test
+    private static StreamWriter WriteStream(Stream stream)
+    {
+        StreamWriter writer = new(stream);
+        writer.AutoFlush = true;
+        return writer;
+    }
+
+    /// Get the text of the given stream
+    private static string StreamText(Stream stream)
+    {
+        stream.Position = 0L;
+        using StreamReader reader = new(stream);
+        return reader.ReadToEnd();
+    }
+
+    /// Verify a JSON array begins with "["
and ends with "]" + private static void VerifyBeginEnd(string json) + { + Expect.stringStarts(json, "[", "The array should have started with `[`"); + Expect.stringEnds(json, "]", "The array should have ended with `]`"); + } + + /// Verify the presence of a document by its ID + private static void VerifyDocById(string json, string docId) + { + Expect.stringContains(json, $"{{\"Id\": \"{docId}\",", $"Document `{docId}` not present"); + } + + /// Verify the presence of a document by its ID + private static void VerifySingleById(string json, string docId) + { + VerifyBeginEnd(json); + Expect.stringContains(json, $"{{\"Id\": \"{docId}\",", $"Document `{docId}` not present"); + } + + /// Verify the presence of any of the given document IDs in the given JSON + private static void VerifyAnyById(string json, IEnumerable docIds) + { + var theIds = docIds.ToList(); + if (theIds.Any(it => json.Contains($"{{\"Id\": \"{it}\""))) return; + var ids = string.Join(", ", theIds); + Expect.isTrue(false, $"Could not find any of IDs {ids} in {json}"); + } + + /// Verify the JSON for `all` returning data + private static void VerifyAllData(string json) + { + VerifyBeginEnd(json); + IEnumerable ids = ["one", "two", "three", "four", "five"]; + foreach (var docId in ids) VerifyDocById(json, docId); + } + + /// Verify an empty JSON array + private static void VerifyEmpty(string json) + { + Expect.equal(json, "[]", "There should be no documents returned"); + } + + /// Verify an empty JSON document + private static void VerifyNoDoc(string json) + { + Expect.equal(json, "{}", "There should be no document returned"); + } + + /// Verify the JSON for an ordered query + private static void VerifyExpectedOrder(string json, string idFirst, string idSecond, string? idThird = null, + string? idFourth = null, string? idFifth = null) + { + var firstIdx = json.IndexOf($"{{\"Id\": \"{idFirst}\",", StringComparison.Ordinal); + var secondIdx = json.IndexOf($"{{\"Id\": \"{idSecond}\",", StringComparison.Ordinal); + VerifyBeginEnd(json); + Expect.isGreaterThan(secondIdx, firstIdx, $"`{idSecond}` should have been after `{idFirst}`"); + if (idThird is null) return; + + var thirdIdx = json.IndexOf($"{{\"Id\": \"{idThird}\",", StringComparison.Ordinal); + Expect.isGreaterThan(thirdIdx, secondIdx, $"`{idThird}` should have been after `{idSecond}`"); + if (idFourth is null) return; + + var fourthIdx = json.IndexOf($"{{\"Id\": \"{idFourth}\",", StringComparison.Ordinal); + Expect.isGreaterThan(fourthIdx, thirdIdx, $"`{idFourth}` should have been after `{idThird}`"); + if (idFifth is null) return; + + var fifthIdx = json.IndexOf($"{{\"Id\": \"{idFifth}\",", StringComparison.Ordinal); + Expect.isGreaterThan(fifthIdx, fourthIdx, $"`{idFifth}` should have been after `{idFourth}`"); + } + /// /// Integration tests for the SQLite extension methods /// @@ -39,7 +129,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.CustomList(Query.Find(PostgresDb.TableName), Parameters.None, Results.FromData); @@ -49,7 +139,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.CustomList( $"SELECT data FROM {PostgresDb.TableName} WHERE data @? 
@path::jsonpath", @@ -58,13 +148,72 @@ public class PostgresCSharpExtensionTests Expect.isEmpty(docs, "There should have been no documents returned"); }) ]), + TestList("CustomJsonArray", + [ + TestCase("succeeds when data is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + var docs = await conn.CustomJsonArray(Query.Find(PostgresDb.TableName), Parameters.None, + Results.JsonFromData); + Expect.stringStarts(docs, "[", "The JSON array should have started with `[`"); + Expect.hasLength(docs.Split("{\"Id\":"), 6, "There should have been 5 documents returned"); + Expect.stringEnds(docs, "]", "The JSON array should have ended with `[`"); + }), + TestCase("succeeds when data is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + var docs = await conn.CustomJsonArray( + $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath", + [Tuple.Create("@path", Sql.@string("$.NumValue ? (@ > 100)"))], Results.JsonFromData); + Expect.equal(docs, "[]", "There should have been no documents returned"); + }) + ]), + TestList("WriteJsonArray", + [ + TestCase("succeeds when data is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteCustomJsonArray(Query.Find(PostgresDb.TableName), Parameters.None, writer, + Results.JsonFromData); + + var docs = StreamText(stream); + Expect.stringStarts(docs, "[", "The JSON array should have started with `[`"); + Expect.hasLength(docs.Split("{\"Id\":"), 6, "There should have been 5 documents returned"); + Expect.stringEnds(docs, "]", "The JSON array should have ended with `[`"); + }), + TestCase("succeeds when data is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteCustomJsonArray( + $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath", + [Tuple.Create("@path", Sql.@string("$.NumValue ? 
(@ > 100)"))], writer, Results.JsonFromData); + + Expect.equal(StreamText(stream), "[]", "There should have been no documents returned"); + }) + ]), TestList("CustomSingle", [ TestCase("succeeds when a row is found", async () => { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.CustomSingle($"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id", [Tuple.Create("@id", Sql.@string("one"))], Results.FromData); @@ -75,23 +224,50 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.CustomSingle($"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id", [Tuple.Create("@id", Sql.@string("eighty"))], Results.FromData); Expect.isNull(doc, "There should not have been a document returned"); }) ]), + TestList("CustomJsonSingle", + [ + TestCase("succeeds when a row is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + var doc = await conn.CustomJsonSingle( + $"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id", + [Tuple.Create("@id", Sql.@string("one"))], Results.JsonFromData); + Expect.stringStarts(doc, "{", "The document should have started with an open brace"); + Expect.stringContains(doc, "\"Id\": \"one\"", "An incorrect document was returned"); + Expect.stringEnds(doc, "}", "The document should have ended with a closing brace"); + }), + TestCase("succeeds when a row is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + var doc = await conn.CustomJsonSingle( + $"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id", + [Tuple.Create("@id", Sql.@string("eighty"))], Results.JsonFromData); + Expect.equal(doc, "{}", "There should not have been a document returned"); + }) + ]), TestList("CustomNonQuery", [ TestCase("succeeds when operating on data", async () => { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.CustomNonQuery($"DELETE FROM {PostgresDb.TableName}", Parameters.None); - + var remaining = await conn.CountAll(PostgresDb.TableName); Expect.equal(remaining, 0, "There should be no documents remaining in the table"); }), @@ -99,11 +275,11 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.CustomNonQuery($"DELETE FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath", [Tuple.Create("@path", Sql.@string("$.NumValue ? 
(@ > 100)"))]); - + var remaining = await conn.CountAll(PostgresDb.TableName); Expect.equal(remaining, 5, "There should be 5 documents remaining in the table"); }) @@ -183,7 +359,7 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); var before = await conn.CountAll(PostgresDb.TableName); Expect.equal(before, 0, "There should be no documents in the table"); - + await conn.Insert(PostgresDb.TableName, new JsonDocument { Id = "turkey", Sub = new() { Foo = "gobble", Bar = "gobble" } }); var after = await conn.FindAll(PostgresDb.TableName); @@ -213,7 +389,7 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); var before = await conn.CountAll(PostgresDb.TableName); Expect.equal(before, 0, "There should be no documents in the table"); - + await conn.Save(PostgresDb.TableName, new JsonDocument { Id = "test", Sub = new() { Foo = "a", Bar = "b" } }); var after = await conn.FindAll(PostgresDb.TableName); @@ -229,7 +405,7 @@ public class PostgresCSharpExtensionTests var before = await conn.FindById(PostgresDb.TableName, "test"); Expect.isNotNull(before, "There should have been a document returned"); Expect.equal(before.Id, "test", "The document is not correct"); - + await conn.Save(PostgresDb.TableName, new JsonDocument { Id = "test", Sub = new() { Foo = "c", Bar = "d" } }); var after = await conn.FindById(PostgresDb.TableName, "test"); @@ -241,8 +417,8 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); - + await LoadDocs(conn); + var theCount = await conn.CountAll(PostgresDb.TableName); Expect.equal(theCount, 5, "There should have been 5 matching documents"); }), @@ -250,7 +426,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var theCount = await conn.CountByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")]); @@ -260,7 +436,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var theCount = await conn.CountByContains(PostgresDb.TableName, new { Value = "purple" }); Expect.equal(theCount, 2, "There should have been 2 matching documents"); @@ -269,7 +445,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var theCount = await conn.CountByJsonPath(PostgresDb.TableName, "$.NumValue ? 
(@ > 5)"); Expect.equal(theCount, 3, "There should have been 3 matching documents"); @@ -280,7 +456,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsById(PostgresDb.TableName, "three"); Expect.isTrue(exists, "There should have been an existing document"); @@ -289,7 +465,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsById(PostgresDb.TableName, "seven"); Expect.isFalse(exists, "There should not have been an existing document"); @@ -301,7 +477,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Exists("Sub")]); Expect.isTrue(exists, "There should have been existing documents"); @@ -310,7 +486,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("NumValue", "six")]); @@ -323,7 +499,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsByContains(PostgresDb.TableName, new { NumValue = 10 }); Expect.isTrue(exists, "There should have been existing documents"); @@ -332,7 +508,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsByContains(PostgresDb.TableName, new { Nothing = "none" }); Expect.isFalse(exists, "There should not have been any existing documents"); @@ -344,7 +520,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsByJsonPath(PostgresDb.TableName, "$.Sub.Foo ? (@ == \"green\")"); Expect.isTrue(exists, "There should have been existing documents"); @@ -353,7 +529,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsByJsonPath(PostgresDb.TableName, "$.NumValue ? 
(@ > 1000)"); Expect.isFalse(exists, "There should not have been any existing documents"); @@ -387,7 +563,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var results = await conn.FindAllOrdered(PostgresDb.TableName, [Field.Named("n:NumValue")]); @@ -399,7 +575,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var results = await conn.FindAllOrdered(PostgresDb.TableName, [Field.Named("n:NumValue DESC")]); @@ -411,7 +587,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var results = await conn.FindAllOrdered(PostgresDb.TableName, [Field.Named("Id DESC")]); Expect.hasLength(results, 5, "There should have been 5 documents returned"); @@ -425,7 +601,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindById(PostgresDb.TableName, "two"); Expect.isNotNull(doc, "There should have been a document returned"); @@ -435,7 +611,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindById(PostgresDb.TableName, "three hundred eighty-seven"); Expect.isNull(doc, "There should not have been a document returned"); @@ -447,7 +623,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "another")]); @@ -457,7 +633,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "mauve")]); @@ -470,7 +646,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByFieldsOrdered(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")], [Field.Named("Id")]); @@ -482,7 +658,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByFieldsOrdered(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]); @@ -497,7 +673,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByContains(PostgresDb.TableName, new { Sub = new { Foo = "green" } }); @@ -507,7 +683,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByContains(PostgresDb.TableName, new { Value = "mauve" }); Expect.isEmpty(docs, "There should have been no documents returned"); @@ -520,7 +696,7 @@ public class 
PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, [Field.Named("Sub.Bar")]); @@ -532,7 +708,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, [Field.Named("Sub.Bar DESC")]); @@ -547,7 +723,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ < 15)"); Expect.equal(docs.Count, 3, "There should have been 3 documents returned"); @@ -556,7 +732,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ < 0)"); Expect.isEmpty(docs, "There should have been no documents returned"); @@ -569,7 +745,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByJsonPathOrdered(PostgresDb.TableName, "$.NumValue ? (@ < 15)", [Field.Named("n:NumValue")]); @@ -581,7 +757,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByJsonPathOrdered(PostgresDb.TableName, "$.NumValue ? 
(@ < 15)", [Field.Named("n:NumValue DESC")]); @@ -596,7 +772,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "another")]); @@ -607,7 +783,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")]); @@ -618,7 +794,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "absent")]); @@ -631,7 +807,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByFieldsOrdered(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")], [Field.Named("Id")]); @@ -642,7 +818,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByFieldsOrdered(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]); @@ -656,7 +832,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByContains(PostgresDb.TableName, new { Value = "another" }); Expect.isNotNull(doc, "There should have been a document returned"); @@ -666,7 +842,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByContains(PostgresDb.TableName, new { Sub = new { Foo = "green" } }); @@ -677,7 +853,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByContains(PostgresDb.TableName, new { Value = "absent" }); Expect.isNull(doc, "There should not have been a document returned"); @@ -689,7 +865,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, [Field.Named("Value")]); @@ -700,7 +876,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, [Field.Named("Value DESC")]); @@ -714,7 +890,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByJsonPath(PostgresDb.TableName, "$.Value ? 
(@ == \"FIRST!\")"); @@ -725,7 +901,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByJsonPath(PostgresDb.TableName, "$.Sub.Foo ? (@ == \"green\")"); @@ -736,7 +912,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByJsonPath(PostgresDb.TableName, "$.Id ? (@ == \"nope\")"); Expect.isNull(doc, "There should not have been a document returned"); @@ -748,7 +924,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByJsonPathOrdered(PostgresDb.TableName, "$.Sub.Foo ? (@ == \"green\")", [Field.Named("Sub.Bar")]); @@ -759,7 +935,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByJsonPathOrdered(PostgresDb.TableName, "$.Sub.Foo ? (@ == \"green\")", [Field.Named("Sub.Bar DESC")]); @@ -767,13 +943,848 @@ public class PostgresCSharpExtensionTests Expect.equal("four", doc.Id, "An incorrect document was returned"); }) ]), + TestList("JsonAll", + [ + TestCase("succeeds when there is data", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyAllData(await conn.JsonAll(PostgresDb.TableName)); + }), + TestCase("succeeds when there is no data", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + VerifyEmpty(await conn.JsonAll(PostgresDb.TableName)); + }) + ]), + TestList("JsonAllOrdered", + [ + TestCase("succeeds when ordering numerically", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyExpectedOrder(await conn.JsonAllOrdered(PostgresDb.TableName, [Field.Named("n:NumValue")]), + "one", "three", "two", "four", "five"); + }), + TestCase("succeeds when ordering numerically descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyExpectedOrder(await conn.JsonAllOrdered(PostgresDb.TableName, [Field.Named("n:NumValue DESC")]), + "five", "four", "two", "three", "one"); + }), + TestCase("succeeds when ordering alphabetically", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyExpectedOrder(await conn.JsonAllOrdered(PostgresDb.TableName, [Field.Named("Id DESC")]), + "two", "three", "one", "four", "five"); + }) + ]), + TestList("JsonById", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + var json = await conn.JsonById(PostgresDb.TableName, "two"); + Expect.stringStarts(json, """{"Id": "two",""", "An incorrect document was returned"); + Expect.stringEnds(json, "}", "JSON should have ended with this document"); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyNoDoc(await 
conn.JsonById(PostgresDb.TableName, "three hundred eighty-seven")); + }) + ]), + TestList("JsonByFields", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifySingleById( + await conn.JsonByFields(PostgresDb.TableName, FieldMatch.All, + [Field.In("Value", ["purple", "blue"]), Field.Exists("Sub")]), + "four"); + }), + TestCase("succeeds when documents are found using IN with numeric field", async() => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifySingleById( + await conn.JsonByFields(PostgresDb.TableName, FieldMatch.All, [Field.In("NumValue", [2, 4, 6, 8])]), + "three"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyEmpty(await conn.JsonByFields(PostgresDb.TableName, FieldMatch.All, + [Field.Equal("Value", "mauve"), Field.NotEqual("NumValue", 40)])); + }), + TestCase("succeeds for InArray when matching documents exist", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await conn.EnsureTable(PostgresDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await conn.Insert(PostgresDb.TableName, doc); + + var json = await conn.JsonByFields(PostgresDb.TableName, FieldMatch.All, + [Field.InArray("Values", PostgresDb.TableName, ["c"])]); + VerifyBeginEnd(json); + VerifyDocById(json, "first"); + VerifyDocById(json, "second"); + }), + TestCase("succeeds for InArray when no matching documents exist", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await conn.EnsureTable(PostgresDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await conn.Insert(PostgresDb.TableName, doc); + VerifyEmpty(await conn.JsonByFields(PostgresDb.TableName, FieldMatch.All, + [Field.InArray("Values", PostgresDb.TableName, ["j"])])); + }) + ]), + TestList("JsonByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyExpectedOrder( + await conn.JsonByFieldsOrdered(PostgresDb.TableName, FieldMatch.All, + [Field.Equal("Value", "purple")], [Field.Named("Id")]), + "five", "four"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyExpectedOrder( + await conn.JsonByFieldsOrdered(PostgresDb.TableName, FieldMatch.All, + [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]), + "four", "five"); + }) + ]), + TestList("JsonByContains", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + var json = await conn.JsonByContains(PostgresDb.TableName, new { Sub = new { Foo = "green" } }); + VerifyBeginEnd(json); + VerifyDocById(json, "two"); + VerifyDocById(json, "four"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyEmpty(await conn.JsonByContains(PostgresDb.TableName, new { Value = "mauve" })); + }) + ]), + TestList("JsonByContainsOrdered", + [ 
+ // Id = two, Sub.Bar = blue; Id = four, Sub.Bar = red + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyExpectedOrder( + await conn.JsonByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, + [Field.Named("Sub.Bar")]), + "two", "four"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyExpectedOrder( + await conn.JsonByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, + [Field.Named("Sub.Bar DESC")]), + "four", "two"); + }) + ]), + TestList("JsonByJsonPath", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + var json = await conn.JsonByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ < 15)"); + VerifyBeginEnd(json); + VerifyDocById(json, "one"); + VerifyDocById(json, "two"); + VerifyDocById(json, "three"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyEmpty(await conn.JsonByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ < 0)")); + }) + ]), + TestList("JsonByJsonPathOrdered", + [ + // Id = one, NumValue = 0; Id = two, NumValue = 10; Id = three, NumValue = 4 + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyExpectedOrder( + await conn.JsonByJsonPathOrdered(PostgresDb.TableName, "$.NumValue ? (@ < 15)", + [Field.Named("n:NumValue")]), + "one", "three", "two"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyExpectedOrder( + await conn.JsonByJsonPathOrdered(PostgresDb.TableName, "$.NumValue ? 
(@ < 15)", + [Field.Named("n:NumValue DESC")]), + "two", "three", "one"); + }) + ]), + TestList("JsonFirstByFields", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyDocById( + await conn.JsonFirstByFields(PostgresDb.TableName, FieldMatch.Any, + [Field.Equal("Value", "another")]), + "two"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyAnyById( + await conn.JsonFirstByFields(PostgresDb.TableName, FieldMatch.Any, + [Field.Equal("Value", "purple")]), + ["five", "four"]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyNoDoc(await conn.JsonFirstByFields(PostgresDb.TableName, FieldMatch.Any, + [Field.Equal("Value", "absent")])); + }) + ]), + TestList("JsonFirstByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyDocById( + await conn.JsonFirstByFieldsOrdered(PostgresDb.TableName, FieldMatch.Any, + [Field.Equal("Value", "purple")], [Field.Named("Id")]), + "five"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyDocById( + await conn.JsonFirstByFieldsOrdered(PostgresDb.TableName, FieldMatch.Any, + [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]), + "four"); + }) + ]), + TestList("JsonFirstByContains", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyDocById(await conn.JsonFirstByContains(PostgresDb.TableName, new { Value = "another" }), "two"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyAnyById(await conn.JsonFirstByContains(PostgresDb.TableName, new { Sub = new { Foo = "green" } }), + ["two", "four"]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyNoDoc(await conn.JsonFirstByContains(PostgresDb.TableName, new { Value = "absent" })); + }) + ]), + TestList("JsonFirstByContainsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyDocById( + await conn.JsonFirstByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, + [Field.Named("Value")]), + "two"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyDocById( + await conn.JsonFirstByContainsOrdered(PostgresDb.TableName, new { Sub = new { Foo = "green" } }, + [Field.Named("Value DESC")]), + "four"); + }) + ]), + TestList("JsonFirstByJsonPath", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = 
PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyDocById(await conn.JsonFirstByJsonPath(PostgresDb.TableName, """$.Value ? (@ == "FIRST!")"""), + "one"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyAnyById(await conn.JsonFirstByJsonPath(PostgresDb.TableName, """$.Sub.Foo ? (@ == "green")"""), + ["two", "four"]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyNoDoc(await conn.JsonFirstByJsonPath(PostgresDb.TableName, """$.Id ? (@ == "nope")""")); + }) + ]), + TestList("JsonFirstByJsonPathOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyDocById( + await conn.JsonFirstByJsonPathOrdered(PostgresDb.TableName, """$.Sub.Foo ? (@ == "green")""", + [Field.Named("Sub.Bar")]), + "two"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + VerifyDocById( + await conn.JsonFirstByJsonPathOrdered(PostgresDb.TableName, """$.Sub.Foo ? (@ == "green")""", + [Field.Named("Sub.Bar DESC")]), + "four"); + }) + ]), + TestList("WriteJsonAll", + [ + TestCase("succeeds when there is data", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonAll(PostgresDb.TableName, writer); + VerifyAllData(StreamText(stream)); + }), + TestCase("succeeds when there is no data", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonAll(PostgresDb.TableName, writer); + VerifyEmpty(StreamText(stream)); + }) + ]), + TestList("WriteJsonAllOrdered", + [ + TestCase("succeeds when ordering numerically", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonAllOrdered(PostgresDb.TableName, writer, [Field.Named("n:NumValue")]); + VerifyExpectedOrder(StreamText(stream), "one", "three", "two", "four", "five"); + }), + TestCase("succeeds when ordering numerically descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonAllOrdered(PostgresDb.TableName, writer, [Field.Named("n:NumValue DESC")]); + VerifyExpectedOrder(StreamText(stream), "five", "four", "two", "three", "one"); + }), + TestCase("succeeds when ordering alphabetically", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonAllOrdered(PostgresDb.TableName, writer, [Field.Named("Id DESC")]); + 
VerifyExpectedOrder(StreamText(stream), "two", "three", "one", "four", "five"); + }) + ]), + TestList("WriteJsonById", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonById(PostgresDb.TableName, writer, "two"); + var json = StreamText(stream); + Expect.stringStarts(json, """{"Id": "two",""", "An incorrect document was returned"); + Expect.stringEnds(json, "}", "JSON should have ended with this document"); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonById(PostgresDb.TableName, writer, "three hundred eighty-seven"); + VerifyNoDoc(StreamText(stream)); + }) + ]), + TestList("WriteJsonByFields", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.In("Value", ["purple", "blue"]), Field.Exists("Sub")]); + VerifySingleById(StreamText(stream), "four"); + }), + TestCase("succeeds when documents are found using IN with numeric field", async() => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.In("NumValue", [2, 4, 6, 8])]); + VerifySingleById(StreamText(stream), "three"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.Equal("Value", "mauve"), Field.NotEqual("NumValue", 40)]); + VerifyEmpty(StreamText(stream)); + }), + TestCase("succeeds for InArray when matching documents exist", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await conn.EnsureTable(PostgresDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await conn.Insert(PostgresDb.TableName, doc); + + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", PostgresDb.TableName, ["c"])]); + var json = StreamText(stream); + VerifyBeginEnd(json); + VerifyDocById(json, "first"); + VerifyDocById(json, "second"); + }), + TestCase("succeeds for InArray when no matching documents exist", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await conn.EnsureTable(PostgresDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await conn.Insert(PostgresDb.TableName, doc); + + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await 
conn.WriteJsonByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", PostgresDb.TableName, ["j"])]); + VerifyEmpty(StreamText(stream)); + }) + ]), + TestList("WriteJsonByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.All, + [Field.Equal("Value", "purple")], [Field.Named("Id")]); + VerifyExpectedOrder(StreamText(stream), "five", "four"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.All, + [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]); + VerifyExpectedOrder(StreamText(stream), "four", "five"); + }) + ]), + TestList("WriteJsonByContains", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByContains(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }); + var json = StreamText(stream); + VerifyBeginEnd(json); + VerifyDocById(json, "two"); + VerifyDocById(json, "four"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByContains(PostgresDb.TableName, writer, new { Value = "mauve" }); + VerifyEmpty(StreamText(stream)); + }) + ]), + TestList("WriteJsonByContainsOrdered", + [ + // Id = two, Sub.Bar = blue; Id = four, Sub.Bar = red + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByContainsOrdered(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }, + [Field.Named("Sub.Bar")]); + VerifyExpectedOrder(StreamText(stream), "two", "four"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByContainsOrdered(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }, + [Field.Named("Sub.Bar DESC")]); + VerifyExpectedOrder(StreamText(stream), "four", "two"); + }) + ]), + TestList("WriteJsonByJsonPath", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByJsonPath(PostgresDb.TableName, writer, "$.NumValue ? 
(@ < 15)"); + var json = StreamText(stream); + VerifyBeginEnd(json); + VerifyDocById(json, "one"); + VerifyDocById(json, "two"); + VerifyDocById(json, "three"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByJsonPath(PostgresDb.TableName, writer, "$.NumValue ? (@ < 0)"); + VerifyEmpty(StreamText(stream)); + }) + ]), + TestList("WriteJsonByJsonPathOrdered", + [ + // Id = one, NumValue = 0; Id = two, NumValue = 10; Id = three, NumValue = 4 + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByJsonPathOrdered(PostgresDb.TableName, writer, "$.NumValue ? (@ < 15)", + [Field.Named("n:NumValue")]); + VerifyExpectedOrder(StreamText(stream), "one", "three", "two"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByJsonPathOrdered(PostgresDb.TableName, writer, "$.NumValue ? (@ < 15)", + [Field.Named("n:NumValue DESC")]); + VerifyExpectedOrder(StreamText(stream), "two", "three", "one"); + }) + ]), + TestList("WriteJsonFirstByFields", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "another")]); + VerifyDocById(StreamText(stream), "two"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "purple")]); + VerifyAnyById(StreamText(stream), ["five", "four"]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "absent")]); + VerifyNoDoc(StreamText(stream)); + }) + ]), + TestList("WriteJsonFirstByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonFirstByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "purple")], [Field.Named("Id")]); + VerifyDocById(StreamText(stream), "five"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var 
db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonFirstByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]); + VerifyDocById(StreamText(stream), "four"); + }) + ]), + TestList("WriteJsonFirstByContains", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonFirstByContains(PostgresDb.TableName, writer, new { Value = "another" }); + VerifyDocById(StreamText(stream), "two"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonFirstByContains(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }); + VerifyAnyById(StreamText(stream), ["two", "four"]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonFirstByContains(PostgresDb.TableName, writer, new { Value = "absent" }); + VerifyNoDoc(StreamText(stream)); + }) + ]), + TestList("WriteJsonFirstByContainsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonFirstByContainsOrdered(PostgresDb.TableName, writer, + new { Sub = new { Foo = "green" } }, [Field.Named("Value")]); + VerifyDocById(StreamText(stream), "two"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonFirstByContainsOrdered(PostgresDb.TableName, writer, + new { Sub = new { Foo = "green" } }, [Field.Named("Value DESC")]); + VerifyDocById(StreamText(stream), "four"); + }) + ]), + TestList("WriteJsonFirstByJsonPath", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonFirstByJsonPath(PostgresDb.TableName, writer, """$.Value ? (@ == "FIRST!")"""); + VerifyDocById(StreamText(stream), "one"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonFirstByJsonPath(PostgresDb.TableName, writer, """$.Sub.Foo ? 
(@ == "green")"""); + VerifyAnyById(StreamText(stream), ["two", "four"]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonFirstByJsonPath(PostgresDb.TableName, writer, """$.Id ? (@ == "nope")"""); + VerifyNoDoc(StreamText(stream)); + }) + ]), + TestList("WriteJsonFirstByJsonPathOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonFirstByJsonPathOrdered(PostgresDb.TableName, writer, + """$.Sub.Foo ? (@ == "green")""", [Field.Named("Sub.Bar")]); + VerifyDocById(StreamText(stream), "two"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = PostgresDb.BuildDb(); + await using var conn = MkConn(db); + await LoadDocs(conn); + await using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonFirstByJsonPathOrdered(PostgresDb.TableName, writer, + """$.Sub.Foo ? (@ == "green")""", [Field.Named("Sub.Bar DESC")]); + VerifyDocById(StreamText(stream), "four"); + }) + ]), TestList("UpdateById", [ TestCase("succeeds when a document is updated", async () => { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.UpdateById(PostgresDb.TableName, "one", new JsonDocument { Id = "one", Sub = new() { Foo = "blue", Bar = "red" } }); @@ -792,7 +1803,7 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); var before = await conn.CountAll(PostgresDb.TableName); Expect.equal(before, 0, "There should have been no documents returned"); - + // This not raising an exception is the test await conn.UpdateById(PostgresDb.TableName, "test", new JsonDocument { Id = "x", Sub = new() { Foo = "blue", Bar = "red" } }); @@ -804,7 +1815,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.UpdateByFunc(PostgresDb.TableName, doc => doc.Id, new JsonDocument { Id = "one", Value = "le un", NumValue = 1 }); @@ -821,7 +1832,7 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); var before = await conn.CountAll(PostgresDb.TableName); Expect.equal(before, 0, "There should have been no documents returned"); - + // This not raising an exception is the test await conn.UpdateByFunc(PostgresDb.TableName, doc => doc.Id, new JsonDocument { Id = "one", Value = "le un", NumValue = 1 }); @@ -833,7 +1844,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.PatchById(PostgresDb.TableName, "one", new { NumValue = 44 }); var after = await conn.FindById(PostgresDb.TableName, "one"); @@ -846,7 +1857,7 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); var before = await conn.CountAll(PostgresDb.TableName); Expect.equal(before, 0, "There should have been no documents returned"); - + // This not raising an exception is the test await conn.PatchById(PostgresDb.TableName, "test", new { 
Foo = "green" }); }) @@ -857,7 +1868,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.PatchByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")], new { NumValue = 77 }); @@ -871,7 +1882,7 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); var before = await conn.CountAll(PostgresDb.TableName); Expect.equal(before, 0, "There should have been no documents returned"); - + // This not raising an exception is the test await conn.PatchByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "burgundy")], new { Foo = "green" }); @@ -883,7 +1894,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.PatchByContains(PostgresDb.TableName, new { Value = "purple" }, new { NumValue = 77 }); var after = await conn.CountByContains(PostgresDb.TableName, new { NumValue = 77 }); @@ -895,7 +1906,7 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); var before = await conn.CountAll(PostgresDb.TableName); Expect.equal(before, 0, "There should have been no documents returned"); - + // This not raising an exception is the test await conn.PatchByContains(PostgresDb.TableName, new { Value = "burgundy" }, new { Foo = "green" }); }) @@ -906,7 +1917,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.PatchByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ > 10)", new { NumValue = 1000 }); var after = await conn.CountByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ > 999)"); @@ -918,7 +1929,7 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); var before = await conn.CountAll(PostgresDb.TableName); Expect.equal(before, 0, "There should have been no documents returned"); - + // This not raising an exception is the test await conn.PatchByJsonPath(PostgresDb.TableName, "$.NumValue ? 
(@ < 0)", new { Foo = "green" }); }) @@ -929,7 +1940,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.RemoveFieldsById(PostgresDb.TableName, "two", ["Sub", "Value"]); var updated = await Find.ById(PostgresDb.TableName, "two"); @@ -941,7 +1952,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.RemoveFieldsById(PostgresDb.TableName, "two", ["Sub"]); var updated = await Find.ById(PostgresDb.TableName, "two"); @@ -953,8 +1964,8 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); - + await LoadDocs(conn); + // This not raising an exception is the test await conn.RemoveFieldsById(PostgresDb.TableName, "two", ["AFieldThatIsNotThere"]); }), @@ -962,7 +1973,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - + // This not raising an exception is the test await conn.RemoveFieldsById(PostgresDb.TableName, "two", ["Value"]); }) @@ -973,7 +1984,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.RemoveFieldsByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("NumValue", "17")], ["Sub", "Value"]); @@ -986,7 +1997,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.RemoveFieldsByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("NumValue", "17")], ["Sub"]); @@ -999,8 +2010,8 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); - + await LoadDocs(conn); + // This not raising an exception is the test await conn.RemoveFieldsByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("NumValue", "17")], ["Nothing"]); @@ -1009,7 +2020,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - + // This not raising an exception is the test await conn.RemoveFieldsByFields(PostgresDb.TableName, FieldMatch.Any, [Field.NotEqual("Abracadabra", "apple")], ["Value"]); @@ -1021,7 +2032,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.RemoveFieldsByContains(PostgresDb.TableName, new { NumValue = 17 }, ["Sub", "Value"]); var updated = await Find.ById(PostgresDb.TableName, "four"); @@ -1033,7 +2044,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.RemoveFieldsByContains(PostgresDb.TableName, new { NumValue = 17 }, ["Sub"]); var updated = await Find.ById(PostgresDb.TableName, "four"); @@ -1045,8 +2056,8 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); - + await LoadDocs(conn); + // This not raising an exception is the test await conn.RemoveFieldsByContains(PostgresDb.TableName, new { NumValue = 17 }, 
["Nothing"]); }), @@ -1054,7 +2065,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - + // This not raising an exception is the test await conn.RemoveFieldsByContains(PostgresDb.TableName, new { Abracadabra = "apple" }, ["Value"]); }) @@ -1065,7 +2076,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.RemoveFieldsByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ == 17)", ["Sub", "Value"]); var updated = await Find.ById(PostgresDb.TableName, "four"); @@ -1077,7 +2088,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.RemoveFieldsByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ == 17)", ["Sub"]); var updated = await Find.ById(PostgresDb.TableName, "four"); @@ -1089,8 +2100,8 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); - + await LoadDocs(conn); + // This not raising an exception is the test await conn.RemoveFieldsByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ == 17)", ["Nothing"]); }), @@ -1098,7 +2109,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - + // This not raising an exception is the test await conn.RemoveFieldsByJsonPath(PostgresDb.TableName, "$.Abracadabra ? (@ == \"apple\")", ["Value"]); }) @@ -1109,7 +2120,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteById(PostgresDb.TableName, "four"); var remaining = await conn.CountAll(PostgresDb.TableName); @@ -1119,7 +2130,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteById(PostgresDb.TableName, "thirty"); var remaining = await conn.CountAll(PostgresDb.TableName); @@ -1132,7 +2143,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteByFields(PostgresDb.TableName, FieldMatch.Any, [Field.NotEqual("Value", "purple")]); var remaining = await conn.CountAll(PostgresDb.TableName); @@ -1142,7 +2153,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteByFields(PostgresDb.TableName, FieldMatch.Any, [Field.Equal("Value", "crimson")]); var remaining = await conn.CountAll(PostgresDb.TableName); @@ -1155,7 +2166,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteByContains(PostgresDb.TableName, new { Value = "purple" }); var remaining = await conn.CountAll(PostgresDb.TableName); @@ -1165,7 +2176,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteByContains(PostgresDb.TableName, new { Value = "crimson" }); var remaining = await 
conn.CountAll(PostgresDb.TableName); @@ -1178,7 +2189,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteByJsonPath(PostgresDb.TableName, "$.Sub.Foo ? (@ == \"green\")"); var remaining = await conn.CountAll(PostgresDb.TableName); @@ -1188,7 +2199,7 @@ public class PostgresCSharpExtensionTests { await using var db = PostgresDb.BuildDb(); await using var conn = MkConn(db); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteByJsonPath(PostgresDb.TableName, "$.NumValue ? (@ > 100)"); var remaining = await conn.CountAll(PostgresDb.TableName); diff --git a/src/Tests.CSharp/PostgresCSharpTests.cs b/src/Tests.CSharp/PostgresCSharpTests.cs index efc7b90..df8ab6d 100644 --- a/src/Tests.CSharp/PostgresCSharpTests.cs +++ b/src/Tests.CSharp/PostgresCSharpTests.cs @@ -9,7 +9,7 @@ using static CommonExtensionsAndTypesForNpgsqlFSharp; using static Runner; /// -/// C# tests for the PostgreSQL implementation of BitBadger.Documents +/// C# tests for the PostgreSQL implementation of BitBadger.Documents /// public static class PostgresCSharpTests { @@ -323,21 +323,21 @@ public static class PostgresCSharpTests /// /// Add the test documents to the database /// - internal static async Task LoadDocs() + private static async Task LoadDocs() { foreach (var doc in JsonDocument.TestDocuments) await Document.Insert(SqliteDb.TableName, doc); } /// Set up a stream writer for a test - internal static StreamWriter WriteStream(Stream stream) + private static StreamWriter WriteStream(Stream stream) { StreamWriter writer = new(stream); writer.AutoFlush = true; return writer; } - /// Get the text of the given stream - internal static string StreamText(Stream stream) + /// Get the text of the given stream + private static string StreamText(Stream stream) { stream.Position = 0L; using StreamReader reader = new(stream); @@ -1298,20 +1298,20 @@ public static class PostgresCSharpTests Expect.stringEnds(json, "]", "The array should have ended with `]`"); } - /// Verify the presence of a document by its ID + /// Verify the presence of a document by its ID private static void VerifyDocById(string json, string docId) { Expect.stringContains(json, $"{{\"Id\": \"{docId}\",", $"Document `{docId}` not present"); } - /// Verify the presence of a document by its ID + /// Verify the presence of a document by its ID private static void VerifySingleById(string json, string docId) { VerifyBeginEnd(json); Expect.stringContains(json, $"{{\"Id\": \"{docId}\",", $"Document `{docId}` not present"); } - /// Verify the presence of any of the given document IDs in the given JSON + /// Verify the presence of any of the given document IDs in the given JSON private static void VerifyAnyById(string json, IEnumerable docIds) { var theIds = docIds.ToList(); @@ -1320,7 +1320,7 @@ public static class PostgresCSharpTests Expect.isTrue(false, $"Could not find any of IDs {ids} in {json}"); } - /// Verify the JSON for `all` returning data + /// Verify the JSON for `all` returning data private static void VerifyAllData(string json) { VerifyBeginEnd(json); @@ -1328,19 +1328,19 @@ public static class PostgresCSharpTests foreach (var docId in ids) VerifyDocById(json, docId); } - /// Verify an empty JSON array + /// Verify an empty JSON array private static void VerifyEmpty(string json) { Expect.equal(json, "[]", "There should be no documents returned"); } - /// Verify an empty JSON document + /// Verify an empty JSON 
document private static void VerifyNoDoc(string json) { Expect.equal(json, "{}", "There should be no document returned"); } - /// Verify the JSON for an ordered query + /// Verify the JSON for an ordered query private static void VerifyExpectedOrder(string json, string idFirst, string idSecond, string? idThird = null, string? idFourth = null, string? idFifth = null) { diff --git a/src/Tests/PostgresExtensionTests.fs b/src/Tests/PostgresExtensionTests.fs index 60fcf31..df675c1 100644 --- a/src/Tests/PostgresExtensionTests.fs +++ b/src/Tests/PostgresExtensionTests.fs @@ -1,5 +1,6 @@ module PostgresExtensionTests +open System.IO open BitBadger.Documents open BitBadger.Documents.Postgres open BitBadger.Documents.Tests @@ -13,6 +14,75 @@ let private mkConn (db: ThrowawayPostgresDb) = conn.Open() conn +/// Set up a stream writer for a test +let private writeStream (stream: Stream) = + let writer = new StreamWriter(stream) + writer.AutoFlush <- true + writer + +/// Get the text of the given stream +let private streamText (stream: Stream) = + stream.Position <- 0L + use reader = new StreamReader(stream) + reader.ReadToEnd() + +/// Verify a JSON array begins with "[" and ends with "]" +let private verifyBeginEnd json = + Expect.stringStarts json "[" "The array should have started with `[`" + Expect.stringEnds json "]" "The array should have ended with `]`" + +/// Verify the presence of a document by its ID +let private verifyDocById json docId = + Expect.stringContains json $"{{\"Id\": \"%s{docId}\"," $"Document `{docId}` not present" + +/// Verify the presence of a document by its ID +let private verifySingleById json docId = + verifyBeginEnd json + Expect.stringContains json $"{{\"Id\": \"%s{docId}\"," $"Document `{docId}` not present" + +/// Verify the presence of any of the given document IDs in the given JSON +let private verifyAnyById (json: string) (docIds: string list) = + match docIds |> List.tryFind (fun it -> json.Contains $"{{\"Id\": \"{it}\"") with + | Some _ -> () + | None -> + let ids = docIds |> String.concat ", " + Expect.isTrue false $"Could not find any of IDs {ids} in {json}" + +/// Verify the JSON for `all` returning data +let private verifyAllData json = + verifyBeginEnd json + [ "one"; "two"; "three"; "four"; "five" ] |> List.iter (verifyDocById json) + +/// Verify an empty JSON array +let private verifyEmpty json = + Expect.equal json "[]" "There should be no documents returned" + +/// Verify an empty JSON document +let private verifyNoDoc json = + Expect.equal json "{}" "There should be no document returned" + +/// Verify the JSON for an ordered query +let private verifyExpectedOrder (json: string) idFirst idSecond idThird idFourth idFifth = + let firstIdx = json.IndexOf $"{{\"Id\": \"%s{idFirst}\"," + let secondIdx = json.IndexOf $"{{\"Id\": \"%s{idSecond}\"," + verifyBeginEnd json + Expect.isGreaterThan secondIdx firstIdx $"`{idSecond}` should have been after `{idFirst}`" + match idThird with + | Some id3 -> + let thirdIdx = json.IndexOf $"{{\"Id\": \"%s{id3}\"," + Expect.isGreaterThan thirdIdx secondIdx $"`{id3}` should have been after `{idSecond}`" + match idFourth with + | Some id4 -> + let fourthIdx = json.IndexOf $"{{\"Id\": \"%s{id4}\"," + Expect.isGreaterThan fourthIdx thirdIdx $"`{id4}` should have been after `{id3}`" + match idFifth with + | Some id5 -> + let fifthIdx = json.IndexOf $"{{\"Id\": \"%s{id5}\"," + Expect.isGreaterThan fifthIdx fourthIdx $"`{id5}` should have been after `{id4}`" + | None -> () + | None -> () + | None -> () + /// Integration tests for 
 /// Integration tests for the F# extensions on the NpgsqlConnection data type
 let integrationTests =
     let loadDocs (conn: NpgsqlConnection) = backgroundTask {
@@ -41,6 +111,61 @@ let integrationTests =
             Expect.isEmpty docs "There should have been no documents returned"
         }
     ]
+    testList "customJsonArray" [
+        testTask "succeeds when data is found" {
+            use db = PostgresDb.BuildDb()
+            use conn = mkConn db
+            do! loadDocs conn
+
+            let! docs = conn.customJsonArray (Query.find PostgresDb.TableName) [] jsonFromData
+            Expect.stringStarts docs "[" "The JSON array should have started with `[`"
+            Expect.hasLength ((string docs).Split "{\"Id\":") 6 "There should have been 5 documents returned"
+            Expect.stringEnds docs "]" "The JSON array should have ended with `]`"
+        }
+        testTask "succeeds when data is not found" {
+            use db = PostgresDb.BuildDb()
+            use conn = mkConn db
+            do! loadDocs conn
+
+            let! docs =
+                conn.customJsonArray
+                    $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath"
+                    [ "@path", Sql.string "$.NumValue ? (@ > 100)" ]
+                    jsonFromData
+            Expect.equal docs "[]" "There should have been no documents returned"
+        }
+    ]
+    testList "writeCustomJsonArray" [
+        testTask "succeeds when data is found" {
+            use db = PostgresDb.BuildDb()
+            use conn = mkConn db
+            do! loadDocs conn
+
+            use stream = new MemoryStream()
+            use writer = writeStream stream
+            do! conn.writeCustomJsonArray (Query.find PostgresDb.TableName) [] writer jsonFromData
+
+            let docs = streamText stream
+            Expect.stringStarts docs "[" "The JSON array should have started with `[`"
+            Expect.hasLength (docs.Split "{\"Id\":") 6 "There should have been 5 documents returned"
+            Expect.stringEnds docs "]" "The JSON array should have ended with `]`"
+        }
+        testTask "succeeds when data is not found" {
+            use db = PostgresDb.BuildDb()
+            use conn = mkConn db
+            do! loadDocs conn
+
+            use stream = new MemoryStream()
+            use writer = writeStream stream
+            do! conn.writeCustomJsonArray
+                    $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath"
+                    [ "@path", Sql.string "$.NumValue ? (@ > 100)" ]
+                    writer
+                    jsonFromData
+
+            Expect.equal (streamText stream) "[]" "There should have been no documents returned"
+        }
+    ]
     testList "customSingle" [
         testTask "succeeds when a row is found" {
             use db = PostgresDb.BuildDb()
@@ -68,6 +193,34 @@ let integrationTests =
             Expect.isNone doc "There should not have been a document returned"
         }
     ]
+    testList "customJsonSingle" [
+        testTask "succeeds when a row is found" {
+            use db = PostgresDb.BuildDb()
+            use conn = mkConn db
+            do! loadDocs conn
+
+            let! doc =
+                conn.customJsonSingle
+                    $"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id"
+                    [ "@id", Sql.string "one" ]
+                    jsonFromData
+            Expect.stringStarts doc "{" "The document should have started with an open brace"
+            Expect.stringContains doc "\"Id\": \"one\"" "An incorrect document was returned"
+            Expect.stringEnds doc "}" "The document should have ended with a closing brace"
+        }
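// Editor's sketch (not part of the patch): `customJsonArray` turns any SELECT
// that returns the `data` column into a JSON array string, with `jsonFromData`
// as the usual mapping function; the jsonpath filter mirrors the tests above.

let bigNumValues (conn: NpgsqlConnection) =
    conn.customJsonArray
        $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath"
        [ "@path", Sql.string "$.NumValue ? (@ > 10)" ]
        jsonFromData

+        testTask "succeeds when a row is not found" {
+            use db = PostgresDb.BuildDb()
+            use conn = mkConn db
+            do! loadDocs conn
+
+            let!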
doc = + conn.customJsonSingle + $"SELECT data FROM {PostgresDb.TableName} WHERE data ->> 'Id' = @id" + [ "@id", Sql.string "eighty" ] + jsonFromData + Expect.equal doc "{}" "There should not have been a document returned" + } + ] testList "customNonQuery" [ testTask "succeeds when operating on data" { use db = PostgresDb.BuildDb() @@ -106,7 +259,7 @@ let integrationTests = let keyExists () = conn.customScalar "SELECT EXISTS (SELECT 1 FROM pg_class WHERE relname = 'idx_ensured_key') AS it" [] toExists - + let! exists = tableExists () let! alsoExists = keyExists () Expect.isFalse exists "The table should not exist already" @@ -124,7 +277,7 @@ let integrationTests = let indexExists () = conn.customScalar "SELECT EXISTS (SELECT 1 FROM pg_class WHERE relname = 'idx_ensured_document') AS it" [] toExists - + let! exists = indexExists () Expect.isFalse exists "The index should not exist already" @@ -139,7 +292,7 @@ let integrationTests = let indexExists () = conn.customScalar "SELECT EXISTS (SELECT 1 FROM pg_class WHERE relname = 'idx_ensured_test') AS it" [] toExists - + let! exists = indexExists () Expect.isFalse exists "The index should not exist already" @@ -213,7 +366,7 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + let! theCount = conn.countByFields PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] Expect.equal theCount 2 "There should have been 2 matching documents" } @@ -332,7 +485,7 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + let! results = conn.findAllOrdered PostgresDb.TableName [ Field.Named "n:NumValue" ] Expect.hasLength results 5 "There should have been 5 documents returned" Expect.equal @@ -344,7 +497,7 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + let! results = conn.findAllOrdered PostgresDb.TableName [ Field.Named "n:NumValue DESC" ] Expect.hasLength results 5 "There should have been 5 documents returned" Expect.equal @@ -356,7 +509,7 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + let! results = conn.findAllOrdered PostgresDb.TableName [ Field.Named "Id DESC" ] Expect.hasLength results 5 "There should have been 5 documents returned" Expect.equal @@ -677,6 +830,769 @@ let integrationTests = Expect.equal "four" doc.Value.Id "An incorrect document was returned" } ] + testList "jsonAll" [ + testTask "succeeds when there is data" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonAll PostgresDb.TableName + verifyAllData json + } + testTask "succeeds when there is no data" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + let! json = conn.jsonAll PostgresDb.TableName + verifyEmpty json + } + ] + testList "jsonAllOrdered" [ + testTask "succeeds when ordering numerically" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonAllOrdered PostgresDb.TableName [ Field.Named "n:NumValue" ] + verifyExpectedOrder json "one" "three" (Some "two") (Some "four") (Some "five") + } + testTask "succeeds when ordering numerically descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! 
json = conn.jsonAllOrdered PostgresDb.TableName [ Field.Named "n:NumValue DESC" ]
+            verifyExpectedOrder json "five" "four" (Some "two") (Some "three") (Some "one")
+        }
+        testTask "succeeds when ordering alphabetically" {
+            use db = PostgresDb.BuildDb()
+            use conn = mkConn db
+            do! loadDocs conn
+            let! json = conn.jsonAllOrdered PostgresDb.TableName [ Field.Named "Id DESC" ]
+            verifyExpectedOrder json "two" "three" (Some "one") (Some "four") (Some "five")
+        }
+    ]
+    testList "jsonById" [
+        testTask "succeeds when a document is found" {
+            use db = PostgresDb.BuildDb()
+            use conn = mkConn db
+            do! loadDocs conn
+
+            let! json = conn.jsonById PostgresDb.TableName "two"
+            Expect.stringStarts json """{"Id": "two",""" "An incorrect document was returned"
+            Expect.stringEnds json "}" "JSON should have ended with this document"
+        }
+        testTask "succeeds when a document is not found" {
+            use db = PostgresDb.BuildDb()
+            use conn = mkConn db
+            do! loadDocs conn
+            let! json = conn.jsonById PostgresDb.TableName "three hundred eighty-seven"
+            verifyNoDoc json
+        }
+    ]
+    testList "jsonByFields" [
+        testTask "succeeds when documents are found" {
+            use db = PostgresDb.BuildDb()
+            use conn = mkConn db
+            do! loadDocs conn
+            let! json =
+                conn.jsonByFields
+                    PostgresDb.TableName All [ Field.In "Value" [ "purple"; "blue" ]; Field.Exists "Sub" ]
+            verifySingleById json "four"
+        }
+        testTask "succeeds when documents are found using IN with numeric field" {
+            use db = PostgresDb.BuildDb()
+            use conn = mkConn db
+            do! loadDocs conn
+            let! json = conn.jsonByFields PostgresDb.TableName All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ]
+            verifySingleById json "three"
+        }
+        testTask "succeeds when documents are not found" {
+            use db = PostgresDb.BuildDb()
+            use conn = mkConn db
+            do! loadDocs conn
+            let! json =
+                conn.jsonByFields
+                    PostgresDb.TableName All [ Field.Equal "Value" "mauve"; Field.NotEqual "NumValue" 40 ]
+            verifyEmpty json
+        }
+        testTask "succeeds for InArray when matching documents exist" {
+            use db = PostgresDb.BuildDb()
+            use conn = mkConn db
+            do! conn.ensureTable PostgresDb.TableName
+            for doc in ArrayDocument.TestDocuments do do! conn.insert PostgresDb.TableName doc
+
+            let! json =
+                conn.jsonByFields PostgresDb.TableName All [ Field.InArray "Values" PostgresDb.TableName [ "c" ] ]
+            verifyBeginEnd json
+            verifyDocById json "first"
+            verifyDocById json "second"
+        }
+        testTask "succeeds for InArray when no matching documents exist" {
+            use db = PostgresDb.BuildDb()
+            use conn = mkConn db
+            do! conn.ensureTable PostgresDb.TableName
+            for doc in ArrayDocument.TestDocuments do do! conn.insert PostgresDb.TableName doc
+            let! json =
+                conn.jsonByFields PostgresDb.TableName All [ Field.InArray "Values" PostgresDb.TableName [ "j" ] ]
+            verifyEmpty json
+        }
+    ]
+    testList "jsonByFieldsOrdered" [
+        testTask "succeeds when sorting ascending" {
+            use db = PostgresDb.BuildDb()
+            use conn = mkConn db
+            do! loadDocs conn
+            let! json =
+                conn.jsonByFieldsOrdered
+                    PostgresDb.TableName All [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ]
+            verifyExpectedOrder json "five" "four" None None None
+        }
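// Editor's sketch (not part of the patch): the ordering DSL used by the
// *Ordered functions, as the tests above demonstrate -- a bare field name
// sorts as text, an "n:" prefix forces numeric comparison, and a trailing
// " DESC" reverses the direction.

let newestFirst (conn: NpgsqlConnection) =
    conn.jsonByFieldsOrdered
        PostgresDb.TableName All [ Field.Exists "Sub" ] [ Field.Named "n:NumValue DESC" ]

+        testTask "succeeds when sorting descending" {
+            use db = PostgresDb.BuildDb()
+            use conn = mkConn db
+            do! loadDocs conn
+            let!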
json = + conn.jsonByFieldsOrdered + PostgresDb.TableName All [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + verifyExpectedOrder json "four" "five" None None None + } + ] + testList "jsonByContains" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + let! json = conn.jsonByContains PostgresDb.TableName {| Sub = {| Foo = "green" |} |} + verifyBeginEnd json + verifyDocById json "two" + verifyDocById json "four" + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonByContains PostgresDb.TableName {| Value = "mauve" |} + verifyEmpty json + } + ] + testList "jsonByContainsOrdered" [ + // Id = two, Sub.Bar = blue; Id = four, Sub.Bar = red + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonByContainsOrdered + PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar" ] + verifyExpectedOrder json "two" "four" None None None + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonByContainsOrdered + PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar DESC" ] + verifyExpectedOrder json "four" "two" None None None + } + ] + testList "jsonByJsonPath" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + let! json = conn.jsonByJsonPath PostgresDb.TableName "$.NumValue ? (@ < 15)" + verifyBeginEnd json + verifyDocById json "one" + verifyDocById json "two" + verifyDocById json "three" + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonByJsonPath PostgresDb.TableName "$.NumValue ? (@ < 0)" + verifyEmpty json + } + ] + testList "jsonByJsonPathOrdered" [ + // Id = one, NumValue = 0; Id = two, NumValue = 10; Id = three, NumValue = 4 + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonByJsonPathOrdered PostgresDb.TableName "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue" ] + verifyExpectedOrder json "one" "three" (Some "two") None None + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonByJsonPathOrdered + PostgresDb.TableName "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue DESC" ] + verifyExpectedOrder json "two" "three" (Some "one") None None + } + ] + testList "jsonFirstByFields" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonFirstByFields PostgresDb.TableName Any [ Field.Equal "Value" "another" ] + verifyDocById json "two" + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonFirstByFields PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] + verifyAnyById json [ "five"; "four" ] + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! 
json = conn.jsonFirstByFields PostgresDb.TableName Any [ Field.Equal "Value" "absent" ] + verifyNoDoc json + } + ] + testList "jsonFirstByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonFirstByFieldsOrdered + PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + verifyDocById json "five" + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonFirstByFieldsOrdered + PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + verifyDocById json "four" + } + ] + testList "jsonFirstByContains" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonFirstByContains PostgresDb.TableName {| Value = "another" |} + verifyDocById json "two" + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonFirstByContains PostgresDb.TableName {| Sub = {| Foo = "green" |} |} + verifyAnyById json [ "two"; "four" ] + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonFirstByContains PostgresDb.TableName {| Value = "absent" |} + verifyNoDoc json + } + ] + testList "jsonFirstByContainsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonFirstByContainsOrdered + PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Value" ] + verifyDocById json "two" + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonFirstByContainsOrdered + PostgresDb.TableName {| Sub = {| Foo = "green" |} |} [ Field.Named "Value DESC" ] + verifyDocById json "four" + } + ] + testList "jsonFirstByJsonPath" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonFirstByJsonPath PostgresDb.TableName """$.Value ? (@ == "FIRST!")""" + verifyDocById json "one" + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonFirstByJsonPath PostgresDb.TableName """$.Sub.Foo ? (@ == "green")""" + verifyAnyById json [ "two"; "four" ] + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = conn.jsonFirstByJsonPath PostgresDb.TableName """$.Id ? (@ == "nope")""" + verifyNoDoc json + } + ] + testList "jsonFirstByJsonPathOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonFirstByJsonPathOrdered + PostgresDb.TableName """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar" ] + verifyDocById json "two" + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + let! json = + conn.jsonFirstByJsonPathOrdered + PostgresDb.TableName """$.Sub.Foo ? 
(@ == "green")""" [ Field.Named "Sub.Bar DESC" ] + verifyDocById json "four" + } + ] + testList "writeJsonAll" [ + testTask "succeeds when there is data" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonAll PostgresDb.TableName writer + verifyAllData (streamText stream) + } + testTask "succeeds when there is no data" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonAll PostgresDb.TableName writer + verifyEmpty (streamText stream) + } + ] + testList "writeJsonAllOrdered" [ + testTask "succeeds when ordering numerically" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonAllOrdered PostgresDb.TableName writer [ Field.Named "n:NumValue" ] + verifyExpectedOrder (streamText stream) "one" "three" (Some "two") (Some "four") (Some "five") + } + testTask "succeeds when ordering numerically descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonAllOrdered PostgresDb.TableName writer [ Field.Named "n:NumValue DESC" ] + verifyExpectedOrder (streamText stream) "five" "four" (Some "two") (Some "three") (Some "one") + } + testTask "succeeds when ordering alphabetically" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonAllOrdered PostgresDb.TableName writer [ Field.Named "Id DESC" ] + verifyExpectedOrder (streamText stream) "two" "three" (Some "one") (Some "four") (Some "five") + } + ] + testList "writeJsonById" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonById PostgresDb.TableName writer "two" + let json = streamText stream + Expect.stringStarts json """{"Id": "two",""" "An incorrect document was returned" + Expect.stringEnds json "}" "JSON should have ended with this document" + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonById PostgresDb.TableName writer "three hundred eighty-seven" + verifyNoDoc (streamText stream) + } + ] + testList "writeJsonByFields" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByFields + PostgresDb.TableName writer All [ Field.In "Value" [ "purple"; "blue" ]; Field.Exists "Sub" ] + verifySingleById (streamText stream) "four" + } + testTask "succeeds when documents are found using IN with numeric field" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByFields PostgresDb.TableName writer All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] + verifySingleById (streamText stream) "three" + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! 
loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByFields + PostgresDb.TableName writer All [ Field.Equal "Value" "mauve"; Field.NotEqual "NumValue" 40 ] + verifyEmpty (streamText stream) + } + testTask "succeeds for InArray when matching documents exist" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! conn.ensureTable PostgresDb.TableName + for doc in ArrayDocument.TestDocuments do do! conn.insert PostgresDb.TableName doc + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByFields + PostgresDb.TableName writer All [ Field.InArray "Values" PostgresDb.TableName [ "c" ] ] + let json = streamText stream + verifyBeginEnd json + verifyDocById json "first" + verifyDocById json "second" + } + testTask "succeeds for InArray when no matching documents exist" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! conn.ensureTable PostgresDb.TableName + for doc in ArrayDocument.TestDocuments do do! conn.insert PostgresDb.TableName doc + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByFields + PostgresDb.TableName writer All [ Field.InArray "Values" PostgresDb.TableName [ "j" ] ] + verifyEmpty (streamText stream) + } + ] + testList "writeJsonByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByFieldsOrdered + PostgresDb.TableName writer All [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + verifyExpectedOrder (streamText stream) "five" "four" None None None + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByFieldsOrdered + PostgresDb.TableName writer All [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + verifyExpectedOrder (streamText stream) "four" "five" None None None + } + ] + testList "writeJsonByContains" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByContains PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} + let json = streamText stream + verifyBeginEnd json + verifyDocById json "two" + verifyDocById json "four" + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByContains PostgresDb.TableName writer {| Value = "mauve" |} + verifyEmpty (streamText stream) + } + ] + testList "writeJsonByContainsOrdered" [ + // Id = two, Sub.Bar = blue; Id = four, Sub.Bar = red + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar" ] + verifyExpectedOrder (streamText stream) "two" "four" None None None + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! 
loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar DESC" ] + verifyExpectedOrder (streamText stream) "four" "two" None None None + } + ] + testList "writeJsonByJsonPath" [ + testTask "succeeds when documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByJsonPath PostgresDb.TableName writer "$.NumValue ? (@ < 15)" + let json = streamText stream + verifyBeginEnd json + verifyDocById json "one" + verifyDocById json "two" + verifyDocById json "three" + } + testTask "succeeds when documents are not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByJsonPath PostgresDb.TableName writer "$.NumValue ? (@ < 0)" + verifyEmpty (streamText stream) + } + ] + testList "writeJsonByJsonPathOrdered" [ + // Id = one, NumValue = 0; Id = two, NumValue = 10; Id = three, NumValue = 4 + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByJsonPathOrdered + PostgresDb.TableName writer "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue" ] + verifyExpectedOrder (streamText stream) "one" "three" (Some "two") None None + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByJsonPathOrdered + PostgresDb.TableName writer "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue DESC" ] + verifyExpectedOrder (streamText stream) "two" "three" (Some "one") None None + } + ] + testList "writeJsonFirstByFields" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "another" ] + verifyDocById (streamText stream) "two" + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] + verifyAnyById (streamText stream) [ "five"; "four" ] + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "absent" ] + verifyNoDoc (streamText stream) + } + ] + testList "writeJsonFirstByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! 
conn.writeJsonFirstByFieldsOrdered + PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + verifyDocById (streamText stream) "five" + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonFirstByFieldsOrdered + PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + verifyDocById (streamText stream) "four" + } + ] + testList "writeJsonFirstByContains" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonFirstByContains PostgresDb.TableName writer {| Value = "another" |} + verifyDocById (streamText stream) "two" + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonFirstByContains PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} + verifyAnyById (streamText stream) [ "two"; "four" ] + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonFirstByContains PostgresDb.TableName writer {| Value = "absent" |} + verifyNoDoc (streamText stream) + } + ] + testList "writeJsonFirstByContainsOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonFirstByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Value" ] + verifyDocById (streamText stream) "two" + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonFirstByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Value DESC" ] + verifyDocById (streamText stream) "four" + } + ] + testList "writeJsonFirstByJsonPath" [ + testTask "succeeds when a document is found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonFirstByJsonPath PostgresDb.TableName writer """$.Value ? (@ == "FIRST!")""" + verifyDocById (streamText stream) "one" + } + testTask "succeeds when multiple documents are found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonFirstByJsonPath PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" + verifyAnyById (streamText stream) [ "two"; "four" ] + } + testTask "succeeds when a document is not found" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonFirstByJsonPath PostgresDb.TableName writer """$.Id ? 
(@ == "nope")""" + verifyNoDoc (streamText stream) + } + ] + testList "writeJsonFirstByJsonPathOrdered" [ + testTask "succeeds when sorting ascending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonFirstByJsonPathOrdered + PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar" ] + verifyDocById (streamText stream) "two" + } + testTask "succeeds when sorting descending" { + use db = PostgresDb.BuildDb() + use conn = mkConn db + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonFirstByJsonPathOrdered + PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar DESC" ] + verifyDocById (streamText stream) "four" + } + ] testList "updateById" [ testTask "succeeds when a document is updated" { use db = PostgresDb.BuildDb() @@ -694,7 +1610,7 @@ let integrationTests = use conn = mkConn db let! before = conn.countAll PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! conn.updateById PostgresDb.TableName "test" { emptyDoc with Id = "x"; Sub = Some { Foo = "blue"; Bar = "red" } } @@ -707,7 +1623,7 @@ let integrationTests = do! loadDocs conn do! conn.updateByFunc - PostgresDb.TableName (_.Id) { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } + PostgresDb.TableName _.Id { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } let! after = conn.findById PostgresDb.TableName "one" Expect.isSome after "There should have been a document returned post-update" Expect.equal @@ -720,10 +1636,10 @@ let integrationTests = use conn = mkConn db let! before = conn.countAll PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! conn.updateByFunc - PostgresDb.TableName (_.Id) { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } + PostgresDb.TableName _.Id { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } } ] testList "patchById" [ @@ -731,7 +1647,7 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + do! conn.patchById PostgresDb.TableName "one" {| NumValue = 44 |} let! after = conn.findById PostgresDb.TableName "one" Expect.isSome after "There should have been a document returned post-update" @@ -742,7 +1658,7 @@ let integrationTests = use conn = mkConn db let! before = conn.countAll PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! conn.patchById PostgresDb.TableName "test" {| Foo = "green" |} } @@ -752,7 +1668,7 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + do! conn.patchByFields PostgresDb.TableName Any [ Field.Equal "Value" "purple" ] {| NumValue = 77 |} let! after = conn.countByFields PostgresDb.TableName Any [ Field.Equal "NumValue" "77" ] Expect.equal after 2 "There should have been 2 documents returned" @@ -762,7 +1678,7 @@ let integrationTests = use conn = mkConn db let! before = conn.countAll PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! 
conn.patchByFields PostgresDb.TableName Any [ Field.Equal "Value" "burgundy" ] {| Foo = "green" |} } @@ -772,7 +1688,7 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + do! conn.patchByContains PostgresDb.TableName {| Value = "purple" |} {| NumValue = 77 |} let! after = conn.countByContains PostgresDb.TableName {| NumValue = 77 |} Expect.equal after 2 "There should have been 2 documents returned" @@ -782,7 +1698,7 @@ let integrationTests = use conn = mkConn db let! before = conn.countAll PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! conn.patchByContains PostgresDb.TableName {| Value = "burgundy" |} {| Foo = "green" |} } @@ -792,7 +1708,7 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + do! conn.patchByJsonPath PostgresDb.TableName "$.NumValue ? (@ > 10)" {| NumValue = 1000 |} let! after = conn.countByJsonPath PostgresDb.TableName "$.NumValue ? (@ > 999)" Expect.equal after 2 "There should have been 2 documents returned" @@ -802,7 +1718,7 @@ let integrationTests = use conn = mkConn db let! before = conn.countAll PostgresDb.TableName Expect.equal before 0 "There should have been no documents returned" - + // This not raising an exception is the test do! conn.patchByJsonPath PostgresDb.TableName "$.NumValue ? (@ < 0)" {| Foo = "green" |} } @@ -834,14 +1750,14 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + // This not raising an exception is the test do! conn.removeFieldsById PostgresDb.TableName "two" [ "AFieldThatIsNotThere" ] } testTask "succeeds when no document is matched" { use db = PostgresDb.BuildDb() use conn = mkConn db - + // This not raising an exception is the test do! conn.removeFieldsById PostgresDb.TableName "two" [ "Value" ] } @@ -874,14 +1790,14 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + // This not raising an exception is the test do! conn.removeFieldsByFields PostgresDb.TableName Any [ Field.Equal "NumValue" "17" ] [ "Nothing" ] } testTask "succeeds when no document is matched" { use db = PostgresDb.BuildDb() use conn = mkConn db - + // This not raising an exception is the test do! conn.removeFieldsByFields PostgresDb.TableName Any [ Field.NotEqual "Abracadabra" "apple" ] [ "Value" ] @@ -914,14 +1830,14 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + // This not raising an exception is the test do! conn.removeFieldsByContains PostgresDb.TableName {| NumValue = 17 |} [ "Nothing" ] } testTask "succeeds when no document is matched" { use db = PostgresDb.BuildDb() use conn = mkConn db - + // This not raising an exception is the test do! conn.removeFieldsByContains PostgresDb.TableName {| Abracadabra = "apple" |} [ "Value" ] } @@ -953,14 +1869,14 @@ let integrationTests = use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn - + // This not raising an exception is the test do! conn.removeFieldsByJsonPath PostgresDb.TableName "$.NumValue ? (@ == 17)" [ "Nothing" ] } testTask "succeeds when no document is matched" { use db = PostgresDb.BuildDb() use conn = mkConn db - + // This not raising an exception is the test do! conn.removeFieldsByJsonPath PostgresDb.TableName "$.Abracadabra ? (@ == \"apple\")" [ "Value" ] } -- 2.47.2 From f15feb2238fcba6dc6d2c10cff04f762d4463aff Mon Sep 17 00:00:00 2001 From: "Daniel J. 
Summers" Date: Sun, 6 Apr 2025 14:53:50 -0400 Subject: [PATCH 09/22] Split SQLite into files; add JSON extraction funcs --- src/Postgres/Library.fs | 6 +- src/Sqlite/BitBadger.Documents.Sqlite.fsproj | 2 + src/Sqlite/Functions.fs | 418 +++++++ src/Sqlite/Library.fs | 1019 ++---------------- src/Sqlite/WithConn.fs | 510 +++++++++ 5 files changed, 1014 insertions(+), 941 deletions(-) create mode 100644 src/Sqlite/Functions.fs create mode 100644 src/Sqlite/WithConn.fs diff --git a/src/Postgres/Library.fs b/src/Postgres/Library.fs index beb9542..7a5520e 100644 --- a/src/Postgres/Library.fs +++ b/src/Postgres/Library.fs @@ -2,6 +2,7 @@ open System.IO open System.Text +open System.Threading.Tasks /// The type of index to generate for the document [] @@ -342,12 +343,13 @@ module Results = /// The query from which JSON should be extracted [] let writeJsonArray (writer: StreamWriter) (mapFunc: RowReader -> string) sqlProps = backgroundTask { + let await (it: Task) = it.ConfigureAwait(false).GetAwaiter().GetResult() do! writer.WriteAsync "[" let mutable isFirst = true do! sqlProps |> Sql.iterAsync (fun it -> - if isFirst then isFirst <- false else writer.Write "," - writer.WriteAsync(mapFunc it).ConfigureAwait(false).GetAwaiter().GetResult()) + if isFirst then isFirst <- false else await (writer.WriteAsync ",") + (mapFunc >> writer.WriteAsync >> await) it) do! writer.WriteAsync "]" } diff --git a/src/Sqlite/BitBadger.Documents.Sqlite.fsproj b/src/Sqlite/BitBadger.Documents.Sqlite.fsproj index e19b49d..c830a84 100644 --- a/src/Sqlite/BitBadger.Documents.Sqlite.fsproj +++ b/src/Sqlite/BitBadger.Documents.Sqlite.fsproj @@ -8,6 +8,8 @@ + + diff --git a/src/Sqlite/Functions.fs b/src/Sqlite/Functions.fs new file mode 100644 index 0000000..24c3316 --- /dev/null +++ b/src/Sqlite/Functions.fs @@ -0,0 +1,418 @@ +namespace BitBadger.Documents.Sqlite + +open Microsoft.Data.Sqlite + +/// Commands to execute custom SQL queries +[] +module Custom = + + /// Execute a query that returns a list of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function between the document and the domain item + /// A list of results for the given query + [] + let list<'TDoc> query parameters (mapFunc: SqliteDataReader -> 'TDoc) = + use conn = Configuration.dbConn () + WithConn.Custom.list<'TDoc> query parameters mapFunc conn + + /// Execute a query that returns a list of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function between the document and the domain item + /// A list of results for the given query + let List<'TDoc>(query, parameters, mapFunc: System.Func) = + use conn = Configuration.dbConn () + WithConn.Custom.List<'TDoc>(query, parameters, mapFunc, conn) + + /// Execute a query that returns one or no results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function between the document and the domain item + /// Some with the first matching result, or None if not found + [] + let single<'TDoc> query parameters (mapFunc: SqliteDataReader -> 'TDoc) = + use conn = Configuration.dbConn () + WithConn.Custom.single<'TDoc> query parameters mapFunc conn + + /// Execute a query that returns one or no results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function between the document and the domain item + /// The first matching result, or null if not found + let Single<'TDoc when 'TDoc: null and 'TDoc: not struct>( + query, 
parameters, mapFunc: System.Func) = + use conn = Configuration.dbConn () + WithConn.Custom.Single<'TDoc>(query, parameters, mapFunc, conn) + + /// Execute a query that returns no results + /// The query to retrieve the results + /// Parameters to use for the query + [] + let nonQuery query parameters = + use conn = Configuration.dbConn () + WithConn.Custom.nonQuery query parameters conn + + /// Execute a query that returns a scalar value + /// The query to retrieve the value + /// Parameters to use for the query + /// The mapping function to obtain the value + /// The scalar value for the query + [] + let scalar<'T when 'T: struct> query parameters (mapFunc: SqliteDataReader -> 'T) = + use conn = Configuration.dbConn () + WithConn.Custom.scalar<'T> query parameters mapFunc conn + + /// Execute a query that returns a scalar value + /// The query to retrieve the value + /// Parameters to use for the query + /// The mapping function to obtain the value + /// The scalar value for the query + let Scalar<'T when 'T: struct>(query, parameters, mapFunc: System.Func) = + use conn = Configuration.dbConn () + WithConn.Custom.Scalar<'T>(query, parameters, mapFunc, conn) + + +/// Functions to create tables and indexes +[] +module Definition = + + /// Create a document table + /// The table whose existence should be ensured (may include schema) + [] + let ensureTable name = + use conn = Configuration.dbConn () + WithConn.Definition.ensureTable name conn + + /// Create an index on field(s) within documents in the specified table + /// The table to be indexed (may include schema) + /// The name of the index to create + /// One or more fields to be indexed + [] + let ensureFieldIndex tableName indexName fields = + use conn = Configuration.dbConn () + WithConn.Definition.ensureFieldIndex tableName indexName fields conn + + +/// Document insert/save functions +[] +module Document = + + /// Insert a new document + /// The table into which the document should be inserted (may include schema) + /// The document to be inserted + [] + let insert<'TDoc> tableName (document: 'TDoc) = + use conn = Configuration.dbConn () + WithConn.Document.insert tableName document conn + + /// Save a document, inserting it if it does not exist and updating it if it does (AKA "upsert") + /// The table into which the document should be saved (may include schema) + /// The document to be saved + [] + let save<'TDoc> tableName (document: 'TDoc) = + use conn = Configuration.dbConn () + WithConn.Document.save tableName document conn + + +/// Commands to count documents +[] +module Count = + + /// Count all documents in a table + /// The table in which documents should be counted (may include schema) + /// The count of the documents in the table + [] + let all tableName = + use conn = Configuration.dbConn () + WithConn.Count.all tableName conn + + /// Count matching documents using JSON field comparisons (->> =, etc.) 
+ /// The table in which documents should be counted (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The count of matching documents in the table + [] + let byFields tableName howMatched fields = + use conn = Configuration.dbConn () + WithConn.Count.byFields tableName howMatched fields conn + + +/// Commands to determine if documents exist +[] +module Exists = + + /// Determine if a document exists for the given ID + /// The table in which existence should be checked (may include schema) + /// The ID of the document whose existence should be checked + /// True if a document exists, false if not + [] + let byId tableName (docId: 'TKey) = + use conn = Configuration.dbConn () + WithConn.Exists.byId tableName docId conn + + /// Determine if a document exists using JSON field comparisons (->> =, etc.) + /// The table in which existence should be checked (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// True if any matching documents exist, false if not + [] + let byFields tableName howMatched fields = + use conn = Configuration.dbConn () + WithConn.Exists.byFields tableName howMatched fields conn + + +/// Commands to retrieve documents +[] +module Find = + + /// Retrieve all documents in the given table + /// The table from which documents should be retrieved (may include schema) + /// All documents from the given table + [] + let all<'TDoc> tableName = + use conn = Configuration.dbConn () + WithConn.Find.all<'TDoc> tableName conn + + /// Retrieve all documents in the given table + /// The table from which documents should be retrieved (may include schema) + /// All documents from the given table + let All<'TDoc> tableName = + use conn = Configuration.dbConn () + WithConn.Find.All<'TDoc>(tableName, conn) + + /// Retrieve all documents in the given table ordered by the given fields in the document + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// All documents from the given table, ordered by the given fields + [] + let allOrdered<'TDoc> tableName orderFields = + use conn = Configuration.dbConn () + WithConn.Find.allOrdered<'TDoc> tableName orderFields conn + + /// Retrieve all documents in the given table ordered by the given fields in the document + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// All documents from the given table, ordered by the given fields + let AllOrdered<'TDoc> tableName orderFields = + use conn = Configuration.dbConn () + WithConn.Find.AllOrdered<'TDoc>(tableName, orderFields, conn) + + /// Retrieve a document by its ID + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// Some with the document if found, None otherwise + [] + let byId<'TKey, 'TDoc> tableName docId = + use conn = Configuration.dbConn () + WithConn.Find.byId<'TKey, 'TDoc> tableName docId conn + + /// Retrieve a document by its ID + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The document if found, null otherwise + let ById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, docId) = + use conn = Configuration.dbConn () + WithConn.Find.ById<'TKey, 'TDoc>(tableName, docId, conn) + + /// Retrieve documents matching JSON field comparisons 
(->> =, etc.) + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// All documents matching the given fields + [] + let byFields<'TDoc> tableName howMatched fields = + use conn = Configuration.dbConn () + WithConn.Find.byFields<'TDoc> tableName howMatched fields conn + + /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// All documents matching the given fields + let ByFields<'TDoc>(tableName, howMatched, fields) = + use conn = Configuration.dbConn () + WithConn.Find.ByFields<'TDoc>(tableName, howMatched, fields, conn) + + /// + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in + /// the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// All documents matching the given fields, ordered by the other given fields + [] + let byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields = + use conn = Configuration.dbConn () + WithConn.Find.byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn + + /// + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in + /// the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// All documents matching the given fields, ordered by the other given fields + let ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields) = + use conn = Configuration.dbConn () + WithConn.Find.ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) + + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Some with the first document, or None if not found + [] + let firstByFields<'TDoc> tableName howMatched fields = + use conn = Configuration.dbConn () + WithConn.Find.firstByFields<'TDoc> tableName howMatched fields conn + + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The first document, or null if not found + let FirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, howMatched, fields) = + use conn = Configuration.dbConn () + WithConn.Find.FirstByFields<'TDoc>(tableName, howMatched, fields, conn) + + /// + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
ordered by the given + /// fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// + /// Some with the first document ordered by the given fields, or None if not found + /// + [] + let firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields = + use conn = Configuration.dbConn () + WithConn.Find.firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn + + /// + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given + /// fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The first document ordered by the given fields, or null if not found + let FirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( + tableName, howMatched, queryFields, orderFields) = + use conn = Configuration.dbConn () + WithConn.Find.FirstByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) + + +/// Commands to update documents +[] +module Update = + + /// Update (replace) an entire document by its ID + /// The table in which a document should be updated (may include schema) + /// The ID of the document to be updated (replaced) + /// The new document + [] + let byId tableName (docId: 'TKey) (document: 'TDoc) = + use conn = Configuration.dbConn () + WithConn.Update.byId tableName docId document conn + + /// + /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the document + /// + /// The table in which a document should be updated (may include schema) + /// The function to obtain the ID of the document + /// The new document + [] + let byFunc tableName (idFunc: 'TDoc -> 'TKey) (document: 'TDoc) = + use conn = Configuration.dbConn () + WithConn.Update.byFunc tableName idFunc document conn + + /// + /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the document + /// + /// The table in which a document should be updated (may include schema) + /// The function to obtain the ID of the document + /// The new document + let ByFunc(tableName, idFunc: System.Func<'TDoc, 'TKey>, document: 'TDoc) = + use conn = Configuration.dbConn () + WithConn.Update.ByFunc(tableName, idFunc, document, conn) + + +/// Commands to patch (partially update) documents +[] +module Patch = + + /// Patch a document by its ID + /// The table in which a document should be patched (may include schema) + /// The ID of the document to patch + /// The partial document to patch the existing document + [] + let byId tableName (docId: 'TKey) (patch: 'TPatch) = + use conn = Configuration.dbConn () + WithConn.Patch.byId tableName docId patch conn + + /// + /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) 
+    ///
+    /// The table in which documents should be patched (may include schema)
+    /// Whether to match any or all of the field conditions
+    /// The field conditions to match
+    /// The partial document to patch the existing document
+    []
+    let byFields tableName howMatched fields (patch: 'TPatch) =
+        use conn = Configuration.dbConn ()
+        WithConn.Patch.byFields tableName howMatched fields patch conn
+
+
+/// Commands to remove fields from documents
+[]
+module RemoveFields =
+
+    /// Remove fields from a document by the document's ID
+    /// The table in which a document should be modified (may include schema)
+    /// The ID of the document to modify
+    /// One or more field names to remove from the document
+    []
+    let byId tableName (docId: 'TKey) fieldNames =
+        use conn = Configuration.dbConn ()
+        WithConn.RemoveFields.byId tableName docId fieldNames conn
+
+    /// Remove fields from documents via a comparison on JSON fields in the document
+    /// The table in which documents should be modified (may include schema)
+    /// Whether to match any or all of the field conditions
+    /// The field conditions to match
+    /// One or more field names to remove from the matching documents
+    []
+    let byFields tableName howMatched fields fieldNames =
+        use conn = Configuration.dbConn ()
+        WithConn.RemoveFields.byFields tableName howMatched fields fieldNames conn
+
+
+/// Commands to delete documents
+[]
+module Delete =
+
+    /// Delete a document by its ID
+    /// The table in which a document should be deleted (may include schema)
+    /// The ID of the document to delete
+    []
+    let byId tableName (docId: 'TKey) =
+        use conn = Configuration.dbConn ()
+        WithConn.Delete.byId tableName docId conn
+
+    /// Delete documents by matching a JSON field comparison query (->> =, etc.)
+    /// The table in which documents should be deleted (may include schema)
+    /// Whether to match any or all of the field conditions
+    /// The field conditions to match
+    []
+    let byFields tableName howMatched fields =
+        use conn = Configuration.dbConn ()
+        WithConn.Delete.byFields tableName howMatched fields conn
diff --git a/src/Sqlite/Library.fs b/src/Sqlite/Library.fs
index 215a62b..8e74554 100644
--- a/src/Sqlite/Library.fs
+++ b/src/Sqlite/Library.fs
@@ -1,5 +1,7 @@
 namespace BitBadger.Documents.Sqlite

+open System.IO
+open System.Text
 open BitBadger.Documents
 open Microsoft.Data.Sqlite

@@ -36,12 +38,10 @@ module Configuration =
 []
 module Query =

-    ///
-    /// Create a WHERE clause fragment to implement a comparison on fields in a JSON document
-    ///
+    /// Create a WHERE clause fragment to implement a comparison on fields in a JSON document
     /// How the fields should be matched
     /// The fields for the comparisons
-    /// A WHERE clause implementing the comparisons for the given fields
+    /// A WHERE clause implementing the comparisons for the given fields
     []
     let whereByFields (howMatched: FieldMatch) fields =
         let name = ParameterName()
@@ -63,21 +63,21 @@ module Query =
             | _ -> $"{it.Path SQLite AsSql} {it.Comparison.OpSql} {name.Derive it.ParameterName}")
         |> String.concat $" {howMatched} "

-    /// Create a WHERE clause fragment to implement an ID-based query
+    /// Create a WHERE clause fragment to implement an ID-based query
     /// The ID of the document
-    /// A WHERE clause fragment identifying a document by its ID
+    /// A WHERE clause fragment identifying a document by its ID
     []
     let whereById (docId: 'TKey) =
         whereByFields Any [ { Field.Equal (Configuration.idField ()) docId with ParameterName = Some "@id" } ]

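// Editor's sketch (not part of the patch): Query.whereByFields yields only the
// WHERE-clause fragment, so callers compose it into a full statement; parameter
// names appear to be derived per field. A hypothetical composition:

let findPurpleSql tableName =
    $"SELECT data FROM %s{tableName} WHERE "
    + Query.whereByFields Any [ Field.Equal "Value" "purple" ]

-    /// Create an UPDATE statement to patch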
documents + /// Create an UPDATE statement to patch documents /// The table to be updated /// A query to patch documents [] let patch tableName = $"UPDATE %s{tableName} SET data = json_patch(data, json(@data))" - /// Create an UPDATE statement to remove fields from documents + /// Create an UPDATE statement to remove fields from documents /// The table to be updated /// The parameters with the field names to be removed /// A query to remove fields from documents @@ -136,7 +136,7 @@ module Parameters = SqliteParameter(name, Configuration.serializer().Serialize it) /// Create JSON field parameters - /// The Fields to convert to parameters + /// The Fields to convert to parameters /// The current parameters for the query /// A unified sequence of parameter names and values [] @@ -169,7 +169,7 @@ module Parameters = /// Append JSON field name parameters for the given field names to the given parameters /// The name of the parameter to use for each field /// The names of fields to be addressed - /// The name (@name) and parameter value for the field names + /// The name (@name) and parameter value for the field names [] let fieldNameParams paramName fieldNames = fieldNames @@ -189,14 +189,14 @@ module Results = /// Create a domain item from a document, specifying the field in which the document is found /// The field name containing the JSON document - /// A SqliteDataReader set to the row with the document to be constructed + /// A SqliteDataReader set to the row with the document to be constructed /// The constructed domain item [] let fromDocument<'TDoc> field (rdr: SqliteDataReader) : 'TDoc = Configuration.serializer().Deserialize<'TDoc>(rdr.GetString(rdr.GetOrdinal field)) /// Create a domain item from a document - /// A SqliteDataReader set to the row with the document to be constructed + /// A SqliteDataReader set to the row with the document to be constructed /// The constructed domain item [] let fromData<'TDoc> rdr = @@ -232,20 +232,86 @@ module Results = } /// Extract a count from the first column - /// A SqliteDataReader set to the row with the count to retrieve + /// A SqliteDataReader set to the row with the count to retrieve /// The count from the row [] let toCount (rdr: SqliteDataReader) = rdr.GetInt64 0 /// Extract a true/false value from the first column - /// A SqliteDataReader set to the row with the true/false value to retrieve + /// A SqliteDataReader set to the row with the true/false value to retrieve /// The true/false value from the row /// SQLite implements boolean as 1 = true, 0 = false [] let toExists rdr = toCount rdr > 0L + /// Retrieve a JSON document, specifying the field in which the document is found + /// The field name containing the JSON document + /// A SqliteDataReader set to the row with the document to be constructed + /// The JSON document (an empty JSON document if not found) + [] + let jsonFromDocument field (rdr: SqliteDataReader) = + try + let idx = rdr.GetOrdinal field + if rdr.IsDBNull idx then "{}" else rdr.GetString idx + with :? 
System.IndexOutOfRangeException -> "{}" + + /// Retrieve a JSON document + /// A SqliteDataReader set to the row with the document to be constructed + /// The JSON document (an empty JSON document if not found) + [] + let jsonFromData rdr = + jsonFromDocument "data" rdr + + /// + /// Create a JSON array for the results of the given command, using the specified mapping function + /// + /// The command to execute + /// The mapping function to extract JSON from the query's results + /// A JSON array of items from the reader + [] + let toJsonArray (cmd: SqliteCommand) (mapFunc: SqliteDataReader -> string) = backgroundTask { + use! rdr = cmd.ExecuteReaderAsync() + let it = StringBuilder "[" + while! rdr.ReadAsync() do + if it.Length > 2 then ignore (it.Append ",") + it.Append(mapFunc rdr) |> ignore + return it.Append("]").ToString() + } + + /// + /// Create a JSON array for the results of the given command, using the specified mapping function + /// + /// The command to execute + /// The mapping function to extract JSON from the query's results + /// A JSON array of items from the reader + let ToJsonArray (cmd: SqliteCommand) (mapFunc: System.Func) = + toJsonArray cmd mapFunc.Invoke + + /// Write a JSON array of items for the results of a query to the given StreamWriter + /// The command to execute + /// The StreamWriter to which results should be written + /// The mapping function to extract JSON from the query's results + [] + let writeJsonArray (cmd: SqliteCommand) (writer: StreamWriter) (mapFunc: SqliteDataReader -> string) = + backgroundTask { + use! rdr = cmd.ExecuteReaderAsync() + do! writer.WriteAsync "[" + let mutable isFirst = true + while! rdr.ReadAsync() do + if isFirst then isFirst <- false else do! writer.WriteAsync "," + do! writer.WriteAsync(mapFunc rdr) + do! writer.WriteAsync "]" + } + + /// Write a JSON array of items for the results of a query to the given StreamWriter + /// The command to execute + /// The StreamWriter to which results should be written + /// The mapping function to extract JSON from the query's results + let WriteJsonArray (cmd: SqliteCommand) (writer: StreamWriter) (mapFunc: System.Func) = + writeJsonArray cmd writer mapFunc.Invoke + [] module internal Helpers = @@ -256,928 +322,3 @@ module internal Helpers = let! 
_ = cmd.ExecuteNonQueryAsync() () } - - -/// Versions of queries that accept a SqliteConnection as the last parameter -module WithConn = - - /// Commands to execute custom SQL queries - [] - module Custom = - - /// Execute a query that returns a list of results - /// The query to retrieve the results - /// Parameters to use for the query - /// The mapping function between the document and the domain item - /// The SqliteConnection to use to execute the query - /// A list of results for the given query - [] - let list<'TDoc> query (parameters: SqliteParameter seq) (mapFunc: SqliteDataReader -> 'TDoc) - (conn: SqliteConnection) = - use cmd = conn.CreateCommand() - cmd.CommandText <- query - cmd.Parameters.AddRange parameters - toCustomList<'TDoc> cmd mapFunc - - /// Execute a query that returns a list of results - /// The query to retrieve the results - /// Parameters to use for the query - /// The mapping function between the document and the domain item - /// The SqliteConnection to use to execute the query - /// A list of results for the given query - let List<'TDoc>( - query, parameters: SqliteParameter seq, mapFunc: System.Func, - conn: SqliteConnection - ) = - use cmd = conn.CreateCommand() - cmd.CommandText <- query - cmd.Parameters.AddRange parameters - ToCustomList<'TDoc>(cmd, mapFunc) - - /// Execute a query that returns one or no results - /// The query to retrieve the results - /// Parameters to use for the query - /// The mapping function between the document and the domain item - /// The SqliteConnection to use to execute the query - /// Some with the first matching result, or None if not found - [] - let single<'TDoc> query parameters (mapFunc: SqliteDataReader -> 'TDoc) conn = backgroundTask { - let! results = list query parameters mapFunc conn - return FSharp.Collections.List.tryHead results - } - - /// Execute a query that returns one or no results - /// The query to retrieve the results - /// Parameters to use for the query - /// The mapping function between the document and the domain item - /// The SqliteConnection to use to execute the query - /// The first matching result, or null if not found - let Single<'TDoc when 'TDoc: null and 'TDoc: not struct>( - query, parameters, mapFunc: System.Func, conn - ) = backgroundTask { - let! result = single<'TDoc> query parameters mapFunc.Invoke conn - return Option.toObj result - } - - /// Execute a query that returns no results - /// The query to retrieve the results - /// Parameters to use for the query - /// The SqliteConnection to use to execute the query - [] - let nonQuery query (parameters: SqliteParameter seq) (conn: SqliteConnection) = - use cmd = conn.CreateCommand() - cmd.CommandText <- query - cmd.Parameters.AddRange parameters - write cmd - - /// Execute a query that returns a scalar value - /// The query to retrieve the value - /// Parameters to use for the query - /// The mapping function to obtain the value - /// The SqliteConnection to use to execute the query - /// The scalar value for the query - [] - let scalar<'T when 'T : struct> query (parameters: SqliteParameter seq) (mapFunc: SqliteDataReader -> 'T) - (conn: SqliteConnection) = backgroundTask { - use cmd = conn.CreateCommand() - cmd.CommandText <- query - cmd.Parameters.AddRange parameters - use! rdr = cmd.ExecuteReaderAsync() - let! 
isFound = rdr.ReadAsync() - return if isFound then mapFunc rdr else Unchecked.defaultof<'T> - } - - /// Execute a query that returns a scalar value - /// The query to retrieve the value - /// Parameters to use for the query - /// The mapping function to obtain the value - /// The SqliteConnection to use to execute the query - /// The scalar value for the query - let Scalar<'T when 'T: struct>(query, parameters, mapFunc: System.Func, conn) = - scalar<'T> query parameters mapFunc.Invoke conn - - /// Functions to create tables and indexes - [] - module Definition = - - /// Create a document table - /// The table whose existence should be ensured (may include schema) - /// The SqliteConnection to use to execute the query - [] - let ensureTable name conn = backgroundTask { - do! Custom.nonQuery (Query.Definition.ensureTable name) [] conn - do! Custom.nonQuery (Query.Definition.ensureKey name SQLite) [] conn - } - - /// Create an index on field(s) within documents in the specified table - /// The table to be indexed (may include schema) - /// The name of the index to create - /// One or more fields to be indexed - /// The SqliteConnection to use to execute the query - [] - let ensureFieldIndex tableName indexName fields conn = - Custom.nonQuery (Query.Definition.ensureIndexOn tableName indexName fields SQLite) [] conn - - /// Commands to add documents - [] - module Document = - - /// Insert a new document - /// The table into which the document should be inserted (may include schema) - /// The document to be inserted - /// The SqliteConnection to use to execute the query - [] - let insert<'TDoc> tableName (document: 'TDoc) conn = - let query = - match Configuration.autoIdStrategy () with - | Disabled -> Query.insert tableName - | strategy -> - let idField = Configuration.idField () - let dataParam = - if AutoId.NeedsAutoId strategy document idField then - match strategy with - | Number -> $"(SELECT coalesce(max(data->>'{idField}'), 0) + 1 FROM {tableName})" - | Guid -> $"'{AutoId.GenerateGuid()}'" - | RandomString -> $"'{AutoId.GenerateRandomString(Configuration.idStringLength ())}'" - | Disabled -> "@data" - |> function it -> $"json_set(@data, '$.{idField}', {it})" - else "@data" - (Query.insert tableName).Replace("@data", dataParam) - Custom.nonQuery query [ jsonParam "@data" document ] conn - - /// - /// Save a document, inserting it if it does not exist and updating it if it does (AKA "upsert") - /// - /// The table into which the document should be saved (may include schema) - /// The document to be saved - /// The SqliteConnection to use to execute the query - [] - let save<'TDoc> tableName (document: 'TDoc) conn = - Custom.nonQuery (Query.save tableName) [ jsonParam "@data" document ] conn - - /// Commands to count documents - [] - module Count = - - /// Count all documents in a table - /// The table in which documents should be counted (may include schema) - /// The SqliteConnection to use to execute the query - /// The count of the documents in the table - [] - let all tableName conn = - Custom.scalar (Query.count tableName) [] toCount conn - - /// Count matching documents using JSON field comparisons (->> =, etc.) 
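// ------------------------------------------------------------------------
// Editorial sketch (not part of this patch): counting over an explicit
// connection. Table and field names are hypothetical, and Field.Less is
// assumed from this library's field-constructor API.
let countPeople (conn: SqliteConnection) = backgroundTask {
    let! total  = Count.all "people" conn
    let! minors = Count.byFields "people" Any [ Field.Less "Age" 18 ] conn
    return total, minors
}
// ------------------------------------------------------------------------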
- /// The table in which documents should be counted (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The SqliteConnection to use to execute the query - /// The count of matching documents in the table - [] - let byFields tableName howMatched fields conn = - Custom.scalar - (Query.byFields (Query.count tableName) howMatched fields) (addFieldParams fields []) toCount conn - - /// Commands to determine if documents exist - [] - module Exists = - - /// Determine if a document exists for the given ID - /// The table in which existence should be checked (may include schema) - /// The ID of the document whose existence should be checked - /// The SqliteConnection to use to execute the query - /// True if a document exists, false if not - [] - let byId tableName (docId: 'TKey) conn = - Custom.scalar (Query.exists tableName (Query.whereById docId)) [ idParam docId ] toExists conn - - /// Determine if a document exists using JSON field comparisons (->> =, etc.) - /// The table in which existence should be checked (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The SqliteConnection to use to execute the query - /// True if any matching documents exist, false if not - [] - let byFields tableName howMatched fields conn = - Custom.scalar - (Query.exists tableName (Query.whereByFields howMatched fields)) - (addFieldParams fields []) - toExists - conn - - /// Commands to retrieve documents - [] - module Find = - - /// Retrieve all documents in the given table - /// The table from which documents should be retrieved (may include schema) - /// The SqliteConnection to use to execute the query - /// All documents from the given table - [] - let all<'TDoc> tableName conn = - Custom.list<'TDoc> (Query.find tableName) [] fromData<'TDoc> conn - - /// Retrieve all documents in the given table - /// The table from which documents should be retrieved (may include schema) - /// The SqliteConnection to use to execute the query - /// All documents from the given table - let All<'TDoc>(tableName, conn) = - Custom.List(Query.find tableName, [], fromData<'TDoc>, conn) - - /// Retrieve all documents in the given table ordered by the given fields in the document - /// The table from which documents should be retrieved (may include schema) - /// Fields by which the results should be ordered - /// The SqliteConnection to use to execute the query - /// All documents from the given table, ordered by the given fields - [] - let allOrdered<'TDoc> tableName orderFields conn = - Custom.list<'TDoc> (Query.find tableName + Query.orderBy orderFields SQLite) [] fromData<'TDoc> conn - - /// Retrieve all documents in the given table ordered by the given fields in the document - /// The table from which documents should be retrieved (may include schema) - /// Fields by which the results should be ordered - /// The SqliteConnection to use to execute the query - /// All documents from the given table, ordered by the given fields - let AllOrdered<'TDoc>(tableName, orderFields, conn) = - Custom.List(Query.find tableName + Query.orderBy orderFields SQLite, [], fromData<'TDoc>, conn) - - /// Retrieve a document by its ID - /// The table from which a document should be retrieved (may include schema) - /// The ID of the document to retrieve - /// The SqliteConnection to use to execute the query - /// Some with the document if found, None otherwise - [] - let byId<'TKey, 'TDoc> tableName (docId: 'TKey) 
conn = - Custom.single<'TDoc> (Query.byId (Query.find tableName) docId) [ idParam docId ] fromData<'TDoc> conn - - /// Retrieve a document by its ID - /// The table from which a document should be retrieved (may include schema) - /// The ID of the document to retrieve - /// The SqliteConnection to use to execute the query - /// The document if found, null otherwise - let ById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, docId: 'TKey, conn) = - Custom.Single<'TDoc>(Query.byId (Query.find tableName) docId, [ idParam docId ], fromData<'TDoc>, conn) - - /// Retrieve documents matching JSON field comparisons (->> =, etc.) - /// The table from which documents should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The SqliteConnection to use to execute the query - /// All documents matching the given fields - [] - let byFields<'TDoc> tableName howMatched fields conn = - Custom.list<'TDoc> - (Query.byFields (Query.find tableName) howMatched fields) - (addFieldParams fields []) - fromData<'TDoc> - conn - - /// Retrieve documents matching JSON field comparisons (->> =, etc.) - /// The table from which documents should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The SqliteConnection to use to execute the query - /// All documents matching the given fields - let ByFields<'TDoc>(tableName, howMatched, fields, conn) = - Custom.List<'TDoc>( - Query.byFields (Query.find tableName) howMatched fields, - addFieldParams fields [], - fromData<'TDoc>, - conn) - - /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields - /// in the document - /// - /// The table from which documents should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// Fields by which the results should be ordered - /// The SqliteConnection to use to execute the query - /// All documents matching the given fields, ordered by the other given fields - [] - let byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn = - Custom.list<'TDoc> - (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields SQLite) - (addFieldParams queryFields []) - fromData<'TDoc> - conn - - /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields - /// in the document - /// - /// The table from which documents should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// Fields by which the results should be ordered - /// The SqliteConnection to use to execute the query - /// All documents matching the given fields, ordered by the other given fields - let ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) = - Custom.List<'TDoc>( - Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields SQLite, - addFieldParams queryFields [], - fromData<'TDoc>, - conn) - - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
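// ------------------------------------------------------------------------
// Editorial sketch (not part of this patch): typical lookups with the Find
// functions above, over an explicit SqliteConnection. The "people" table,
// key value, and Person record (as in the earlier sketch) are hypothetical;
// Field.GreaterOrEqual is assumed from this library's field API.
let lookUps (conn: SqliteConnection) = backgroundTask {
    let! one    = Find.byId<string, Person> "people" "one" conn   // Option<Person>
    let! adults = Find.byFields<Person> "people" All [ Field.GreaterOrEqual "Age" 18 ] conn
    return one, adults
}
// ------------------------------------------------------------------------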
- /// The table from which a document should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The SqliteConnection to use to execute the query - /// Some with the first document, or None if not found - [] - let firstByFields<'TDoc> tableName howMatched fields conn = - Custom.single - $"{Query.byFields (Query.find tableName) howMatched fields} LIMIT 1" - (addFieldParams fields []) - fromData<'TDoc> - conn - - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) - /// The table from which a document should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The SqliteConnection to use to execute the query - /// The first document, or null if not found - let FirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, howMatched, fields, conn) = - Custom.Single( - $"{Query.byFields (Query.find tableName) howMatched fields} LIMIT 1", - addFieldParams fields [], - fromData<'TDoc>, - conn) - - /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the - /// given fields in the document - /// - /// The table from which a document should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// Fields by which the results should be ordered - /// The SqliteConnection to use to execute the query - /// - /// Some with the first document ordered by the given fields, or None if not found - /// - [] - let firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn = - Custom.single - $"{Query.byFields (Query.find tableName) howMatched queryFields}{Query.orderBy orderFields SQLite} LIMIT 1" - (addFieldParams queryFields []) - fromData<'TDoc> - conn - - /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
ordered by the - /// given fields in the document - /// - /// The table from which a document should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// Fields by which the results should be ordered - /// The SqliteConnection to use to execute the query - /// The first document ordered by the given fields, or null if not found - let FirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( - tableName, howMatched, queryFields, orderFields, conn) = - Custom.Single( - $"{Query.byFields (Query.find tableName) howMatched queryFields}{Query.orderBy orderFields SQLite} LIMIT 1", - addFieldParams queryFields [], - fromData<'TDoc>, - conn) - - /// Commands to update documents - [] - module Update = - - /// Update (replace) an entire document by its ID - /// The table in which a document should be updated (may include schema) - /// The ID of the document to be updated (replaced) - /// The new document - /// The SqliteConnection to use to execute the query - [] - let byId tableName (docId: 'TKey) (document: 'TDoc) conn = - Custom.nonQuery - (Query.statementWhere (Query.update tableName) (Query.whereById docId)) - [ idParam docId; jsonParam "@data" document ] - conn - - /// - /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the - /// document - /// - /// The table in which a document should be updated (may include schema) - /// The function to obtain the ID of the document - /// The new document - /// The SqliteConnection to use to execute the query - [] - let byFunc tableName (idFunc: 'TDoc -> 'TKey) (document: 'TDoc) conn = - byId tableName (idFunc document) document conn - - /// - /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the - /// document - /// - /// The table in which a document should be updated (may include schema) - /// The function to obtain the ID of the document - /// The new document - /// The SqliteConnection to use to execute the query - let ByFunc(tableName, idFunc: System.Func<'TDoc, 'TKey>, document: 'TDoc, conn) = - byFunc tableName idFunc.Invoke document conn - - /// Commands to patch (partially update) documents - [] - module Patch = - - /// Patch a document by its ID - /// The table in which a document should be patched (may include schema) - /// The ID of the document to patch - /// The partial document to patch the existing document - /// The SqliteConnection to use to execute the query - [] - let byId tableName (docId: 'TKey) (patch: 'TPatch) conn = - Custom.nonQuery - (Query.byId (Query.patch tableName) docId) [ idParam docId; jsonParam "@data" patch ] conn - - /// - /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, - /// etc.) 
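// ------------------------------------------------------------------------
// Editorial sketch (not part of this patch): replacing a document whose key
// comes from the document itself, then patching every match of a field
// query. All names are hypothetical.
let promote (person: Person) (conn: SqliteConnection) = backgroundTask {
    do! Update.byFunc "people" (fun p -> p.Id) person conn
    do! Patch.byFields "people" Any [ Field.Equal "Name" "Eleanor" ] {| Vip = true |} conn
}
// ------------------------------------------------------------------------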
- /// - /// The table in which documents should be patched (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The partial document to patch the existing document - /// The SqliteConnection to use to execute the query - [] - let byFields tableName howMatched fields (patch: 'TPatch) conn = - Custom.nonQuery - (Query.byFields (Query.patch tableName) howMatched fields) - (addFieldParams fields [ jsonParam "@data" patch ]) - conn - - /// Commands to remove fields from documents - [] - module RemoveFields = - - /// Remove fields from a document by the document's ID - /// The table in which a document should be modified (may include schema) - /// The ID of the document to modify - /// One or more field names to remove from the document - /// The SqliteConnection to use to execute the query - [] - let byId tableName (docId: 'TKey) fieldNames conn = - let nameParams = fieldNameParams "@name" fieldNames - Custom.nonQuery - (Query.byId (Query.removeFields tableName nameParams) docId) - (idParam docId |> Seq.singleton |> Seq.append nameParams) - conn - - /// Remove fields from documents via a comparison on JSON fields in the document - /// The table in which documents should be modified (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// One or more field names to remove from the matching documents - /// The SqliteConnection to use to execute the query - [] - let byFields tableName howMatched fields fieldNames conn = - let nameParams = fieldNameParams "@name" fieldNames - Custom.nonQuery - (Query.byFields (Query.removeFields tableName nameParams) howMatched fields) - (addFieldParams fields nameParams) - conn - - /// Commands to delete documents - [] - module Delete = - - /// Delete a document by its ID - /// The table in which a document should be deleted (may include schema) - /// The ID of the document to delete - /// The SqliteConnection to use to execute the query - [] - let byId tableName (docId: 'TKey) conn = - Custom.nonQuery (Query.byId (Query.delete tableName) docId) [ idParam docId ] conn - - /// Delete documents by matching a JSON field comparison query (->> =, etc.) 
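// ------------------------------------------------------------------------
// Editorial sketch (not part of this patch): pruning fields from and
// deleting documents over an explicit connection; all names are
// hypothetical.
let cleanUp (conn: SqliteConnection) = backgroundTask {
    do! RemoveFields.byId "people" "one" [ "Nickname" ] conn
    do! Delete.byFields "people" Any [ Field.Equal "Status" "inactive" ] conn
}
// ------------------------------------------------------------------------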
- /// The table in which documents should be deleted (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The SqliteConnection to use to execute the query - [] - let byFields tableName howMatched fields conn = - Custom.nonQuery (Query.byFields (Query.delete tableName) howMatched fields) (addFieldParams fields []) conn - - -/// Commands to execute custom SQL queries -[] -module Custom = - - /// Execute a query that returns a list of results - /// The query to retrieve the results - /// Parameters to use for the query - /// The mapping function between the document and the domain item - /// A list of results for the given query - [] - let list<'TDoc> query parameters (mapFunc: SqliteDataReader -> 'TDoc) = - use conn = Configuration.dbConn () - WithConn.Custom.list<'TDoc> query parameters mapFunc conn - - /// Execute a query that returns a list of results - /// The query to retrieve the results - /// Parameters to use for the query - /// The mapping function between the document and the domain item - /// A list of results for the given query - let List<'TDoc>(query, parameters, mapFunc: System.Func) = - use conn = Configuration.dbConn () - WithConn.Custom.List<'TDoc>(query, parameters, mapFunc, conn) - - /// Execute a query that returns one or no results - /// The query to retrieve the results - /// Parameters to use for the query - /// The mapping function between the document and the domain item - /// Some with the first matching result, or None if not found - [] - let single<'TDoc> query parameters (mapFunc: SqliteDataReader -> 'TDoc) = - use conn = Configuration.dbConn () - WithConn.Custom.single<'TDoc> query parameters mapFunc conn - - /// Execute a query that returns one or no results - /// The query to retrieve the results - /// Parameters to use for the query - /// The mapping function between the document and the domain item - /// The first matching result, or null if not found - let Single<'TDoc when 'TDoc: null and 'TDoc: not struct>( - query, parameters, mapFunc: System.Func) = - use conn = Configuration.dbConn () - WithConn.Custom.Single<'TDoc>(query, parameters, mapFunc, conn) - - /// Execute a query that returns no results - /// The query to retrieve the results - /// Parameters to use for the query - [] - let nonQuery query parameters = - use conn = Configuration.dbConn () - WithConn.Custom.nonQuery query parameters conn - - /// Execute a query that returns a scalar value - /// The query to retrieve the value - /// Parameters to use for the query - /// The mapping function to obtain the value - /// The scalar value for the query - [] - let scalar<'T when 'T: struct> query parameters (mapFunc: SqliteDataReader -> 'T) = - use conn = Configuration.dbConn () - WithConn.Custom.scalar<'T> query parameters mapFunc conn - - /// Execute a query that returns a scalar value - /// The query to retrieve the value - /// Parameters to use for the query - /// The mapping function to obtain the value - /// The scalar value for the query - let Scalar<'T when 'T: struct>(query, parameters, mapFunc: System.Func) = - use conn = Configuration.dbConn () - WithConn.Custom.Scalar<'T>(query, parameters, mapFunc, conn) - - -/// Functions to create tables and indexes -[] -module Definition = - - /// Create a document table - /// The table whose existence should be ensured (may include schema) - [] - let ensureTable name = - use conn = Configuration.dbConn () - WithConn.Definition.ensureTable name conn - - /// Create an index on field(s) 
within documents in the specified table - /// The table to be indexed (may include schema) - /// The name of the index to create - /// One or more fields to be indexed - [] - let ensureFieldIndex tableName indexName fields = - use conn = Configuration.dbConn () - WithConn.Definition.ensureFieldIndex tableName indexName fields conn - - -/// Document insert/save functions -[] -module Document = - - /// Insert a new document - /// The table into which the document should be inserted (may include schema) - /// The document to be inserted - [] - let insert<'TDoc> tableName (document: 'TDoc) = - use conn = Configuration.dbConn () - WithConn.Document.insert tableName document conn - - /// Save a document, inserting it if it does not exist and updating it if it does (AKA "upsert") - /// The table into which the document should be saved (may include schema) - /// The document to be saved - [] - let save<'TDoc> tableName (document: 'TDoc) = - use conn = Configuration.dbConn () - WithConn.Document.save tableName document conn - - -/// Commands to count documents -[] -module Count = - - /// Count all documents in a table - /// The table in which documents should be counted (may include schema) - /// The count of the documents in the table - [] - let all tableName = - use conn = Configuration.dbConn () - WithConn.Count.all tableName conn - - /// Count matching documents using JSON field comparisons (->> =, etc.) - /// The table in which documents should be counted (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The count of matching documents in the table - [] - let byFields tableName howMatched fields = - use conn = Configuration.dbConn () - WithConn.Count.byFields tableName howMatched fields conn - - -/// Commands to determine if documents exist -[] -module Exists = - - /// Determine if a document exists for the given ID - /// The table in which existence should be checked (may include schema) - /// The ID of the document whose existence should be checked - /// True if a document exists, false if not - [] - let byId tableName (docId: 'TKey) = - use conn = Configuration.dbConn () - WithConn.Exists.byId tableName docId conn - - /// Determine if a document exists using JSON field comparisons (->> =, etc.) 
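// ------------------------------------------------------------------------
// Editorial sketch (not part of this patch): the connectionless commands
// above obtain their connection from Configuration.dbConn (). The setup
// call shown (Configuration.useConnectionString) and the connection string
// are assumptions for illustration.
let bootstrap () = backgroundTask {
    Configuration.useConnectionString "Data Source=./app.db"
    do! Definition.ensureTable "people"
    do! Document.insert "people" { Id = "one"; Name = "Eleanor"; Age = 42 }
    let! stillThere = Exists.byId "people" "one"
    return stillThere
}
// ------------------------------------------------------------------------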
- /// The table in which existence should be checked (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// True if any matching documents exist, false if not - [] - let byFields tableName howMatched fields = - use conn = Configuration.dbConn () - WithConn.Exists.byFields tableName howMatched fields conn - - -/// Commands to retrieve documents -[] -module Find = - - /// Retrieve all documents in the given table - /// The table from which documents should be retrieved (may include schema) - /// All documents from the given table - [] - let all<'TDoc> tableName = - use conn = Configuration.dbConn () - WithConn.Find.all<'TDoc> tableName conn - - /// Retrieve all documents in the given table - /// The table from which documents should be retrieved (may include schema) - /// All documents from the given table - let All<'TDoc> tableName = - use conn = Configuration.dbConn () - WithConn.Find.All<'TDoc>(tableName, conn) - - /// Retrieve all documents in the given table ordered by the given fields in the document - /// The table from which documents should be retrieved (may include schema) - /// Fields by which the results should be ordered - /// All documents from the given table, ordered by the given fields - [] - let allOrdered<'TDoc> tableName orderFields = - use conn = Configuration.dbConn () - WithConn.Find.allOrdered<'TDoc> tableName orderFields conn - - /// Retrieve all documents in the given table ordered by the given fields in the document - /// The table from which documents should be retrieved (may include schema) - /// Fields by which the results should be ordered - /// All documents from the given table, ordered by the given fields - let AllOrdered<'TDoc> tableName orderFields = - use conn = Configuration.dbConn () - WithConn.Find.AllOrdered<'TDoc>(tableName, orderFields, conn) - - /// Retrieve a document by its ID - /// The table from which a document should be retrieved (may include schema) - /// The ID of the document to retrieve - /// Some with the document if found, None otherwise - [] - let byId<'TKey, 'TDoc> tableName docId = - use conn = Configuration.dbConn () - WithConn.Find.byId<'TKey, 'TDoc> tableName docId conn - - /// Retrieve a document by its ID - /// The table from which a document should be retrieved (may include schema) - /// The ID of the document to retrieve - /// The document if found, null otherwise - let ById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, docId) = - use conn = Configuration.dbConn () - WithConn.Find.ById<'TKey, 'TDoc>(tableName, docId, conn) - - /// Retrieve documents matching JSON field comparisons (->> =, etc.) - /// The table from which documents should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// All documents matching the given fields - [] - let byFields<'TDoc> tableName howMatched fields = - use conn = Configuration.dbConn () - WithConn.Find.byFields<'TDoc> tableName howMatched fields conn - - /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
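// ------------------------------------------------------------------------
// Editorial sketch (not part of this patch): ordered, field-based retrieval
// without an explicit connection. Field.Named for sort fields is assumed
// from this library's field API; the other names are hypothetical.
let adultsByName () =
    Find.byFieldsOrdered<Person> "people" All [ Field.GreaterOrEqual "Age" 18 ] [ Field.Named "Name" ]
// ------------------------------------------------------------------------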
- /// The table from which documents should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// All documents matching the given fields - let ByFields<'TDoc>(tableName, howMatched, fields) = - use conn = Configuration.dbConn () - WithConn.Find.ByFields<'TDoc>(tableName, howMatched, fields, conn) - - /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in - /// the document - /// - /// The table from which documents should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// Fields by which the results should be ordered - /// All documents matching the given fields, ordered by the other given fields - [] - let byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields = - use conn = Configuration.dbConn () - WithConn.Find.byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn - - /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in - /// the document - /// - /// The table from which documents should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// Fields by which the results should be ordered - /// All documents matching the given fields, ordered by the other given fields - let ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields) = - use conn = Configuration.dbConn () - WithConn.Find.ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) - - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) - /// The table from which a document should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// Some with the first document, or None if not found - [] - let firstByFields<'TDoc> tableName howMatched fields = - use conn = Configuration.dbConn () - WithConn.Find.firstByFields<'TDoc> tableName howMatched fields conn - - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) - /// The table from which a document should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The first document, or null if not found - let FirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, howMatched, fields) = - use conn = Configuration.dbConn () - WithConn.Find.FirstByFields<'TDoc>(tableName, howMatched, fields, conn) - - /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given - /// fields in the document - /// - /// The table from which a document should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// Fields by which the results should be ordered - /// - /// Some with the first document ordered by the given fields, or None if not found - /// - [] - let firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields = - use conn = Configuration.dbConn () - WithConn.Find.firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn - - /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
ordered by the given - /// fields in the document - /// - /// The table from which a document should be retrieved (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// Fields by which the results should be ordered - /// The first document ordered by the given fields, or null if not found - let FirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( - tableName, howMatched, queryFields, orderFields) = - use conn = Configuration.dbConn () - WithConn.Find.FirstByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) - - -/// Commands to update documents -[] -module Update = - - /// Update (replace) an entire document by its ID - /// The table in which a document should be updated (may include schema) - /// The ID of the document to be updated (replaced) - /// The new document - [] - let byId tableName (docId: 'TKey) (document: 'TDoc) = - use conn = Configuration.dbConn () - WithConn.Update.byId tableName docId document conn - - /// - /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the document - /// - /// The table in which a document should be updated (may include schema) - /// The function to obtain the ID of the document - /// The new document - [] - let byFunc tableName (idFunc: 'TDoc -> 'TKey) (document: 'TDoc) = - use conn = Configuration.dbConn () - WithConn.Update.byFunc tableName idFunc document conn - - /// - /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the document - /// - /// The table in which a document should be updated (may include schema) - /// The function to obtain the ID of the document - /// The new document - let ByFunc(tableName, idFunc: System.Func<'TDoc, 'TKey>, document: 'TDoc) = - use conn = Configuration.dbConn () - WithConn.Update.ByFunc(tableName, idFunc, document, conn) - - -/// Commands to patch (partially update) documents -[] -module Patch = - - /// Patch a document by its ID - /// The table in which a document should be patched (may include schema) - /// The ID of the document to patch - /// The partial document to patch the existing document - [] - let byId tableName (docId: 'TKey) (patch: 'TPatch) = - use conn = Configuration.dbConn () - WithConn.Patch.byId tableName docId patch conn - - /// - /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) 
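// ------------------------------------------------------------------------
// Editorial sketch (not part of this patch): "first match" queries return an
// Option in F#, and the order fields decide which document wins when several
// match. Field.Named is assumed from the field API; other names are
// hypothetical.
let incrementOldest () = backgroundTask {
    let! found = Find.firstByFieldsOrdered<Person> "people" Any [ Field.Equal "Name" "Eleanor" ] [ Field.Named "Age" ]
    match found with
    | Some p -> do! Update.byId "people" p.Id { p with Age = p.Age + 1 }
    | None -> ()
}
// ------------------------------------------------------------------------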
- /// - /// The table in which documents should be patched (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// The partial document to patch the existing document - [] - let byFields tableName howMatched fields (patch: 'TPatch) = - use conn = Configuration.dbConn () - WithConn.Patch.byFields tableName howMatched fields patch conn - - -/// Commands to remove fields from documents -[] -module RemoveFields = - - /// Remove fields from a document by the document's ID - /// The table in which a document should be modified (may include schema) - /// The ID of the document to modify - /// One or more field names to remove from the document - [] - let byId tableName (docId: 'TKey) fieldNames = - use conn = Configuration.dbConn () - WithConn.RemoveFields.byId tableName docId fieldNames conn - - /// Remove fields from documents via a comparison on JSON fields in the document - /// The table in which documents should be modified (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - /// One or more field names to remove from the matching documents - [] - let byFields tableName howMatched fields fieldNames = - use conn = Configuration.dbConn () - WithConn.RemoveFields.byFields tableName howMatched fields fieldNames conn - - -/// Commands to delete documents -[] -module Delete = - - /// Delete a document by its ID - /// The table in which a document should be deleted (may include schema) - /// The ID of the document to delete - [] - let byId tableName (docId: 'TKey) = - use conn = Configuration.dbConn () - WithConn.Delete.byId tableName docId conn - - /// Delete documents by matching a JSON field comparison query (->> =, etc.) - /// The table in which documents should be deleted (may include schema) - /// Whether to match any or all of the field conditions - /// The field conditions to match - [] - let byFields tableName howMatched fields = - use conn = Configuration.dbConn () - WithConn.Delete.byFields tableName howMatched fields conn diff --git a/src/Sqlite/WithConn.fs b/src/Sqlite/WithConn.fs new file mode 100644 index 0000000..763678b --- /dev/null +++ b/src/Sqlite/WithConn.fs @@ -0,0 +1,510 @@ +/// Versions of queries that accept a SqliteConnection as the last parameter +module BitBadger.Documents.Sqlite.WithConn + +open BitBadger.Documents +open Microsoft.Data.Sqlite + +/// Commands to execute custom SQL queries +[] +module Custom = + + /// Execute a query that returns a list of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function between the document and the domain item + /// The SqliteConnection to use to execute the query + /// A list of results for the given query + [] + let list<'TDoc> query (parameters: SqliteParameter seq) (mapFunc: SqliteDataReader -> 'TDoc) + (conn: SqliteConnection) = + use cmd = conn.CreateCommand() + cmd.CommandText <- query + cmd.Parameters.AddRange parameters + toCustomList<'TDoc> cmd mapFunc + + /// Execute a query that returns a list of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function between the document and the domain item + /// The SqliteConnection to use to execute the query + /// A list of results for the given query + let List<'TDoc>( + query, parameters: SqliteParameter seq, mapFunc: System.Func, + conn: SqliteConnection + ) = + use cmd = conn.CreateCommand() + cmd.CommandText <- query + 
cmd.Parameters.AddRange parameters + ToCustomList<'TDoc>(cmd, mapFunc) + + /// Execute a query that returns one or no results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function between the document and the domain item + /// The SqliteConnection to use to execute the query + /// Some with the first matching result, or None if not found + [] + let single<'TDoc> query parameters (mapFunc: SqliteDataReader -> 'TDoc) conn = backgroundTask { + let! results = list query parameters mapFunc conn + return FSharp.Collections.List.tryHead results + } + + /// Execute a query that returns one or no results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function between the document and the domain item + /// The SqliteConnection to use to execute the query + /// The first matching result, or null if not found + let Single<'TDoc when 'TDoc: null and 'TDoc: not struct>( + query, parameters, mapFunc: System.Func, conn + ) = backgroundTask { + let! result = single<'TDoc> query parameters mapFunc.Invoke conn + return Option.toObj result + } + + /// Execute a query that returns no results + /// The query to retrieve the results + /// Parameters to use for the query + /// The SqliteConnection to use to execute the query + [] + let nonQuery query (parameters: SqliteParameter seq) (conn: SqliteConnection) = + use cmd = conn.CreateCommand() + cmd.CommandText <- query + cmd.Parameters.AddRange parameters + write cmd + + /// Execute a query that returns a scalar value + /// The query to retrieve the value + /// Parameters to use for the query + /// The mapping function to obtain the value + /// The SqliteConnection to use to execute the query + /// The scalar value for the query + [] + let scalar<'T when 'T : struct> query (parameters: SqliteParameter seq) (mapFunc: SqliteDataReader -> 'T) + (conn: SqliteConnection) = backgroundTask { + use cmd = conn.CreateCommand() + cmd.CommandText <- query + cmd.Parameters.AddRange parameters + use! rdr = cmd.ExecuteReaderAsync() + let! isFound = rdr.ReadAsync() + return if isFound then mapFunc rdr else Unchecked.defaultof<'T> + } + + /// Execute a query that returns a scalar value + /// The query to retrieve the value + /// Parameters to use for the query + /// The mapping function to obtain the value + /// The SqliteConnection to use to execute the query + /// The scalar value for the query + let Scalar<'T when 'T: struct>(query, parameters, mapFunc: System.Func, conn) = + scalar<'T> query parameters mapFunc.Invoke conn + +/// Functions to create tables and indexes +[] +module Definition = + + /// Create a document table + /// The table whose existence should be ensured (may include schema) + /// The SqliteConnection to use to execute the query + [] + let ensureTable name conn = backgroundTask { + do! Custom.nonQuery (Query.Definition.ensureTable name) [] conn + do! 
Custom.nonQuery (Query.Definition.ensureKey name SQLite) [] conn + } + + /// Create an index on field(s) within documents in the specified table + /// The table to be indexed (may include schema) + /// The name of the index to create + /// One or more fields to be indexed + /// The SqliteConnection to use to execute the query + [] + let ensureFieldIndex tableName indexName fields conn = + Custom.nonQuery (Query.Definition.ensureIndexOn tableName indexName fields SQLite) [] conn + +/// Commands to add documents +[] +module Document = + + /// Insert a new document + /// The table into which the document should be inserted (may include schema) + /// The document to be inserted + /// The SqliteConnection to use to execute the query + [] + let insert<'TDoc> tableName (document: 'TDoc) conn = + let query = + match Configuration.autoIdStrategy () with + | Disabled -> Query.insert tableName + | strategy -> + let idField = Configuration.idField () + let dataParam = + if AutoId.NeedsAutoId strategy document idField then + match strategy with + | Number -> $"(SELECT coalesce(max(data->>'{idField}'), 0) + 1 FROM {tableName})" + | Guid -> $"'{AutoId.GenerateGuid()}'" + | RandomString -> $"'{AutoId.GenerateRandomString(Configuration.idStringLength ())}'" + | Disabled -> "@data" + |> function it -> $"json_set(@data, '$.{idField}', {it})" + else "@data" + (Query.insert tableName).Replace("@data", dataParam) + Custom.nonQuery query [ jsonParam "@data" document ] conn + + /// + /// Save a document, inserting it if it does not exist and updating it if it does (AKA "upsert") + /// + /// The table into which the document should be saved (may include schema) + /// The document to be saved + /// The SqliteConnection to use to execute the query + [] + let save<'TDoc> tableName (document: 'TDoc) conn = + Custom.nonQuery (Query.save tableName) [ jsonParam "@data" document ] conn + +/// Commands to count documents +[] +module Count = + + /// Count all documents in a table + /// The table in which documents should be counted (may include schema) + /// The SqliteConnection to use to execute the query + /// The count of the documents in the table + [] + let all tableName conn = + Custom.scalar (Query.count tableName) [] toCount conn + + /// Count matching documents using JSON field comparisons (->> =, etc.) + /// The table in which documents should be counted (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + /// The count of matching documents in the table + [] + let byFields tableName howMatched fields conn = + Custom.scalar + (Query.byFields (Query.count tableName) howMatched fields) (addFieldParams fields []) toCount conn + +/// Commands to determine if documents exist +[] +module Exists = + + /// Determine if a document exists for the given ID + /// The table in which existence should be checked (may include schema) + /// The ID of the document whose existence should be checked + /// The SqliteConnection to use to execute the query + /// True if a document exists, false if not + [] + let byId tableName (docId: 'TKey) conn = + Custom.scalar (Query.exists tableName (Query.whereById docId)) [ idParam docId ] toExists conn + + /// Determine if a document exists using JSON field comparisons (->> =, etc.) 
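// ------------------------------------------------------------------------
// Editorial sketch (not part of this patch): table setup plus insert and
// upsert over an explicit connection. Document.insert honors the configured
// automatic-ID strategy shown above; the names here are hypothetical.
let seed (conn: SqliteConnection) = backgroundTask {
    do! Definition.ensureTable "people" conn
    do! Document.insert "people" { Id = "one"; Name = "First"; Age = 1 } conn
    do! Document.save "people" { Id = "one"; Name = "Upserted"; Age = 2 } conn
}
// ------------------------------------------------------------------------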
+ /// The table in which existence should be checked (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + /// True if any matching documents exist, false if not + [] + let byFields tableName howMatched fields conn = + Custom.scalar + (Query.exists tableName (Query.whereByFields howMatched fields)) + (addFieldParams fields []) + toExists + conn + +/// Commands to retrieve documents +[] +module Find = + + /// Retrieve all documents in the given table + /// The table from which documents should be retrieved (may include schema) + /// The SqliteConnection to use to execute the query + /// All documents from the given table + [] + let all<'TDoc> tableName conn = + Custom.list<'TDoc> (Query.find tableName) [] fromData<'TDoc> conn + + /// Retrieve all documents in the given table + /// The table from which documents should be retrieved (may include schema) + /// The SqliteConnection to use to execute the query + /// All documents from the given table + let All<'TDoc>(tableName, conn) = + Custom.List(Query.find tableName, [], fromData<'TDoc>, conn) + + /// Retrieve all documents in the given table ordered by the given fields in the document + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + /// All documents from the given table, ordered by the given fields + [] + let allOrdered<'TDoc> tableName orderFields conn = + Custom.list<'TDoc> (Query.find tableName + Query.orderBy orderFields SQLite) [] fromData<'TDoc> conn + + /// Retrieve all documents in the given table ordered by the given fields in the document + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + /// All documents from the given table, ordered by the given fields + let AllOrdered<'TDoc>(tableName, orderFields, conn) = + Custom.List(Query.find tableName + Query.orderBy orderFields SQLite, [], fromData<'TDoc>, conn) + + /// Retrieve a document by its ID + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The SqliteConnection to use to execute the query + /// Some with the document if found, None otherwise + [] + let byId<'TKey, 'TDoc> tableName (docId: 'TKey) conn = + Custom.single<'TDoc> (Query.byId (Query.find tableName) docId) [ idParam docId ] fromData<'TDoc> conn + + /// Retrieve a document by its ID + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The SqliteConnection to use to execute the query + /// The document if found, null otherwise + let ById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, docId: 'TKey, conn) = + Custom.Single<'TDoc>(Query.byId (Query.find tableName) docId, [ idParam docId ], fromData<'TDoc>, conn) + + /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
+ /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + /// All documents matching the given fields + [] + let byFields<'TDoc> tableName howMatched fields conn = + Custom.list<'TDoc> + (Query.byFields (Query.find tableName) howMatched fields) + (addFieldParams fields []) + fromData<'TDoc> + conn + + /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + /// All documents matching the given fields + let ByFields<'TDoc>(tableName, howMatched, fields, conn) = + Custom.List<'TDoc>( + Query.byFields (Query.find tableName) howMatched fields, + addFieldParams fields [], + fromData<'TDoc>, + conn) + + /// + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields + /// in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + /// All documents matching the given fields, ordered by the other given fields + [] + let byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn = + Custom.list<'TDoc> + (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields SQLite) + (addFieldParams queryFields []) + fromData<'TDoc> + conn + + /// + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields + /// in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + /// All documents matching the given fields, ordered by the other given fields + let ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) = + Custom.List<'TDoc>( + Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields SQLite, + addFieldParams queryFields [], + fromData<'TDoc>, + conn) + + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + /// Some with the first document, or None if not found + [] + let firstByFields<'TDoc> tableName howMatched fields conn = + Custom.single + $"{Query.byFields (Query.find tableName) howMatched fields} LIMIT 1" + (addFieldParams fields []) + fromData<'TDoc> + conn + + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
+ /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + /// The first document, or null if not found + let FirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, howMatched, fields, conn) = + Custom.Single( + $"{Query.byFields (Query.find tableName) howMatched fields} LIMIT 1", + addFieldParams fields [], + fromData<'TDoc>, + conn) + + /// + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the + /// given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + /// + /// Some with the first document ordered by the given fields, or None if not found + /// + [] + let firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn = + Custom.single + $"{Query.byFields (Query.find tableName) howMatched queryFields}{Query.orderBy orderFields SQLite} LIMIT 1" + (addFieldParams queryFields []) + fromData<'TDoc> + conn + + /// + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the + /// given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + /// The first document ordered by the given fields, or null if not found + let FirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( + tableName, howMatched, queryFields, orderFields, conn) = + Custom.Single( + $"{Query.byFields (Query.find tableName) howMatched queryFields}{Query.orderBy orderFields SQLite} LIMIT 1", + addFieldParams queryFields [], + fromData<'TDoc>, + conn) + +/// Commands to update documents +[] +module Update = + + /// Update (replace) an entire document by its ID + /// The table in which a document should be updated (may include schema) + /// The ID of the document to be updated (replaced) + /// The new document + /// The SqliteConnection to use to execute the query + [] + let byId tableName (docId: 'TKey) (document: 'TDoc) conn = + Custom.nonQuery + (Query.statementWhere (Query.update tableName) (Query.whereById docId)) + [ idParam docId; jsonParam "@data" document ] + conn + + /// + /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the + /// document + /// + /// The table in which a document should be updated (may include schema) + /// The function to obtain the ID of the document + /// The new document + /// The SqliteConnection to use to execute the query + [] + let byFunc tableName (idFunc: 'TDoc -> 'TKey) (document: 'TDoc) conn = + byId tableName (idFunc document) document conn + + /// + /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the + /// document + /// + /// The table in which a document should be updated (may include schema) + /// The function to obtain the ID of the document + /// The new document + /// The SqliteConnection to use to execute the query + let ByFunc(tableName, idFunc: System.Func<'TDoc, 'TKey>, document: 
'TDoc, conn) = + byFunc tableName idFunc.Invoke document conn + +/// Commands to patch (partially update) documents +[] +module Patch = + + /// Patch a document by its ID + /// The table in which a document should be patched (may include schema) + /// The ID of the document to patch + /// The partial document to patch the existing document + /// The SqliteConnection to use to execute the query + [] + let byId tableName (docId: 'TKey) (patch: 'TPatch) conn = + Custom.nonQuery + (Query.byId (Query.patch tableName) docId) [ idParam docId; jsonParam "@data" patch ] conn + + /// + /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, + /// etc.) + /// + /// The table in which documents should be patched (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The partial document to patch the existing document + /// The SqliteConnection to use to execute the query + [] + let byFields tableName howMatched fields (patch: 'TPatch) conn = + Custom.nonQuery + (Query.byFields (Query.patch tableName) howMatched fields) + (addFieldParams fields [ jsonParam "@data" patch ]) + conn + +/// Commands to remove fields from documents +[] +module RemoveFields = + + /// Remove fields from a document by the document's ID + /// The table in which a document should be modified (may include schema) + /// The ID of the document to modify + /// One or more field names to remove from the document + /// The SqliteConnection to use to execute the query + [] + let byId tableName (docId: 'TKey) fieldNames conn = + let nameParams = fieldNameParams "@name" fieldNames + Custom.nonQuery + (Query.byId (Query.removeFields tableName nameParams) docId) + (idParam docId |> Seq.singleton |> Seq.append nameParams) + conn + + /// Remove fields from documents via a comparison on JSON fields in the document + /// The table in which documents should be modified (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// One or more field names to remove from the matching documents + /// The SqliteConnection to use to execute the query + [] + let byFields tableName howMatched fields fieldNames conn = + let nameParams = fieldNameParams "@name" fieldNames + Custom.nonQuery + (Query.byFields (Query.removeFields tableName nameParams) howMatched fields) + (addFieldParams fields nameParams) + conn + +/// Commands to delete documents +[] +module Delete = + + /// Delete a document by its ID + /// The table in which a document should be deleted (may include schema) + /// The ID of the document to delete + /// The SqliteConnection to use to execute the query + [] + let byId tableName (docId: 'TKey) conn = + Custom.nonQuery (Query.byId (Query.delete tableName) docId) [ idParam docId ] conn + + /// Delete documents by matching a JSON field comparison query (->> =, etc.) + /// The table in which documents should be deleted (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + [] + let byFields tableName howMatched fields conn = + Custom.nonQuery (Query.byFields (Query.delete tableName) howMatched fields) (addFieldParams fields []) conn -- 2.47.2 From 68d9c13ad7310d922294ad6148b63c8c717116e5 Mon Sep 17 00:00:00 2001 From: "Daniel J. 
Summers" Date: Sun, 6 Apr 2025 17:27:40 -0400 Subject: [PATCH 10/22] Add SQLite WithConn Json funcs; version bump --- src/Directory.Build.props | 9 +- src/Sqlite/WithConn.fs | 419 +++++++++++++++++++++++++++++++------- 2 files changed, 351 insertions(+), 77 deletions(-) diff --git a/src/Directory.Build.props b/src/Directory.Build.props index 932d9dd..cced572 100644 --- a/src/Directory.Build.props +++ b/src/Directory.Build.props @@ -3,11 +3,10 @@ net8.0;net9.0 embedded true - 4.0.1.0 - 4.0.1.0 - 4.0.1 - From v4.0: Add XML documention (IDE support) -From v3.1: See 4.0 release for breaking changes and compatibility + 4.1.0.0 + 4.1.0.0 + 4.1.0 + Add JSON retrieval and stream-writing functions danieljsummers Bit Badger Solutions README.md diff --git a/src/Sqlite/WithConn.fs b/src/Sqlite/WithConn.fs index 763678b..0a163b0 100644 --- a/src/Sqlite/WithConn.fs +++ b/src/Sqlite/WithConn.fs @@ -1,6 +1,7 @@ /// Versions of queries that accept a SqliteConnection as the last parameter module BitBadger.Documents.Sqlite.WithConn +open System.IO open BitBadger.Documents open Microsoft.Data.Sqlite @@ -12,7 +13,7 @@ module Custom = /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query /// A list of results for the given query [] let list<'TDoc> query (parameters: SqliteParameter seq) (mapFunc: SqliteDataReader -> 'TDoc) @@ -26,7 +27,7 @@ module Custom = /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query /// A list of results for the given query let List<'TDoc>( query, parameters: SqliteParameter seq, mapFunc: System.Func, @@ -37,12 +38,65 @@ module Custom = cmd.Parameters.AddRange parameters ToCustomList<'TDoc>(cmd, mapFunc) + /// Execute a query that returns a JSON array of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The SqliteConnection to use to execute the query + /// A JSON array of results for the given query + [] + let jsonArray + query + (parameters: SqliteParameter seq) + (mapFunc: SqliteDataReader -> string) + (conn: SqliteConnection) = + use cmd = conn.CreateCommand() + cmd.CommandText <- query + cmd.Parameters.AddRange parameters + toJsonArray cmd mapFunc + + /// Execute a query that returns a JSON array of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The SqliteConnection to use to execute the query + /// A JSON array of results for the given query + let JsonArray(query, parameters, mapFunc: System.Func, conn) = + jsonArray query parameters mapFunc.Invoke conn + + /// Execute a query, writing its results to the given StreamWriter + /// The query to retrieve the results + /// Parameters to use for the query + /// The StreamWriter to which the results should be written + /// The mapping function to extract the document + /// The SqliteConnection to use to execute the query + [] + let writeJsonArray + query + (parameters: SqliteParameter seq) + writer + (mapFunc: SqliteDataReader -> string) + (conn: SqliteConnection) = + use cmd = conn.CreateCommand() + cmd.CommandText <- query + cmd.Parameters.AddRange parameters + 
writeJsonArray cmd writer mapFunc + + /// Execute a query, writing its results to the given StreamWriter + /// The query to retrieve the results + /// Parameters to use for the query + /// The StreamWriter to which the results should be written + /// The mapping function to extract the document + /// The SqliteConnection to use to execute the query + let WriteJsonArray(query, parameters, writer, mapFunc: System.Func, conn) = + writeJsonArray query parameters writer mapFunc.Invoke conn + /// Execute a query that returns one or no results /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// The SqliteConnection to use to execute the query - /// Some with the first matching result, or None if not found + /// The SqliteConnection to use to execute the query + /// Some with the first matching result, or None if not found [] let single<'TDoc> query parameters (mapFunc: SqliteDataReader -> 'TDoc) conn = backgroundTask { let! results = list query parameters mapFunc conn @@ -53,8 +107,8 @@ module Custom = /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// The SqliteConnection to use to execute the query - /// The first matching result, or null if not found + /// The SqliteConnection to use to execute the query + /// The first matching result, or null if not found let Single<'TDoc when 'TDoc: null and 'TDoc: not struct>( query, parameters, mapFunc: System.Func, conn ) = backgroundTask { @@ -62,10 +116,31 @@ module Custom = return Option.toObj result } + /// Execute a query that returns one or no JSON documents + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The SqliteConnection to use to execute the query + /// The JSON document with the first matching result, or an empty document if not found + [] + let jsonSingle query parameters mapFunc conn = backgroundTask { + let! 
results = jsonArray $"%s{query} LIMIT 1" parameters mapFunc conn + return if results = "[]" then "{}" else results[1..results.Length - 2] + } + + /// Execute a query that returns one or no JSON documents + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The SqliteConnection to use to execute the query + /// The JSON document with the first matching result, or an empty document if not found + let JsonSingle(query, parameters, mapFunc: System.Func, conn) = + jsonSingle query parameters mapFunc.Invoke conn + /// Execute a query that returns no results /// The query to retrieve the results /// Parameters to use for the query - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query [] let nonQuery query (parameters: SqliteParameter seq) (conn: SqliteConnection) = use cmd = conn.CreateCommand() @@ -77,7 +152,7 @@ module Custom = /// The query to retrieve the value /// Parameters to use for the query /// The mapping function to obtain the value - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query /// The scalar value for the query [] let scalar<'T when 'T : struct> query (parameters: SqliteParameter seq) (mapFunc: SqliteDataReader -> 'T) @@ -94,18 +169,19 @@ module Custom = /// The query to retrieve the value /// Parameters to use for the query /// The mapping function to obtain the value - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query /// The scalar value for the query let Scalar<'T when 'T: struct>(query, parameters, mapFunc: System.Func, conn) = scalar<'T> query parameters mapFunc.Invoke conn + /// Functions to create tables and indexes [] module Definition = /// Create a document table /// The table whose existence should be ensured (may include schema) - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query [] let ensureTable name conn = backgroundTask { do! 
Custom.nonQuery (Query.Definition.ensureTable name) [] conn @@ -116,7 +192,7 @@ module Definition = /// The table to be indexed (may include schema) /// The name of the index to create /// One or more fields to be indexed - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query [] let ensureFieldIndex tableName indexName fields conn = Custom.nonQuery (Query.Definition.ensureIndexOn tableName indexName fields SQLite) [] conn @@ -128,7 +204,7 @@ module Document = /// Insert a new document /// The table into which the document should be inserted (may include schema) /// The document to be inserted - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query [] let insert<'TDoc> tableName (document: 'TDoc) conn = let query = @@ -148,39 +224,39 @@ module Document = (Query.insert tableName).Replace("@data", dataParam) Custom.nonQuery query [ jsonParam "@data" document ] conn - /// - /// Save a document, inserting it if it does not exist and updating it if it does (AKA "upsert") - /// + /// Save a document, inserting it if it does not exist and updating it if it does (AKA "upsert") /// The table into which the document should be saved (may include schema) /// The document to be saved - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query [] let save<'TDoc> tableName (document: 'TDoc) conn = Custom.nonQuery (Query.save tableName) [ jsonParam "@data" document ] conn + /// Commands to count documents [] module Count = /// Count all documents in a table /// The table in which documents should be counted (may include schema) - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query /// The count of the documents in the table [] let all tableName conn = Custom.scalar (Query.count tableName) [] toCount conn - /// Count matching documents using JSON field comparisons (->> =, etc.) + /// Count matching documents using JSON field comparisons (->> =, etc.) /// The table in which documents should be counted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query /// The count of matching documents in the table [] let byFields tableName howMatched fields conn = Custom.scalar (Query.byFields (Query.count tableName) howMatched fields) (addFieldParams fields []) toCount conn + /// Commands to determine if documents exist [] module Exists = @@ -188,17 +264,17 @@ module Exists = /// Determine if a document exists for the given ID /// The table in which existence should be checked (may include schema) /// The ID of the document whose existence should be checked - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query /// True if a document exists, false if not [] let byId tableName (docId: 'TKey) conn = Custom.scalar (Query.exists tableName (Query.whereById docId)) [ idParam docId ] toExists conn - /// Determine if a document exists using JSON field comparisons (->> =, etc.) + /// Determine if a document exists using JSON field comparisons (->> =, etc.) 
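(As a hedged sketch of the count and existence commands, with an invented table and condition; Field.Less is an assumption based on the library's comparison builders.)

    // Sketch: count matching documents and check whether any exist, using the same field conditions
    let minorCheck (conn: SqliteConnection) = backgroundTask {
        let! count  = WithConn.Count.byFields "people" FieldMatch.All [ Field.Less "Age" 18 ] conn
        let! exists = WithConn.Exists.byFields "people" FieldMatch.All [ Field.Less "Age" 18 ] conn
        return count, exists
    }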
/// The table in which existence should be checked (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query /// True if any matching documents exist, false if not [] let byFields tableName howMatched fields conn = @@ -208,13 +284,14 @@ module Exists = toExists conn -/// Commands to retrieve documents + +/// Commands to retrieve documents as domain items [] module Find = /// Retrieve all documents in the given table /// The table from which documents should be retrieved (may include schema) - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query /// All documents from the given table [] let all<'TDoc> tableName conn = @@ -222,7 +299,7 @@ module Find = /// Retrieve all documents in the given table /// The table from which documents should be retrieved (may include schema) - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query /// All documents from the given table let All<'TDoc>(tableName, conn) = Custom.List(Query.find tableName, [], fromData<'TDoc>, conn) @@ -230,7 +307,7 @@ module Find = /// Retrieve all documents in the given table ordered by the given fields in the document /// The table from which documents should be retrieved (may include schema) /// Fields by which the results should be ordered - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query /// All documents from the given table, ordered by the given fields [] let allOrdered<'TDoc> tableName orderFields conn = @@ -239,7 +316,7 @@ module Find = /// Retrieve all documents in the given table ordered by the given fields in the document /// The table from which documents should be retrieved (may include schema) /// Fields by which the results should be ordered - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query /// All documents from the given table, ordered by the given fields let AllOrdered<'TDoc>(tableName, orderFields, conn) = Custom.List(Query.find tableName + Query.orderBy orderFields SQLite, [], fromData<'TDoc>, conn) @@ -247,8 +324,8 @@ module Find = /// Retrieve a document by its ID /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve - /// The SqliteConnection to use to execute the query - /// Some with the document if found, None otherwise + /// The SqliteConnection to use to execute the query + /// Some with the document if found, None otherwise [] let byId<'TKey, 'TDoc> tableName (docId: 'TKey) conn = Custom.single<'TDoc> (Query.byId (Query.find tableName) docId) [ idParam docId ] fromData<'TDoc> conn @@ -256,16 +333,16 @@ module Find = /// Retrieve a document by its ID /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve - /// The SqliteConnection to use to execute the query - /// The document if found, null otherwise + /// The SqliteConnection to use to execute the query + /// The document if found, null otherwise let ById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, docId: 'TKey, conn) = Custom.Single<'TDoc>(Query.byId (Query.find tableName) docId, [ idParam docId ], fromData<'TDoc>, conn) - /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
+ /// Retrieve documents matching JSON field comparisons (->> =, etc.) /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query /// All documents matching the given fields [] let byFields<'TDoc> tableName howMatched fields conn = @@ -275,11 +352,11 @@ module Find = fromData<'TDoc> conn - /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// Retrieve documents matching JSON field comparisons (->> =, etc.) /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query /// All documents matching the given fields let ByFields<'TDoc>(tableName, howMatched, fields, conn) = Custom.List<'TDoc>( @@ -289,14 +366,14 @@ module Find = conn) /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields - /// in the document + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in the + /// document /// /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query /// All documents matching the given fields, ordered by the other given fields [] let byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn = @@ -307,14 +384,14 @@ module Find = conn /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields - /// in the document + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in the + /// document /// /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query /// All documents matching the given fields, ordered by the other given fields let ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) = Custom.List<'TDoc>( @@ -323,12 +400,12 @@ module Find = fromData<'TDoc>, conn) - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqliteConnection to use to execute the query - /// Some with the first document, or None if not found + /// The SqliteConnection to use to execute the query + /// Some with the first document, or None if not found [] let firstByFields<'TDoc> tableName howMatched fields conn = Custom.single @@ -337,12 +414,12 @@ module Find = fromData<'TDoc> conn - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
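(A hedged sketch of the ordered retrieval variants; Field.Named for the order fields is an assumption drawn from the library's ordering helpers, and the table and field names are invented.)

    // Sketch: active people, ordered by last name
    let activeRoster (conn: SqliteConnection) =
        WithConn.Find.byFieldsOrdered<Person>
            "people" FieldMatch.Any [ Field.Equal "Status" "Active" ] [ Field.Named "LastName" ] conn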
/// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqliteConnection to use to execute the query - /// The first document, or null if not found + /// The SqliteConnection to use to execute the query + /// The first document, or null if not found let FirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, howMatched, fields, conn) = Custom.Single( $"{Query.byFields (Query.find tableName) howMatched fields} LIMIT 1", @@ -351,16 +428,16 @@ module Find = conn) /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the - /// given fields in the document + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given + /// fields in the document /// /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query /// - /// Some with the first document ordered by the given fields, or None if not found + /// Some with the first document ordered by the given fields, or None if not found /// [] let firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn = @@ -371,15 +448,15 @@ module Find = conn /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the - /// given fields in the document + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given + /// fields in the document /// /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// The SqliteConnection to use to execute the query - /// The first document ordered by the given fields, or null if not found + /// The SqliteConnection to use to execute the query + /// The first document ordered by the given fields, or null if not found let FirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( tableName, howMatched, queryFields, orderFields, conn) = Custom.Single( @@ -388,6 +465,204 @@ module Find = fromData<'TDoc>, conn) + +/// Commands to retrieve documents as raw JSON +[] +module Json = + + /// Retrieve all JSON documents in the given table + /// The table from which documents should be retrieved (may include schema) + /// The SqliteConnection to use to execute the query + /// All JSON documents from the given table + [] + let all tableName conn = + Custom.jsonArray (Query.find tableName) [] jsonFromData conn + + /// Retrieve all JSON documents in the given table ordered by the given fields in the document + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + /// All JSON documents from the given table, ordered by the given fields + [] + let allOrdered tableName orderFields conn = + Custom.jsonArray (Query.find tableName + Query.orderBy orderFields SQLite) [] jsonFromData conn + + /// Retrieve a JSON document by its ID + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The SqliteConnection to use 
to execute the query + /// The JSON document if found, an empty JSON document otherwise + [] + let byId<'TKey> tableName (docId: 'TKey) conn = + Custom.jsonSingle (Query.byId (Query.find tableName) docId) [ idParam docId ] jsonFromData conn + + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + /// All JSON documents matching the given fields + [] + let byFields tableName howMatched fields conn = + Custom.jsonArray + (Query.byFields (Query.find tableName) howMatched fields) (addFieldParams fields []) jsonFromData conn + + /// + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) ordered by the given fields + /// in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + /// All JSON documents matching the given fields, ordered by the other given fields + [] + let byFieldsOrdered tableName howMatched queryFields orderFields conn = + Custom.jsonArray + (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields SQLite) + (addFieldParams queryFields []) + jsonFromData + conn + + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + /// The first JSON document if found, an empty JSON document otherwise + [] + let firstByFields tableName howMatched fields conn = + Custom.jsonSingle + (Query.byFields (Query.find tableName) howMatched fields) (addFieldParams fields []) jsonFromData conn + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) 
ordered by the given
+ /// fields in the document
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// Fields by which the results should be ordered
+ /// The SqliteConnection to use to execute the query
+ /// The first JSON document (in order) if found, an empty JSON document otherwise
+ []
+ let firstByFieldsOrdered tableName howMatched queryFields orderFields conn =
+ Custom.jsonSingle
+ (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields SQLite)
+ (addFieldParams queryFields [])
+ jsonFromData
+ conn
+
+ /// Write all JSON documents in the given table to the given StreamWriter
+ /// The table from which documents should be retrieved (may include schema)
+ /// The StreamWriter to which the results should be written
+ /// The SqliteConnection to use to execute the query
+ []
+ let writeAll tableName writer conn =
+ Custom.writeJsonArray (Query.find tableName) [] writer jsonFromData conn
+
+ ///
+ /// Write all JSON documents in the given table to the given StreamWriter, ordered by the given fields in
+ /// the document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The StreamWriter to which the results should be written
+ /// Fields by which the results should be ordered
+ /// The SqliteConnection to use to execute the query
+ []
+ let writeAllOrdered tableName writer orderFields conn =
+ Custom.writeJsonArray (Query.find tableName + Query.orderBy orderFields SQLite) [] writer jsonFromData conn
+
+ /// Write a JSON document to the given StreamWriter by its ID
+ /// The table from which a document should be retrieved (may include schema)
+ /// The StreamWriter to which the results should be written
+ /// The ID of the document to retrieve
+ /// The SqliteConnection to use to execute the query
+ []
+ let writeById<'TKey> tableName (writer: StreamWriter) (docId: 'TKey) conn = backgroundTask {
+ let! json = Custom.jsonSingle (Query.byId (Query.find tableName) docId) [ idParam docId ] jsonFromData conn
+ do! writer.WriteAsync json
+ }
+
+ ///
+ /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.)
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The StreamWriter to which the results should be written
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// The SqliteConnection to use to execute the query
+ []
+ let writeByFields tableName writer howMatched fields conn =
+ Custom.writeJsonArray
+ (Query.byFields (Query.find tableName) howMatched fields)
+ (addFieldParams fields [])
+ writer
+ jsonFromData
+ conn
+
+ ///
+ /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.)
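(A hedged sketch of the StreamWriter-based JSON writers above; the table name, field condition, and writer target are illustrative only.)

    // Sketch: stream one document by ID, then a JSON array of matches, to the same writer
    let streamPeople (writer: StreamWriter) (conn: SqliteConnection) = backgroundTask {
        do! WithConn.Json.writeById "people" writer "abc123" conn
        do! writer.WriteAsync "\n"
        // writeByFields emits "[]" when nothing matches, mirroring toJsonArray
        return! WithConn.Json.writeByFields "people" writer FieldMatch.Any [ Field.Equal "Status" "Active" ] conn
    }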
+ /// ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + [] + let writeByFieldsOrdered tableName writer howMatched queryFields orderFields conn = + Custom.writeJsonArray + (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields SQLite) + (addFieldParams queryFields []) + writer + jsonFromData + conn + + /// + /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// (->> =, etc.) + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The SqliteConnection to use to execute the query + /// The first JSON document if found, an empty JSON document otherwise + [] + let writeFirstByFields tableName (writer: StreamWriter) howMatched fields conn = backgroundTask { + let! json = + Custom.jsonSingle + (Query.byFields (Query.find tableName) howMatched fields) (addFieldParams fields []) jsonFromData conn + do! writer.WriteAsync json + } + + /// + /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// (->> =, etc.) ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The SqliteConnection to use to execute the query + /// The first JSON document (in order) if found, an empty JSON document otherwise + [] + let writeFirstByFieldsOrdered tableName (writer: StreamWriter) howMatched queryFields orderFields conn = + backgroundTask { + let! json = + Custom.jsonSingle + (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields SQLite) + (addFieldParams queryFields []) + jsonFromData + conn + do! 
writer.WriteAsync json + } + + /// Commands to update documents [] module Update = @@ -396,7 +671,7 @@ module Update = /// The table in which a document should be updated (may include schema) /// The ID of the document to be updated (replaced) /// The new document - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query [] let byId tableName (docId: 'TKey) (document: 'TDoc) conn = Custom.nonQuery @@ -405,28 +680,27 @@ module Update = conn /// - /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the - /// document + /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the document /// /// The table in which a document should be updated (may include schema) /// The function to obtain the ID of the document /// The new document - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query [] let byFunc tableName (idFunc: 'TDoc -> 'TKey) (document: 'TDoc) conn = byId tableName (idFunc document) document conn /// - /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the - /// document + /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the document /// /// The table in which a document should be updated (may include schema) /// The function to obtain the ID of the document /// The new document - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query let ByFunc(tableName, idFunc: System.Func<'TDoc, 'TKey>, document: 'TDoc, conn) = byFunc tableName idFunc.Invoke document conn + /// Commands to patch (partially update) documents [] module Patch = @@ -435,21 +709,20 @@ module Patch = /// The table in which a document should be patched (may include schema) /// The ID of the document to patch /// The partial document to patch the existing document - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query [] let byId tableName (docId: 'TKey) (patch: 'TPatch) conn = Custom.nonQuery (Query.byId (Query.patch tableName) docId) [ idParam docId; jsonParam "@data" patch ] conn /// - /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, - /// etc.) + /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) 
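(A hedged sketch of the patch commands; the anonymous-record payload, table name, and ID are illustrative only, and Patch.byFields is assumed to apply the same payload to every matching document.)

    // Sketch: flip one document by ID, then all expired documents by field match
    let deactivate (conn: SqliteConnection) = backgroundTask {
        do! WithConn.Patch.byId "people" "abc123" {| Status = "Inactive" |} conn
        do! WithConn.Patch.byFields "people" FieldMatch.All [ Field.Equal "Status" "Expired" ] {| Status = "Inactive" |} conn
    }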
/// /// The table in which documents should be patched (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// The partial document to patch the existing document - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query [] let byFields tableName howMatched fields (patch: 'TPatch) conn = Custom.nonQuery @@ -457,6 +730,7 @@ module Patch = (addFieldParams fields [ jsonParam "@data" patch ]) conn + /// Commands to remove fields from documents [] module RemoveFields = @@ -465,7 +739,7 @@ module RemoveFields = /// The table in which a document should be modified (may include schema) /// The ID of the document to modify /// One or more field names to remove from the document - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query [] let byId tableName (docId: 'TKey) fieldNames conn = let nameParams = fieldNameParams "@name" fieldNames @@ -479,7 +753,7 @@ module RemoveFields = /// Whether to match any or all of the field conditions /// The field conditions to match /// One or more field names to remove from the matching documents - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query [] let byFields tableName howMatched fields fieldNames conn = let nameParams = fieldNameParams "@name" fieldNames @@ -488,23 +762,24 @@ module RemoveFields = (addFieldParams fields nameParams) conn -/// Commands to delete documents + +/// Commands to delete documents [] module Delete = /// Delete a document by its ID /// The table in which a document should be deleted (may include schema) /// The ID of the document to delete - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query [] let byId tableName (docId: 'TKey) conn = Custom.nonQuery (Query.byId (Query.delete tableName) docId) [ idParam docId ] conn - /// Delete documents by matching a JSON field comparison query (->> =, etc.) + /// Delete documents by matching a JSON field comparison query (->> =, etc.) /// The table in which documents should be deleted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The SqliteConnection to use to execute the query + /// The SqliteConnection to use to execute the query [] let byFields tableName howMatched fields conn = Custom.nonQuery (Query.byFields (Query.delete tableName) howMatched fields) (addFieldParams fields []) conn -- 2.47.2 From 1a995c69a4a6b5a0f08d7f6f0184d371b4a6c90a Mon Sep 17 00:00:00 2001 From: "Daniel J. 
Summers" Date: Sun, 6 Apr 2025 22:08:24 -0400 Subject: [PATCH 11/22] Add SQLite implicit conn Json funcs --- src/Sqlite/Functions.fs | 262 ++++++++++++++++++++++++++++++++++++---- src/Sqlite/WithConn.fs | 2 - 2 files changed, 240 insertions(+), 24 deletions(-) diff --git a/src/Sqlite/Functions.fs b/src/Sqlite/Functions.fs index 24c3316..2b0ec65 100644 --- a/src/Sqlite/Functions.fs +++ b/src/Sqlite/Functions.fs @@ -25,11 +25,49 @@ module Custom = use conn = Configuration.dbConn () WithConn.Custom.List<'TDoc>(query, parameters, mapFunc, conn) + /// Execute a query that returns a JSON array of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// A JSON array of results for the given query + [] + let jsonArray query parameters mapFunc = + use conn = Configuration.dbConn () + WithConn.Custom.jsonArray query parameters mapFunc conn + + /// Execute a query that returns a JSON array of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// A JSON array of results for the given query + let JsonArray(query, parameters, mapFunc) = + use conn = Configuration.dbConn () + WithConn.Custom.JsonArray(query, parameters, mapFunc, conn) + + /// Execute a query, writing its results to the given StreamWriter + /// The query to retrieve the results + /// Parameters to use for the query + /// The StreamWriter to which the results should be written + /// The mapping function to extract the document + [] + let writeJsonArray query parameters writer mapFunc = + use conn = Configuration.dbConn () + WithConn.Custom.writeJsonArray query parameters writer mapFunc conn + + /// Execute a query, writing its results to the given StreamWriter + /// The query to retrieve the results + /// Parameters to use for the query + /// The StreamWriter to which the results should be written + /// The mapping function to extract the document + let WriteJsonArray(query, parameters, writer, mapFunc) = + use conn = Configuration.dbConn () + WithConn.Custom.WriteJsonArray(query, parameters, writer, mapFunc, conn) + /// Execute a query that returns one or no results /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// Some with the first matching result, or None if not found + /// Some with the first matching result, or None if not found [] let single<'TDoc> query parameters (mapFunc: SqliteDataReader -> 'TDoc) = use conn = Configuration.dbConn () @@ -39,12 +77,31 @@ module Custom = /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// The first matching result, or null if not found + /// The first matching result, or null if not found let Single<'TDoc when 'TDoc: null and 'TDoc: not struct>( query, parameters, mapFunc: System.Func) = use conn = Configuration.dbConn () WithConn.Custom.Single<'TDoc>(query, parameters, mapFunc, conn) + /// Execute a query that returns one or no JSON documents + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The JSON document with the first matching result, or an empty document if not found + [] + let jsonSingle query parameters mapFunc = + use conn = Configuration.dbConn () + WithConn.Custom.jsonSingle query parameters mapFunc conn + + /// Execute a query that returns 
one or no JSON documents + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The JSON document with the first matching result, or an empty document if not found + let JsonSingle(query, parameters, mapFunc) = + use conn = Configuration.dbConn () + WithConn.Custom.JsonSingle(query, parameters, mapFunc, conn) + /// Execute a query that returns no results /// The query to retrieve the results /// Parameters to use for the query @@ -127,7 +184,7 @@ module Count = use conn = Configuration.dbConn () WithConn.Count.all tableName conn - /// Count matching documents using JSON field comparisons (->> =, etc.) + /// Count matching documents using JSON field comparisons (->> =, etc.) /// The table in which documents should be counted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -151,7 +208,7 @@ module Exists = use conn = Configuration.dbConn () WithConn.Exists.byId tableName docId conn - /// Determine if a document exists using JSON field comparisons (->> =, etc.) + /// Determine if a document exists using JSON field comparisons (->> =, etc.) /// The table in which existence should be checked (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -201,7 +258,7 @@ module Find = /// Retrieve a document by its ID /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve - /// Some with the document if found, None otherwise + /// Some with the document if found, None otherwise [] let byId<'TKey, 'TDoc> tableName docId = use conn = Configuration.dbConn () @@ -210,12 +267,12 @@ module Find = /// Retrieve a document by its ID /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve - /// The document if found, null otherwise + /// The document if found, null otherwise let ById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, docId) = use conn = Configuration.dbConn () WithConn.Find.ById<'TKey, 'TDoc>(tableName, docId, conn) - /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// Retrieve documents matching JSON field comparisons (->> =, etc.) /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -225,7 +282,7 @@ module Find = use conn = Configuration.dbConn () WithConn.Find.byFields<'TDoc> tableName howMatched fields conn - /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// Retrieve documents matching JSON field comparisons (->> =, etc.) /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -235,8 +292,8 @@ module Find = WithConn.Find.ByFields<'TDoc>(tableName, howMatched, fields, conn) /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in - /// the document + /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
ordered by the given fields in the + /// document /// /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions @@ -249,8 +306,8 @@ module Find = WithConn.Find.byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in - /// the document + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in the + /// document /// /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions @@ -261,27 +318,27 @@ module Find = use conn = Configuration.dbConn () WithConn.Find.ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// Some with the first document, or None if not found + /// Some with the first document, or None if not found [] let firstByFields<'TDoc> tableName howMatched fields = use conn = Configuration.dbConn () WithConn.Find.firstByFields<'TDoc> tableName howMatched fields conn - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The first document, or null if not found + /// The first document, or null if not found let FirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>(tableName, howMatched, fields) = use conn = Configuration.dbConn () WithConn.Find.FirstByFields<'TDoc>(tableName, howMatched, fields, conn) /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given /// fields in the document /// /// The table from which a document should be retrieved (may include schema) @@ -289,7 +346,7 @@ module Find = /// The field conditions to match /// Fields by which the results should be ordered /// - /// Some with the first document ordered by the given fields, or None if not found + /// Some with the first document ordered by the given fields, or None if not found /// [] let firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields = @@ -297,20 +354,181 @@ module Find = WithConn.Find.firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
ordered by the given /// fields in the document /// /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// The first document ordered by the given fields, or null if not found + /// The first document ordered by the given fields, or null if not found let FirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( tableName, howMatched, queryFields, orderFields) = use conn = Configuration.dbConn () WithConn.Find.FirstByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) +/// Commands to retrieve documents as raw JSON +[] +module Json = + + /// Retrieve all JSON documents in the given table + /// The table from which documents should be retrieved (may include schema) + /// All JSON documents from the given table + [] + let all tableName = + use conn = Configuration.dbConn () + WithConn.Json.all tableName conn + + /// Retrieve all JSON documents in the given table ordered by the given fields in the document + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// All JSON documents from the given table, ordered by the given fields + [] + let allOrdered tableName orderFields = + use conn = Configuration.dbConn () + WithConn.Json.allOrdered tableName orderFields conn + + /// Retrieve a JSON document by its ID + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The JSON document if found, an empty JSON document otherwise + [] + let byId<'TKey> tableName (docId: 'TKey) = + use conn = Configuration.dbConn () + WithConn.Json.byId tableName docId conn + + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// All JSON documents matching the given fields + [] + let byFields tableName howMatched fields = + use conn = Configuration.dbConn () + WithConn.Json.byFields tableName howMatched fields conn + + /// + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) ordered by the given fields + /// in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// All JSON documents matching the given fields, ordered by the other given fields + [] + let byFieldsOrdered tableName howMatched queryFields orderFields = + use conn = Configuration.dbConn () + WithConn.Json.byFieldsOrdered tableName howMatched queryFields orderFields conn + + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The first JSON document if found, an empty JSON document otherwise + [] + let firstByFields tableName howMatched fields = + use conn = Configuration.dbConn () + WithConn.Json.firstByFields tableName howMatched fields conn + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) 
ordered by the given
+ /// fields in the document
+ ///
+ /// The table from which a document should be retrieved (may include schema)
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// Fields by which the results should be ordered
+ /// The first JSON document (in order) if found, an empty JSON document otherwise
+ []
+ let firstByFieldsOrdered tableName howMatched queryFields orderFields =
+ use conn = Configuration.dbConn ()
+ WithConn.Json.firstByFieldsOrdered tableName howMatched queryFields orderFields conn
+
+ /// Write all JSON documents in the given table to the given StreamWriter
+ /// The table from which documents should be retrieved (may include schema)
+ /// The StreamWriter to which the results should be written
+ []
+ let writeAll tableName writer =
+ use conn = Configuration.dbConn ()
+ WithConn.Json.writeAll tableName writer conn
+
+ ///
+ /// Write all JSON documents in the given table to the given StreamWriter, ordered by the given fields in
+ /// the document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The StreamWriter to which the results should be written
+ /// Fields by which the results should be ordered
+ []
+ let writeAllOrdered tableName writer orderFields =
+ use conn = Configuration.dbConn ()
+ WithConn.Json.writeAllOrdered tableName writer orderFields conn
+
+ /// Write a JSON document to the given StreamWriter by its ID
+ /// The table from which a document should be retrieved (may include schema)
+ /// The StreamWriter to which the results should be written
+ /// The ID of the document to retrieve
+ []
+ let writeById<'TKey> tableName writer (docId: 'TKey) =
+ use conn = Configuration.dbConn ()
+ WithConn.Json.writeById tableName writer docId conn
+
+ ///
+ /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.)
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The StreamWriter to which the results should be written
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ []
+ let writeByFields tableName writer howMatched fields =
+ use conn = Configuration.dbConn ()
+ WithConn.Json.writeByFields tableName writer howMatched fields conn
+
+ ///
+ /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.)
+ /// ordered by the given fields in the document
+ ///
+ /// The table from which documents should be retrieved (may include schema)
+ /// The StreamWriter to which the results should be written
+ /// Whether to match any or all of the field conditions
+ /// The field conditions to match
+ /// Fields by which the results should be ordered
+ []
+ let writeByFieldsOrdered tableName writer howMatched queryFields orderFields =
+ use conn = Configuration.dbConn ()
+ WithConn.Json.writeByFieldsOrdered tableName writer howMatched queryFields orderFields conn
+
+ ///
+ /// Write the first JSON document to the given StreamWriter matching JSON field comparisons
+ /// (->> =, etc.)
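(A hedged sketch of the implicit-connection JSON writers; the table name and condition are invented, and the return type is whatever the underlying WithConn writer produces.)

    // Sketch: stream matching documents as a JSON array, letting the library manage the connection
    let writeActive (writer: StreamWriter) =
        // assumed to execute the query and write "[...]" (or "[]" for no matches) to the writer
        Json.writeByFields "people" writer FieldMatch.Any [ Field.Equal "Status" "Active" ]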
+ /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + [] + let writeFirstByFields tableName writer howMatched fields = + use conn = Configuration.dbConn () + WithConn.Json.writeFirstByFields tableName writer howMatched fields conn + + /// + /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// (->> =, etc.) ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + [] + let writeFirstByFieldsOrdered tableName writer howMatched queryFields orderFields = + use conn = Configuration.dbConn () + WithConn.Json.writeFirstByFieldsOrdered tableName writer howMatched queryFields orderFields conn + + /// Commands to update documents [] module Update = @@ -360,7 +578,7 @@ module Patch = WithConn.Patch.byId tableName docId patch conn /// - /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) + /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) /// /// The table in which documents should be patched (may include schema) /// Whether to match any or all of the field conditions @@ -408,7 +626,7 @@ module Delete = use conn = Configuration.dbConn () WithConn.Delete.byId tableName docId conn - /// Delete documents by matching a JSON field comparison query (->> =, etc.) + /// Delete documents by matching a JSON field comparison query (->> =, etc.) /// The table in which documents should be deleted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match diff --git a/src/Sqlite/WithConn.fs b/src/Sqlite/WithConn.fs index 0a163b0..1006a3f 100644 --- a/src/Sqlite/WithConn.fs +++ b/src/Sqlite/WithConn.fs @@ -630,7 +630,6 @@ module Json = /// Whether to match any or all of the field conditions /// The field conditions to match /// The SqliteConnection to use to execute the query - /// The first JSON document if found, an empty JSON document otherwise [] let writeFirstByFields tableName (writer: StreamWriter) howMatched fields conn = backgroundTask { let! json = @@ -649,7 +648,6 @@ module Json = /// The field conditions to match /// Fields by which the results should be ordered /// The SqliteConnection to use to execute the query - /// The first JSON document (in order) if found, an empty JSON document otherwise [] let writeFirstByFieldsOrdered tableName (writer: StreamWriter) howMatched queryFields orderFields conn = backgroundTask { -- 2.47.2 From 812ef06af5ceac3e933eeb165dafb83d97995797 Mon Sep 17 00:00:00 2001 From: "Daniel J. 
Summers" Date: Mon, 7 Apr 2025 17:26:47 -0400 Subject: [PATCH 12/22] Add F# SQLite Json tests --- src/Sqlite/Extensions.fs | 555 ++++++++++++++++++----- src/Tests/SqliteExtensionTests.fs | 725 +++++++++++++++++++++++++++--- src/Tests/SqliteTests.fs | 587 +++++++++++++++++++++++- src/Tests/Types.fs | 16 + 4 files changed, 1706 insertions(+), 177 deletions(-) diff --git a/src/Sqlite/Extensions.fs b/src/Sqlite/Extensions.fs index 901bc0a..30160ae 100644 --- a/src/Sqlite/Extensions.fs +++ b/src/Sqlite/Extensions.fs @@ -1,6 +1,7 @@ namespace BitBadger.Documents.Sqlite open Microsoft.Data.Sqlite +open WithConn /// F# extensions for the SqliteConnection type [] @@ -14,21 +15,45 @@ module Extensions = /// The mapping function between the document and the domain item /// A list of results for the given query member conn.customList<'TDoc> query parameters mapFunc = - WithConn.Custom.list<'TDoc> query parameters mapFunc conn + Custom.list<'TDoc> query parameters mapFunc conn + + /// Execute a query that returns a JSON array of results + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// A JSON array of results for the given query + member conn.customJsonArray query parameters mapFunc = + Custom.jsonArray query parameters mapFunc conn + + /// Execute a query, writing its results to the given StreamWriter + /// The query to retrieve the results + /// Parameters to use for the query + /// The StreamWriter to which the results should be written + /// The mapping function to extract the document + member conn.writeCustomJsonArray query parameters writer mapFunc = + Custom.writeJsonArray query parameters writer mapFunc conn /// Execute a query that returns one or no results /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// Some with the first matching result, or None if not found + /// Some with the first matching result, or None if not found member conn.customSingle<'TDoc> query parameters mapFunc = - WithConn.Custom.single<'TDoc> query parameters mapFunc conn + Custom.single<'TDoc> query parameters mapFunc conn + + /// Execute a query that returns one or no JSON documents + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The JSON document with the first matching result, or an empty document if not found + member conn.customJsonSingle query parameters mapFunc = + Custom.jsonSingle query parameters mapFunc conn /// Execute a query that returns no results /// The query to retrieve the results /// Parameters to use for the query member conn.customNonQuery query parameters = - WithConn.Custom.nonQuery query parameters conn + Custom.nonQuery query parameters conn /// Execute a query that returns a scalar value /// The query to retrieve the value @@ -36,25 +61,25 @@ module Extensions = /// The mapping function to obtain the value /// The scalar value for the query member conn.customScalar<'T when 'T: struct> query parameters mapFunc = - WithConn.Custom.scalar<'T> query parameters mapFunc conn + Custom.scalar<'T> query parameters mapFunc conn /// Create a document table /// The table whose existence should be ensured (may include schema) member conn.ensureTable name = - WithConn.Definition.ensureTable name conn + Definition.ensureTable name conn /// Create an index on field(s) within documents in the specified table /// The table to be indexed (may 
include schema) /// The name of the index to create /// One or more fields to be indexed member conn.ensureFieldIndex tableName indexName fields = - WithConn.Definition.ensureFieldIndex tableName indexName fields conn + Definition.ensureFieldIndex tableName indexName fields conn /// Insert a new document /// The table into which the document should be inserted (may include schema) /// The document to be inserted member conn.insert<'TDoc> tableName (document: 'TDoc) = - WithConn.Document.insert<'TDoc> tableName document conn + insert<'TDoc> tableName document conn /// /// Save a document, inserting it if it does not exist and updating it if it does (AKA "upsert") @@ -62,68 +87,68 @@ module Extensions = /// The table into which the document should be saved (may include schema) /// The document to be saved member conn.save<'TDoc> tableName (document: 'TDoc) = - WithConn.Document.save tableName document conn + save tableName document conn /// Count all documents in a table /// The table in which documents should be counted (may include schema) /// The count of the documents in the table member conn.countAll tableName = - WithConn.Count.all tableName conn + Count.all tableName conn - /// Count matching documents using JSON field comparisons (->> =, etc.) + /// Count matching documents using JSON field comparisons (->> =, etc.) /// The table in which documents should be counted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// The count of matching documents in the table member conn.countByFields tableName howMatched fields = - WithConn.Count.byFields tableName howMatched fields conn + Count.byFields tableName howMatched fields conn /// Determine if a document exists for the given ID /// The table in which existence should be checked (may include schema) /// The ID of the document whose existence should be checked /// True if a document exists, false if not member conn.existsById tableName (docId: 'TKey) = - WithConn.Exists.byId tableName docId conn + Exists.byId tableName docId conn - /// Determine if a document exists using JSON field comparisons (->> =, etc.) + /// Determine if a document exists using JSON field comparisons (->> =, etc.) 
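A quick sketch of how the count and existence members compose in a task (illustrative only; the table, field, and ID values are placeholders, and the usual opens are assumed):

    open BitBadger.Documents
    open BitBadger.Documents.Sqlite
    open Microsoft.Data.Sqlite

    let euStats (conn: SqliteConnection) = backgroundTask {
        // int64 count of documents whose (placeholder) Region field equals "EU"
        let! euCount = conn.countByFields "customer" Any [ Field.Equal "Region" "EU" ]
        // true when a document with the (placeholder) ID "123" exists
        let! has123 = conn.existsById "customer" "123"
        return euCount, has123
    }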
/// The table in which existence should be checked (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// True if any matching documents exist, false if not member conn.existsByFields tableName howMatched fields = - WithConn.Exists.byFields tableName howMatched fields conn + Exists.byFields tableName howMatched fields conn /// Retrieve all documents in the given table /// The table from which documents should be retrieved (may include schema) /// All documents from the given table member conn.findAll<'TDoc> tableName = - WithConn.Find.all<'TDoc> tableName conn + Find.all<'TDoc> tableName conn /// Retrieve all documents in the given table ordered by the given fields in the document /// The table from which documents should be retrieved (may include schema) /// Fields by which the results should be ordered /// All documents from the given table, ordered by the given fields member conn.findAllOrdered<'TDoc> tableName orderFields = - WithConn.Find.allOrdered<'TDoc> tableName orderFields conn + Find.allOrdered<'TDoc> tableName orderFields conn /// Retrieve a document by its ID /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve - /// Some with the document if found, None otherwise + /// Some with the document if found, None otherwise member conn.findById<'TKey, 'TDoc> tableName (docId: 'TKey) = - WithConn.Find.byId<'TKey, 'TDoc> tableName docId conn + Find.byId<'TKey, 'TDoc> tableName docId conn - /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// Retrieve documents matching JSON field comparisons (->> =, etc.) /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// All documents matching the given fields member conn.findByFields<'TDoc> tableName howMatched fields = - WithConn.Find.byFields<'TDoc> tableName howMatched fields conn + Find.byFields<'TDoc> tableName howMatched fields conn /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields - /// in the document + /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in + /// the document /// /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions @@ -131,18 +156,18 @@ module Extensions = /// Fields by which the results should be ordered /// All documents matching the given fields, ordered by the other given fields member conn.findByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields = - WithConn.Find.byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn + Find.byFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) 
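The typed find members deserialize documents into a caller-supplied type; a sketch using a hypothetical Customer record and placeholder table, ordered descending by ID in the same "Id DESC" syntax the tests later in this patch use:

    open BitBadger.Documents
    open BitBadger.Documents.Sqlite
    open Microsoft.Data.Sqlite

    // Customer and the "customer" table are placeholders for illustration
    type Customer = { Id: string; Region: string }

    let newestEuCustomers (conn: SqliteConnection) = backgroundTask {
        return!
            conn.findByFieldsOrdered<Customer>
                "customer" Any [ Field.Equal "Region" "EU" ] [ Field.Named "Id DESC" ]
    }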
/// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// Some with the first document, or None if not found + /// Some with the first document, or None if not found member conn.findFirstByFields<'TDoc> tableName howMatched fields = - WithConn.Find.firstByFields<'TDoc> tableName howMatched fields conn + Find.firstByFields<'TDoc> tableName howMatched fields conn /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the /// given fields in the document /// /// The table from which a document should be retrieved (may include schema) @@ -150,17 +175,148 @@ module Extensions = /// The field conditions to match /// Fields by which the results should be ordered /// - /// Some with the first document ordered by the given fields, or None if not found + /// Some with the first document ordered by the given fields, or None if not found /// member conn.findFirstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields = - WithConn.Find.firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn + Find.firstByFieldsOrdered<'TDoc> tableName howMatched queryFields orderFields conn + + /// Retrieve all JSON documents in the given table + /// The table from which documents should be retrieved (may include schema) + /// All JSON documents from the given table + member conn.jsonAll tableName = + Json.all tableName conn + + /// Retrieve all JSON documents in the given table ordered by the given fields in the document + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// All JSON documents from the given table, ordered by the given fields + member conn.jsonAllOrdered tableName orderFields = + Json.allOrdered tableName orderFields conn + + /// Retrieve a JSON document by its ID + /// The table from which a document should be retrieved (may include schema) + /// The ID of the document to retrieve + /// The JSON document if found, an empty JSON document otherwise + member conn.jsonById<'TKey> tableName (docId: 'TKey) = + Json.byId tableName docId conn + + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// All JSON documents matching the given fields + member conn.jsonByFields tableName howMatched fields = + Json.byFields tableName howMatched fields conn + + /// + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) ordered by the given + /// fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// All JSON documents matching the given fields, ordered by the other given fields + member conn.jsonByFieldsOrdered tableName howMatched queryFields orderFields = + Json.byFieldsOrdered tableName howMatched queryFields orderFields conn + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) 
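The json* members return raw JSON text rather than deserialized objects, which suits pass-through APIs; a sketch against the same placeholder table and IDs:

    open BitBadger.Documents
    open BitBadger.Documents.Sqlite
    open Microsoft.Data.Sqlite

    let customerJson (conn: SqliteConnection) = backgroundTask {
        // "{...}" for the (placeholder) ID "123", or "{}" when it does not exist
        let! one = conn.jsonById "customer" "123"
        // "[{...},...]" for matches, or "[]" when nothing matches
        let! many = conn.jsonByFields "customer" Any [ Field.Equal "Region" "EU" ]
        return one, many
    }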
+ /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The first JSON document if found, an empty JSON document otherwise + member conn.jsonFirstByFields tableName howMatched fields = + Json.firstByFields tableName howMatched fields conn + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) ordered by the + /// given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The first JSON document (in order) if found, an empty JSON document otherwise + member conn.jsonFirstByFieldsOrdered tableName howMatched queryFields orderFields = + Json.firstByFieldsOrdered tableName howMatched queryFields orderFields conn + + /// Write all JSON documents in the given table to the given StreamWriter + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + member conn.writeJsonAll tableName writer = + Json.writeAll tableName writer conn + + /// + /// Write all JSON documents in the given table to the given StreamWriter, ordered by the given + /// fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Fields by which the results should be ordered + member conn.writeJsonAllOrdered tableName writer orderFields = + Json.writeAllOrdered tableName writer orderFields conn + + /// Write a JSON document to the given StreamWriter by its ID + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The ID of the document to retrieve + member conn.writeJsonById<'TKey> tableName writer (docId: 'TKey) = + Json.writeById tableName writer docId conn + + /// + /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, + /// etc.) + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + member conn.writeJsonByFields tableName writer howMatched fields = + Json.writeByFields tableName writer howMatched fields conn + + /// + /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, + /// etc.) ordered by the given fields in the document + /// + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + member conn.writeJsonByFieldsOrdered tableName writer howMatched queryFields orderFields = + Json.writeByFieldsOrdered tableName writer howMatched queryFields orderFields conn + + /// + /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// (->> =, etc.)
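The writeJson* members stream the same JSON the json* members return; a sketch that captures the output through a MemoryStream, mirroring the test helpers added later in this patch (all names are placeholders):

    open System.IO
    open BitBadger.Documents.Sqlite
    open Microsoft.Data.Sqlite

    let captureAllJson (conn: SqliteConnection) = backgroundTask {
        use stream = new MemoryStream()
        use writer = new StreamWriter(stream, AutoFlush = true)
        do! conn.writeJsonAll "customer" writer   // "customer" is a placeholder table
        stream.Position <- 0L
        use reader = new StreamReader(stream)
        return reader.ReadToEnd()   // the JSON array that was just written
    }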
+ /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + member conn.writeJsonFirstByFields tableName writer howMatched fields = + Json.writeFirstByFields tableName writer howMatched fields conn + + /// + /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// (->> =, etc.) ordered by the given fields in the document + /// + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + member conn.writeJsonFirstByFieldsOrdered tableName writer howMatched queryFields orderFields = + Json.writeFirstByFieldsOrdered tableName writer howMatched queryFields orderFields conn /// Update (replace) an entire document by its ID /// The table in which a document should be updated (may include schema) /// The ID of the document to be updated (replaced) /// The new document member conn.updateById tableName (docId: 'TKey) (document: 'TDoc) = - WithConn.Update.byId tableName docId document conn + Update.byId tableName docId document conn /// /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the @@ -170,32 +326,31 @@ module Extensions = /// The function to obtain the ID of the document /// The new document member conn.updateByFunc tableName (idFunc: 'TDoc -> 'TKey) (document: 'TDoc) = - WithConn.Update.byFunc tableName idFunc document conn + Update.byFunc tableName idFunc document conn /// Patch a document by its ID /// The table in which a document should be patched (may include schema) /// The ID of the document to patch /// The partial document to patch the existing document member conn.patchById tableName (docId: 'TKey) (patch: 'TPatch) = - WithConn.Patch.byId tableName docId patch conn + Patch.byId tableName docId patch conn /// - /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, - /// etc.) + /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) 
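Replace versus patch in one sketch; the partial document passed to patchById can be any serializable shape, an anonymous record being one convenient choice (all names here are placeholders):

    open BitBadger.Documents
    open BitBadger.Documents.Sqlite
    open Microsoft.Data.Sqlite

    let relocateCustomer (conn: SqliteConnection) = backgroundTask {
        // replace the entire stored document for the placeholder ID "123"
        do! conn.updateById "customer" "123" {| Id = "123"; Region = "US" |}
        // or change only the Region field, leaving the rest of the document intact
        do! conn.patchById "customer" "123" {| Region = "EU" |}
    }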
/// /// The table in which documents should be patched (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// The partial document to patch the existing document member conn.patchByFields tableName howMatched fields (patch: 'TPatch) = - WithConn.Patch.byFields tableName howMatched fields patch conn + Patch.byFields tableName howMatched fields patch conn /// Remove fields from a document by the document's ID /// The table in which a document should be modified (may include schema) /// The ID of the document to modify /// One or more field names to remove from the document member conn.removeFieldsById tableName (docId: 'TKey) fieldNames = - WithConn.RemoveFields.byId tableName docId fieldNames conn + RemoveFields.byId tableName docId fieldNames conn /// Remove fields from documents via a comparison on JSON fields in the document /// The table in which documents should be modified (may include schema) @@ -203,20 +358,20 @@ module Extensions = /// The field conditions to match /// One or more field names to remove from the matching documents member conn.removeFieldsByFields tableName howMatched fields fieldNames = - WithConn.RemoveFields.byFields tableName howMatched fields fieldNames conn + RemoveFields.byFields tableName howMatched fields fieldNames conn /// Delete a document by its ID /// The table in which a document should be deleted (may include schema) /// The ID of the document to delete member conn.deleteById tableName (docId: 'TKey) = - WithConn.Delete.byId tableName docId conn + Delete.byId tableName docId conn - /// Delete documents by matching a JSON field comparison query (->> =, etc.) + /// Delete documents by matching a JSON field comparison query (->> =, etc.) /// The table in which documents should be deleted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match member conn.deleteByFields tableName howMatched fields = - WithConn.Delete.byFields tableName howMatched fields conn + Delete.byFields tableName howMatched fields conn open System.Runtime.CompilerServices @@ -225,36 +380,66 @@ open System.Runtime.CompilerServices type SqliteConnectionCSharpExtensions = /// Execute a query that returns a list of results - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item /// A list of results for the given query [] static member inline CustomList<'TDoc>(conn, query, parameters, mapFunc: System.Func) = - WithConn.Custom.List<'TDoc>(query, parameters, mapFunc, conn) + Custom.List<'TDoc>(query, parameters, mapFunc, conn) + /// Execute a query that returns a JSON array of results + /// The SqliteConnection on which to run the query + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// A JSON array of results for the given query + [] + static member inline CustomJsonArray(conn, query, parameters, mapFunc) = + Custom.JsonArray(query, parameters, mapFunc, conn) + + /// Execute a query, writing its results to the given StreamWriter + /// The SqliteConnection on which to run the query + /// The query to retrieve the results + /// Parameters to use for the query + /// The StreamWriter to which the results should be written + /// The mapping function to extract the document + [] + static member inline 
WriteCustomJsonArray(conn, query, parameters, writer, mapFunc) = + Custom.WriteJsonArray(query, parameters, writer, mapFunc, conn) + /// Execute a query that returns one or no results - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The query to retrieve the results /// Parameters to use for the query /// The mapping function between the document and the domain item - /// The first matching result, or null if not found + /// The first matching result, or null if not found [] static member inline CustomSingle<'TDoc when 'TDoc: null and 'TDoc: not struct>( conn, query, parameters, mapFunc: System.Func) = - WithConn.Custom.Single<'TDoc>(query, parameters, mapFunc, conn) + Custom.Single<'TDoc>(query, parameters, mapFunc, conn) + + /// Execute a query that returns one or no JSON documents + /// The SqliteConnection on which to run the query + /// The query to retrieve the results + /// Parameters to use for the query + /// The mapping function to extract the document + /// The JSON document with the first matching result, or an empty document if not found + [] + static member inline CustomJsonSingle(conn, query, parameters, mapFunc) = + Custom.JsonSingle(query, parameters, mapFunc, conn) /// Execute a query that returns no results - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The query to retrieve the results /// Parameters to use for the query [] static member inline CustomNonQuery(conn, query, parameters) = - WithConn.Custom.nonQuery query parameters conn + Custom.nonQuery query parameters conn /// Execute a query that returns a scalar value - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The query to retrieve the value /// Parameters to use for the query /// The mapping function to obtain the value @@ -262,118 +447,118 @@ type SqliteConnectionCSharpExtensions = [] static member inline CustomScalar<'T when 'T: struct>( conn, query, parameters, mapFunc: System.Func) = - WithConn.Custom.Scalar<'T>(query, parameters, mapFunc, conn) + Custom.Scalar<'T>(query, parameters, mapFunc, conn) /// Create a document table - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table whose existence should be ensured (may include schema) [] static member inline EnsureTable(conn, name) = - WithConn.Definition.ensureTable name conn + Definition.ensureTable name conn /// Create an index on field(s) within documents in the specified table - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table to be indexed (may include schema) /// The name of the index to create /// One or more fields to be indexed [] static member inline EnsureFieldIndex(conn, tableName, indexName, fields) = - WithConn.Definition.ensureFieldIndex tableName indexName fields conn + Definition.ensureFieldIndex tableName indexName fields conn /// Insert a new document - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table into which the document should be inserted (may include schema) /// The document to be inserted [] static member inline Insert<'TDoc>(conn, tableName, document: 'TDoc) = - WithConn.Document.insert<'TDoc> tableName document conn + insert<'TDoc> tableName document conn /// Save a document, inserting it if it does not exist and updating it if it does (AKA 
"upsert") - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table into which the document should be saved (may include schema) /// The document to be saved [] static member inline Save<'TDoc>(conn, tableName, document: 'TDoc) = - WithConn.Document.save<'TDoc> tableName document conn + save<'TDoc> tableName document conn /// Count all documents in a table - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table in which documents should be counted (may include schema) /// The count of the documents in the table [] static member inline CountAll(conn, tableName) = - WithConn.Count.all tableName conn + Count.all tableName conn - /// Count matching documents using JSON field comparisons (->> =, etc.) - /// The SqliteConnection on which to run the query + /// Count matching documents using JSON field comparisons (->> =, etc.) + /// The SqliteConnection on which to run the query /// The table in which documents should be counted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// The count of matching documents in the table [] static member inline CountByFields(conn, tableName, howMatched, fields) = - WithConn.Count.byFields tableName howMatched fields conn + Count.byFields tableName howMatched fields conn /// Determine if a document exists for the given ID - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table in which existence should be checked (may include schema) /// The ID of the document whose existence should be checked /// True if a document exists, false if not [] static member inline ExistsById<'TKey>(conn, tableName, docId: 'TKey) = - WithConn.Exists.byId tableName docId conn + Exists.byId tableName docId conn - /// Determine if a document exists using JSON field comparisons (->> =, etc.) - /// The SqliteConnection on which to run the query + /// Determine if a document exists using JSON field comparisons (->> =, etc.) 
+ /// The SqliteConnection on which to run the query /// The table in which existence should be checked (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// True if any matching documents exist, false if not [] static member inline ExistsByFields(conn, tableName, howMatched, fields) = - WithConn.Exists.byFields tableName howMatched fields conn + Exists.byFields tableName howMatched fields conn /// Retrieve all documents in the given table - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// All documents from the given table [] static member inline FindAll<'TDoc>(conn, tableName) = - WithConn.Find.All<'TDoc>(tableName, conn) + Find.All<'TDoc>(tableName, conn) /// Retrieve all documents in the given table ordered by the given fields in the document - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// Fields by which the results should be ordered /// All documents from the given table, ordered by the given fields [] static member inline FindAllOrdered<'TDoc>(conn, tableName, orderFields) = - WithConn.Find.AllOrdered<'TDoc>(tableName, orderFields, conn) + Find.AllOrdered<'TDoc>(tableName, orderFields, conn) /// Retrieve a document by its ID - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// The ID of the document to retrieve - /// The document if found, null otherwise + /// The document if found, null otherwise [] static member inline FindById<'TKey, 'TDoc when 'TDoc: null and 'TDoc: not struct>(conn, tableName, docId: 'TKey) = - WithConn.Find.ById<'TKey, 'TDoc>(tableName, docId, conn) + Find.ById<'TKey, 'TDoc>(tableName, docId, conn) - /// Retrieve documents matching JSON field comparisons (->> =, etc.) - /// The SqliteConnection on which to run the query + /// Retrieve documents matching JSON field comparisons (->> =, etc.) + /// The SqliteConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// All documents matching the given fields [] static member inline FindByFields<'TDoc>(conn, tableName, howMatched, fields) = - WithConn.Find.ByFields<'TDoc>(tableName, howMatched, fields, conn) + Find.ByFields<'TDoc>(tableName, howMatched, fields, conn) /// - /// Retrieve documents matching JSON field comparisons (->> =, etc.) ordered by the given fields in + /// Retrieve documents matching JSON field comparisons (->> =, etc.) 
ordered by the given fields in /// the document /// - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table from which documents should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match @@ -381,108 +566,264 @@ type SqliteConnectionCSharpExtensions = /// All documents matching the given fields, ordered by the other given fields [] static member inline FindByFieldsOrdered<'TDoc>(conn, tableName, howMatched, queryFields, orderFields) = - WithConn.Find.ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) + Find.ByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) - /// The SqliteConnection on which to run the query + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) + /// The SqliteConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match - /// The first document, or null if not found + /// The first document, or null if not found [] static member inline FindFirstByFields<'TDoc when 'TDoc: null and 'TDoc: not struct>( conn, tableName, howMatched, fields) = - WithConn.Find.FirstByFields<'TDoc>(tableName, howMatched, fields, conn) + Find.FirstByFields<'TDoc>(tableName, howMatched, fields, conn) /// - /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given + /// Retrieve the first document matching JSON field comparisons (->> =, etc.) ordered by the given /// fields in the document /// - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table from which a document should be retrieved (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered - /// The first document ordered by the given fields, or null if not found + /// The first document ordered by the given fields, or null if not found [] static member inline FindFirstByFieldsOrdered<'TDoc when 'TDoc: null and 'TDoc: not struct>( conn, tableName, howMatched, queryFields, orderFields) = - WithConn.Find.FirstByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) + Find.FirstByFieldsOrdered<'TDoc>(tableName, howMatched, queryFields, orderFields, conn) + + /// Retrieve all JSON documents in the given table + /// The SqliteConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// All JSON documents from the given table + [] + static member inline JsonAll(conn, tableName) = + Json.all tableName conn + + /// Retrieve all JSON documents in the given table ordered by the given fields in the document + /// The SqliteConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// Fields by which the results should be ordered + /// All JSON documents from the given table, ordered by the given fields + [] + static member inline JsonAllOrdered(conn, tableName, orderFields) = + Json.allOrdered tableName orderFields conn + + /// Retrieve a JSON document by its ID + /// The SqliteConnection on which to run the query + /// The table from which a document should be retrieved (may 
include schema) + /// The ID of the document to retrieve + /// The JSON document if found, an empty JSON document otherwise + [] + static member inline JsonById<'TKey>(conn, tableName, docId: 'TKey) = + Json.byId tableName docId conn + + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) + /// The SqliteConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// All JSON documents matching the given fields + [] + static member inline JsonByFields(conn, tableName, howMatched, fields) = + Json.byFields tableName howMatched fields conn + + /// + /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) ordered by the given fields + /// in the document + /// + /// The SqliteConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// All JSON documents matching the given fields, ordered by the other given fields + [] + static member inline JsonByFieldsOrdered(conn, tableName, howMatched, queryFields, orderFields) = + Json.byFieldsOrdered tableName howMatched queryFields orderFields conn + + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) + /// The SqliteConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// The first JSON document if found, an empty JSON document otherwise + [] + static member inline JsonFirstByFields(conn, tableName, howMatched, fields) = + Json.firstByFields tableName howMatched fields conn + + /// + /// Retrieve the first JSON document matching JSON field comparisons (->> =, etc.) 
ordered by the given + /// fields in the document + /// + /// The SqliteConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + /// The first JSON document (in order) if found, an empty JSON document otherwise + [] + static member inline JsonFirstByFieldsOrdered(conn, tableName, howMatched, queryFields, orderFields) = + Json.firstByFieldsOrdered tableName howMatched queryFields orderFields conn + + /// Write all JSON documents in the given table to the given StreamWriter + /// The SqliteConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + [] + static member inline WriteJsonAll(conn, tableName, writer) = + Json.writeAll tableName writer conn + + /// + /// Write all JSON documents in the given table to the given StreamWriter, ordered by the given fields in + /// the document + /// + /// The SqliteConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Fields by which the results should be ordered + [] + static member inline WriteJsonAllOrdered(conn, tableName, writer, orderFields) = + Json.writeAllOrdered tableName writer orderFields conn + + /// Write a JSON document to the given StreamWriter by its ID + /// The SqliteConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// The ID of the document to retrieve + [] + static member inline WriteJsonById<'TKey>(conn, tableName, writer, docId: 'TKey) = + Json.writeById tableName writer docId conn + + /// + /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.) + /// + /// The SqliteConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + [] + static member inline WriteJsonByFields(conn, tableName, writer, howMatched, fields) = + Json.writeByFields tableName writer howMatched fields conn + + /// + /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.) + /// ordered by the given fields in the document + /// + /// The SqliteConnection on which to run the query + /// The table from which documents should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + [] + static member inline WriteJsonByFieldsOrdered(conn, tableName, writer, howMatched, queryFields, orderFields) = + Json.writeByFieldsOrdered tableName writer howMatched queryFields orderFields conn + + /// + /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// (->> =, etc.)
+ /// + /// The SqliteConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + [] + static member inline WriteJsonFirstByFields(conn, tableName, writer, howMatched, fields) = + Json.writeFirstByFields tableName writer howMatched fields conn + + /// + /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// (->> =, etc.) ordered by the given fields in the document + /// + /// The SqliteConnection on which to run the query + /// The table from which a document should be retrieved (may include schema) + /// The StreamWriter to which the results should be written + /// Whether to match any or all of the field conditions + /// The field conditions to match + /// Fields by which the results should be ordered + [] + static member inline WriteJsonFirstByFieldsOrdered(conn, tableName, writer, howMatched, queryFields, orderFields) = + Json.writeFirstByFieldsOrdered tableName writer howMatched queryFields orderFields conn /// Update (replace) an entire document by its ID - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table in which a document should be updated (may include schema) /// The ID of the document to be updated (replaced) /// The new document [] static member inline UpdateById<'TKey, 'TDoc>(conn, tableName, docId: 'TKey, document: 'TDoc) = - WithConn.Update.byId tableName docId document conn + Update.byId tableName docId document conn /// /// Update (replace) an entire document by its ID, using the provided function to obtain the ID from the document /// - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table in which a document should be updated (may include schema) /// The function to obtain the ID of the document /// The new document [] static member inline UpdateByFunc<'TKey, 'TDoc>( conn, tableName, idFunc: System.Func<'TDoc, 'TKey>, document: 'TDoc) = - WithConn.Update.ByFunc(tableName, idFunc, document, conn) + Update.ByFunc(tableName, idFunc, document, conn) /// Patch a document by its ID - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table in which a document should be patched (may include schema) /// The ID of the document to patch /// The partial document to patch the existing document [] static member inline PatchById<'TKey, 'TPatch>(conn, tableName, docId: 'TKey, patch: 'TPatch) = - WithConn.Patch.byId tableName docId patch conn + Patch.byId tableName docId patch conn /// - /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) + /// Patch documents using a JSON field comparison query in the WHERE clause (->> =, etc.) 
/// - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table in which documents should be patched (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// The partial document to patch the existing document [] static member inline PatchByFields<'TPatch>(conn, tableName, howMatched, fields, patch: 'TPatch) = - WithConn.Patch.byFields tableName howMatched fields patch conn + Patch.byFields tableName howMatched fields patch conn /// Remove fields from a document by the document's ID - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table in which a document should be modified (may include schema) /// The ID of the document to modify /// One or more field names to remove from the document [] static member inline RemoveFieldsById<'TKey>(conn, tableName, docId: 'TKey, fieldNames) = - WithConn.RemoveFields.byId tableName docId fieldNames conn + RemoveFields.byId tableName docId fieldNames conn /// Remove fields from documents via a comparison on JSON fields in the document - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table in which documents should be modified (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match /// One or more field names to remove from the matching documents [] static member inline RemoveFieldsByFields(conn, tableName, howMatched, fields, fieldNames) = - WithConn.RemoveFields.byFields tableName howMatched fields fieldNames conn + RemoveFields.byFields tableName howMatched fields fieldNames conn /// Delete a document by its ID - /// The SqliteConnection on which to run the query + /// The SqliteConnection on which to run the query /// The table in which a document should be deleted (may include schema) /// The ID of the document to delete [] static member inline DeleteById<'TKey>(conn, tableName, docId: 'TKey) = - WithConn.Delete.byId tableName docId conn + Delete.byId tableName docId conn - /// Delete documents by matching a JSON field comparison query (->> =, etc.) - /// The SqliteConnection on which to run the query + /// Delete documents by matching a JSON field comparison query (->> =, etc.) + /// The SqliteConnection on which to run the query /// The table in which documents should be deleted (may include schema) /// Whether to match any or all of the field conditions /// The field conditions to match [] static member inline DeleteByFields(conn, tableName, howMatched, fields) = - WithConn.Delete.byFields tableName howMatched fields conn + Delete.byFields tableName howMatched fields conn diff --git a/src/Tests/SqliteExtensionTests.fs b/src/Tests/SqliteExtensionTests.fs index 8f0380e..b17c19e 100644 --- a/src/Tests/SqliteExtensionTests.fs +++ b/src/Tests/SqliteExtensionTests.fs @@ -1,5 +1,6 @@ module SqliteExtensionTests +open System.IO open System.Text.Json open BitBadger.Documents open BitBadger.Documents.Sqlite @@ -10,10 +11,44 @@ open Types /// Integration tests for the F# extensions on the SqliteConnection data type let integrationTests = - let loadDocs () = backgroundTask { - for doc in testDocuments do do! insert SqliteDb.TableName doc + let loadDocs (conn: SqliteConnection) = backgroundTask { + for doc in testDocuments do do! 
conn.insert SqliteDb.TableName doc } - testList "Sqlite.Extensions" [ + + /// Set up a stream writer for a test + let writeStream (stream: Stream) = + let writer = new StreamWriter(stream) + writer.AutoFlush <- true + writer + + /// Get the text of the given stream + let streamText (stream: Stream) = + stream.Position <- 0L + use reader = new StreamReader(stream) + reader.ReadToEnd() + + /// Verify a JSON array begins with "[" and ends with "]" + let verifyBeginEnd json = + Expect.stringStarts json "[" "The array should have started with `[`" + Expect.stringEnds json "]" "The array should have ended with `]`" + + /// Verify an empty JSON array + let verifyEmpty json = + Expect.equal json "[]" "There should be no documents returned" + + /// Verify an empty JSON document + let verifyNoDoc json = + Expect.equal json "{}" "There should be no document returned" + + /// Verify the presence of any of the given documents in the given JSON + let verifyAny (json: string) (docs: string list) = + match docs |> List.tryFind json.Contains with + | Some _ -> () + | None -> + let theDocs = docs |> String.concat " | " + Expect.isTrue false $"Could not find any of |{theDocs}| in {json}" + + ftestList "Sqlite.Extensions" [ testTask "ensureTable succeeds" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () @@ -108,7 +143,7 @@ let integrationTests = testTask "countAll succeeds" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! theCount = conn.countAll SqliteDb.TableName Expect.equal theCount 5L "There should have been 5 matching documents" @@ -116,7 +151,7 @@ let integrationTests = testTask "countByFields succeeds" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! theCount = conn.countByFields SqliteDb.TableName Any [ Field.Equal "Value" "purple" ] Expect.equal theCount 2L "There should have been 2 matching documents" @@ -125,7 +160,7 @@ let integrationTests = testTask "succeeds when a document exists" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! exists = conn.existsById SqliteDb.TableName "three" Expect.isTrue exists "There should have been an existing document" @@ -133,7 +168,7 @@ let integrationTests = testTask "succeeds when a document does not exist" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! exists = conn.existsById SqliteDb.TableName "seven" Expect.isFalse exists "There should not have been an existing document" @@ -143,7 +178,7 @@ let integrationTests = testTask "succeeds when documents exist" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! exists = conn.existsByFields SqliteDb.TableName Any [ Field.Equal "NumValue" 10 ] Expect.isTrue exists "There should have been existing documents" @@ -151,7 +186,7 @@ let integrationTests = testTask "succeeds when no matching documents exist" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! exists = conn.existsByFields SqliteDb.TableName Any [ Field.Equal "Nothing" "none" ] Expect.isFalse exists "There should not have been any existing documents" @@ -185,7 +220,7 @@ let integrationTests = testTask "succeeds when ordering numerically" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! 
results = conn.findAllOrdered SqliteDb.TableName [ Field.Named "n:NumValue" ] Expect.hasLength results 5 "There should have been 5 documents returned" @@ -197,7 +232,7 @@ let integrationTests = testTask "succeeds when ordering numerically descending" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! results = conn.findAllOrdered SqliteDb.TableName [ Field.Named "n:NumValue DESC" ] Expect.hasLength results 5 "There should have been 5 documents returned" @@ -209,7 +244,7 @@ let integrationTests = testTask "succeeds when ordering alphabetically" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! results = conn.findAllOrdered SqliteDb.TableName [ Field.Named "Id DESC" ] Expect.hasLength results 5 "There should have been 5 documents returned" @@ -223,7 +258,7 @@ let integrationTests = testTask "succeeds when a document is found" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! doc = conn.findById SqliteDb.TableName "two" Expect.isSome doc "There should have been a document returned" @@ -232,7 +267,7 @@ let integrationTests = testTask "succeeds when a document is not found" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! doc = conn.findById SqliteDb.TableName "three hundred eighty-seven" Expect.isNone doc "There should not have been a document returned" @@ -242,7 +277,7 @@ let integrationTests = testTask "succeeds when documents are found" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! docs = conn.findByFields SqliteDb.TableName Any [ Field.Equal "Sub.Foo" "green" ] Expect.hasLength docs 2 "There should have been two documents returned" @@ -250,7 +285,7 @@ let integrationTests = testTask "succeeds when documents are not found" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! docs = conn.findByFields SqliteDb.TableName Any [ Field.Equal "Value" "mauve" ] Expect.isEmpty docs "There should have been no documents returned" @@ -260,7 +295,7 @@ let integrationTests = testTask "succeeds when sorting ascending" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! docs = conn.findByFieldsOrdered @@ -271,7 +306,7 @@ let integrationTests = testTask "succeeds when sorting descending" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! docs = conn.findByFieldsOrdered @@ -284,7 +319,7 @@ let integrationTests = testTask "succeeds when a document is found" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! doc = conn.findFirstByFields SqliteDb.TableName Any [ Field.Equal "Value" "another" ] Expect.isSome doc "There should have been a document returned" @@ -293,7 +328,7 @@ let integrationTests = testTask "succeeds when multiple documents are found" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! doc = conn.findFirstByFields SqliteDb.TableName Any [ Field.Equal "Sub.Foo" "green" ] Expect.isSome doc "There should have been a document returned" @@ -302,7 +337,7 @@ let integrationTests = testTask "succeeds when a document is not found" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! 
loadDocs conn let! doc = conn.findFirstByFields SqliteDb.TableName Any [ Field.Equal "Value" "absent" ] Expect.isNone doc "There should not have been a document returned" @@ -312,7 +347,7 @@ let integrationTests = testTask "succeeds when sorting ascending" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! doc = conn.findFirstByFieldsOrdered @@ -323,7 +358,7 @@ let integrationTests = testTask "succeeds when sorting descending" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let! doc = conn.findFirstByFieldsOrdered @@ -332,11 +367,500 @@ let integrationTests = Expect.equal "four" doc.Value.Id "An incorrect document was returned" } ] + testList "jsonAll" [ + testTask "succeeds when there is data" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + + do! conn.insert SqliteDb.TableName { Foo = "one"; Bar = "two" } + do! conn.insert SqliteDb.TableName { Foo = "three"; Bar = "four" } + do! conn.insert SqliteDb.TableName { Foo = "five"; Bar = "six" } + + let! json = conn.jsonAll SqliteDb.TableName + verifyBeginEnd json + Expect.stringContains json """{"Foo":"one","Bar":"two"}""" "The first document was not found" + Expect.stringContains json """{"Foo":"three","Bar":"four"}""" "The second document was not found" + Expect.stringContains json """{"Foo":"five","Bar":"six"}""" "The third document was not found" + } + testTask "succeeds when there is no data" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + let! json = conn.jsonAll SqliteDb.TableName + verifyEmpty json + } + ] + testList "jsonAllOrdered" [ + testTask "succeeds when ordering numerically" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = conn.jsonAllOrdered SqliteDb.TableName [ Field.Named "n:NumValue" ] + Expect.equal + json + $"[{JsonDocument.one},{JsonDocument.three},{JsonDocument.two},{JsonDocument.four},{JsonDocument.five}]" + "The documents were not ordered correctly" + } + testTask "succeeds when ordering numerically descending" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = conn.jsonAllOrdered SqliteDb.TableName [ Field.Named "n:NumValue DESC" ] + Expect.equal + json + $"[{JsonDocument.five},{JsonDocument.four},{JsonDocument.two},{JsonDocument.three},{JsonDocument.one}]" + "The documents were not ordered correctly" + } + testTask "succeeds when ordering alphabetically" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = conn.jsonAllOrdered SqliteDb.TableName [ Field.Named "Id DESC" ] + Expect.equal + json + $"[{JsonDocument.two},{JsonDocument.three},{JsonDocument.one},{JsonDocument.four},{JsonDocument.five}]" + "The documents were not ordered correctly" + } + ] + testList "jsonById" [ + testTask "succeeds when a document is found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = conn.jsonById SqliteDb.TableName "two" + Expect.equal json JsonDocument.two "The incorrect document was returned" + } + testTask "succeeds when a document is not found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = conn.jsonById SqliteDb.TableName "three hundred eighty-seven" + verifyNoDoc json + } + ] + testList "jsonByFields" [ + testTask "succeeds when documents are found" { + use! 
db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = conn.jsonByFields SqliteDb.TableName Any [ Field.Greater "NumValue" 15 ] + verifyBeginEnd json + Expect.stringContains json JsonDocument.four "Document `four` should have been returned" + Expect.stringContains json JsonDocument.five "Document `five` should have been returned" + } + testTask "succeeds when documents are found using IN with numeric field" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = conn.jsonByFields SqliteDb.TableName All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] + Expect.equal json $"[{JsonDocument.three}]" "There should have been one document returned" + } + testTask "succeeds when documents are not found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = conn.jsonByFields SqliteDb.TableName Any [ Field.Greater "NumValue" 100 ] + verifyEmpty json + } + testTask "succeeds for InArray when matching documents exist" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! conn.ensureTable SqliteDb.TableName + for doc in ArrayDocument.TestDocuments do do! conn.insert SqliteDb.TableName doc + + let! json = + conn.jsonByFields SqliteDb.TableName All [ Field.InArray "Values" SqliteDb.TableName [ "c" ] ] + verifyBeginEnd json + Expect.stringContains + json """{"Id":"first","Values":["a","b","c"]}""" "Document `first` should have been returned" + Expect.stringContains + json """{"Id":"second","Values":["c","d","e"]}""" "Document `second` should have been returned" + } + testTask "succeeds for InArray when no matching documents exist" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! conn.ensureTable SqliteDb.TableName + for doc in ArrayDocument.TestDocuments do do! conn.insert SqliteDb.TableName doc + + let! json = + conn.jsonByFields SqliteDb.TableName All [ Field.InArray "Values" SqliteDb.TableName [ "j" ] ] + verifyEmpty json + } + ] + testList "jsonByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = + conn.jsonByFieldsOrdered SqliteDb.TableName Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id" ] + Expect.equal json $"[{JsonDocument.five},{JsonDocument.four}]" "Incorrect documents were returned" + } + testTask "succeeds when sorting descending" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = + conn.jsonByFieldsOrdered + SqliteDb.TableName Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id DESC" ] + Expect.equal json $"[{JsonDocument.four},{JsonDocument.five}]" "Incorrect documents were returned" + } + testTask "succeeds when sorting case-sensitively" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = + conn.jsonByFieldsOrdered + SqliteDb.TableName All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "Value" ] + Expect.equal + json + $"[{JsonDocument.three},{JsonDocument.one},{JsonDocument.two}]" + "Documents not ordered correctly" + } + testTask "succeeds when sorting case-insensitively" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! 
json = + conn.jsonByFieldsOrdered + SqliteDb.TableName All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "i:Value" ] + Expect.equal + json + $"[{JsonDocument.three},{JsonDocument.two},{JsonDocument.one}]" + "Documents not ordered correctly" + } + ] + testList "jsonFirstByFields" [ + testTask "succeeds when a document is found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = conn.jsonFirstByFields SqliteDb.TableName Any [ Field.Equal "Value" "another" ] + Expect.equal json JsonDocument.two "The incorrect document was returned" + } + testTask "succeeds when multiple documents are found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = conn.jsonFirstByFields SqliteDb.TableName Any [ Field.Equal "Sub.Foo" "green" ] + Expect.notEqual json "{}" "There should have been a document returned" + verifyAny json [ JsonDocument.two; JsonDocument.four ] + } + testTask "succeeds when a document is not found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = conn.jsonFirstByFields SqliteDb.TableName Any [ Field.Equal "Value" "absent" ] + verifyNoDoc json + } + ] + testList "jsonFirstByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = + conn.jsonFirstByFieldsOrdered + SqliteDb.TableName Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar" ] + Expect.equal json JsonDocument.two "An incorrect document was returned" + } + testTask "succeeds when sorting descending" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! json = + conn.jsonFirstByFieldsOrdered + SqliteDb.TableName Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar DESC" ] + Expect.equal json JsonDocument.four "An incorrect document was returned" + } + ] + testList "writeJsonAll" [ + testTask "succeeds when there is data" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + + do! conn.insert SqliteDb.TableName { Foo = "one"; Bar = "two" } + do! conn.insert SqliteDb.TableName { Foo = "three"; Bar = "four" } + do! conn.insert SqliteDb.TableName { Foo = "five"; Bar = "six" } + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonAll SqliteDb.TableName writer + let json = streamText stream + verifyBeginEnd json + Expect.stringContains json """{"Foo":"one","Bar":"two"}""" "The first document was not found" + Expect.stringContains json """{"Foo":"three","Bar":"four"}""" "The second document was not found" + Expect.stringContains json """{"Foo":"five","Bar":"six"}""" "The third document was not found" + } + testTask "succeeds when there is no data" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonAll SqliteDb.TableName writer + verifyEmpty (streamText stream) + } + ] + testList "writeJsonAllOrdered" [ + testTask "succeeds when ordering numerically" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! 
conn.writeJsonAllOrdered SqliteDb.TableName writer [ Field.Named "n:NumValue" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.one},{JsonDocument.three},{JsonDocument.two},{JsonDocument.four},{JsonDocument.five}]" + "The documents were not ordered correctly" + } + testTask "succeeds when ordering numerically descending" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonAllOrdered SqliteDb.TableName writer [ Field.Named "n:NumValue DESC" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.five},{JsonDocument.four},{JsonDocument.two},{JsonDocument.three},{JsonDocument.one}]" + "The documents were not ordered correctly" + } + testTask "succeeds when ordering alphabetically" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonAllOrdered SqliteDb.TableName writer [ Field.Named "Id DESC" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.two},{JsonDocument.three},{JsonDocument.one},{JsonDocument.four},{JsonDocument.five}]" + "The documents were not ordered correctly" + } + ] + testList "writeJsonById" [ + testTask "succeeds when a document is found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonById SqliteDb.TableName writer "two" + Expect.equal (streamText stream) JsonDocument.two "The incorrect document was returned" + } + testTask "succeeds when a document is not found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonById SqliteDb.TableName writer "three hundred eighty-seven" + verifyNoDoc (streamText stream) + } + ] + testList "writeJsonByFields" [ + testTask "succeeds when documents are found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByFields SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] + let json = streamText stream + verifyBeginEnd json + Expect.stringContains json JsonDocument.four "Document `four` should have been returned" + Expect.stringContains json JsonDocument.five "Document `five` should have been returned" + } + testTask "succeeds when documents are found using IN with numeric field" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByFields SqliteDb.TableName writer All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] + Expect.equal (streamText stream) $"[{JsonDocument.three}]" "There should have been one document returned" + } + testTask "succeeds when documents are not found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByFields SqliteDb.TableName writer Any [ Field.Greater "NumValue" 100 ] + verifyEmpty (streamText stream) + } + testTask "succeeds for InArray when matching documents exist" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! 
conn.ensureTable SqliteDb.TableName + for doc in ArrayDocument.TestDocuments do do! conn.insert SqliteDb.TableName doc + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByFields + SqliteDb.TableName writer All [ Field.InArray "Values" SqliteDb.TableName [ "c" ] ] + let json = streamText stream + verifyBeginEnd json + Expect.stringContains + json """{"Id":"first","Values":["a","b","c"]}""" "Document `first` should have been returned" + Expect.stringContains + json """{"Id":"second","Values":["c","d","e"]}""" "Document `second` should have been returned" + } + testTask "succeeds for InArray when no matching documents exist" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! conn.ensureTable SqliteDb.TableName + for doc in ArrayDocument.TestDocuments do do! conn.insert SqliteDb.TableName doc + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByFields + SqliteDb.TableName writer All [ Field.InArray "Values" SqliteDb.TableName [ "j" ] ] + verifyEmpty (streamText stream) + } + ] + testList "writeJsonByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id" ] + Expect.equal + (streamText stream) $"[{JsonDocument.five},{JsonDocument.four}]" "Incorrect documents were returned" + } + testTask "succeeds when sorting descending" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id DESC" ] + Expect.equal + (streamText stream) $"[{JsonDocument.four},{JsonDocument.five}]" "Incorrect documents were returned" + } + testTask "succeeds when sorting case-sensitively" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByFieldsOrdered + SqliteDb.TableName writer All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "Value" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.three},{JsonDocument.one},{JsonDocument.two}]" + "Documents not ordered correctly" + } + testTask "succeeds when sorting case-insensitively" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonByFieldsOrdered + SqliteDb.TableName writer All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "i:Value" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.three},{JsonDocument.two},{JsonDocument.one}]" + "Documents not ordered correctly" + } + ] + testList "writeJsonFirstByFields" [ + testTask "succeeds when a document is found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Value" "another" ] + Expect.equal (streamText stream) JsonDocument.two "The incorrect document was returned" + } + testTask "succeeds when multiple documents are found" { + use! 
db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] + let json = streamText stream + Expect.notEqual json "{}" "There should have been a document returned" + verifyAny json [ JsonDocument.two; JsonDocument.four ] + } + testTask "succeeds when a document is not found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Value" "absent" ] + verifyNoDoc (streamText stream) + } + ] + testList "writeJsonFirstByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonFirstByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar" ] + Expect.equal (streamText stream) JsonDocument.two "An incorrect document was returned" + } + testTask "succeeds when sorting descending" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + use stream = new MemoryStream() + use writer = writeStream stream + do! conn.writeJsonFirstByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar DESC" ] + Expect.equal (streamText stream) JsonDocument.four "An incorrect document was returned" + } + ] testList "updateById" [ testTask "succeeds when a document is updated" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn let testDoc = { emptyDoc with Id = "one"; Sub = Some { Foo = "blue"; Bar = "red" } } do! conn.updateById SqliteDb.TableName "one" testDoc @@ -363,10 +887,10 @@ let integrationTests = testTask "succeeds when a document is updated" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn do! conn.updateByFunc - SqliteDb.TableName (_.Id) { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } + SqliteDb.TableName _.Id { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } let! after = conn.findById SqliteDb.TableName "one" if Option.isNone after then Expect.isTrue false "There should have been a document returned post-update" @@ -384,14 +908,14 @@ let integrationTests = // This not raising an exception is the test do! conn.updateByFunc - SqliteDb.TableName (_.Id) { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } + SqliteDb.TableName _.Id { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } } ] testList "patchById" [ testTask "succeeds when a document is updated" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn do! conn.patchById SqliteDb.TableName "one" {| NumValue = 44 |} let! after = conn.findById SqliteDb.TableName "one" @@ -414,7 +938,7 @@ let integrationTests = testTask "succeeds when a document is updated" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn do! conn.patchByFields SqliteDb.TableName Any [ Field.Equal "Value" "purple" ] {| NumValue = 77 |} let! 
after = conn.countByFields SqliteDb.TableName Any [ Field.Equal "NumValue" 77 ] @@ -435,7 +959,7 @@ let integrationTests = testTask "succeeds when fields are removed" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn do! conn.removeFieldsById SqliteDb.TableName "two" [ "Sub"; "Value" ] try @@ -448,7 +972,7 @@ let integrationTests = testTask "succeeds when a field is not removed" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn // This not raising an exception is the test do! conn.removeFieldsById SqliteDb.TableName "two" [ "AFieldThatIsNotThere" ] @@ -465,7 +989,7 @@ let integrationTests = testTask "succeeds when a field is removed" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn do! conn.removeFieldsByFields SqliteDb.TableName Any [ Field.Equal "NumValue" 17 ] [ "Sub" ] try @@ -478,7 +1002,7 @@ let integrationTests = testTask "succeeds when a field is not removed" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn // This not raising an exception is the test do! conn.removeFieldsByFields SqliteDb.TableName Any [ Field.Equal "NumValue" 17 ] [ "Nothing" ] @@ -496,7 +1020,7 @@ let integrationTests = testTask "succeeds when a document is deleted" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn do! conn.deleteById SqliteDb.TableName "four" let! remaining = conn.countAll SqliteDb.TableName @@ -505,7 +1029,7 @@ let integrationTests = testTask "succeeds when a document is not deleted" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn do! conn.deleteById SqliteDb.TableName "thirty" let! remaining = conn.countAll SqliteDb.TableName @@ -516,7 +1040,7 @@ let integrationTests = testTask "succeeds when documents are deleted" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn do! conn.deleteByFields SqliteDb.TableName Any [ Field.NotEqual "Value" "purple" ] let! remaining = conn.countAll SqliteDb.TableName @@ -525,18 +1049,103 @@ let integrationTests = testTask "succeeds when documents are not deleted" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn do! conn.deleteByFields SqliteDb.TableName Any [ Field.Equal "Value" "crimson" ] let! remaining = conn.countAll SqliteDb.TableName Expect.equal remaining 5L "There should have been 5 documents remaining" } ] + testList "customList" [ + testTask "succeeds when data is found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! docs = conn.customList (Query.find SqliteDb.TableName) [] fromData + Expect.hasLength docs 5 "There should have been 5 documents returned" + } + testTask "succeeds when data is not found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! docs = + conn.customList + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value" + [ SqliteParameter("@value", 100) ] + fromData + Expect.isEmpty docs "There should have been no documents returned" + } + ] + testList "customJsonArray" [ + testTask "succeeds when data is found" { + use! db = SqliteDb.BuildDb() + use conn = Configuration.dbConn () + do! loadDocs conn + + let! 
json = conn.customJsonArray (Query.find SqliteDb.TableName) [] jsonFromData
+            Expect.stringStarts json "[" "The JSON array should have started with `[`"
+            Expect.stringContains json JsonDocument.one "Document ID `one` should have been found"
+            Expect.stringContains json JsonDocument.two "Document ID `two` should have been found"
+            Expect.stringContains json JsonDocument.three "Document ID `three` should have been found"
+            Expect.stringContains json JsonDocument.four "Document ID `four` should have been found"
+            Expect.stringContains json JsonDocument.five "Document ID `five` should have been found"
+            Expect.stringEnds json "]" "The JSON array should have ended with `]`"
+        }
+        testTask "succeeds when data is not found" {
+            use! db = SqliteDb.BuildDb()
+            use conn = Configuration.dbConn ()
+            do! loadDocs conn
+
+            let! docs =
+                conn.customJsonArray
+                    $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value"
+                    [ SqliteParameter("@value", 100) ]
+                    jsonFromData
+            Expect.equal docs "[]" "There should have been no documents returned"
+        }
+    ]
+    testList "writeCustomJsonArray" [
+        testTask "succeeds when data is found" {
+            use! db = SqliteDb.BuildDb()
+            use conn = Configuration.dbConn ()
+            do! loadDocs conn
+
+            use stream = new MemoryStream()
+            use writer = writeStream stream
+            do! conn.writeCustomJsonArray (Query.find SqliteDb.TableName) [] writer jsonFromData
+
+            let json = streamText stream
+            Expect.stringStarts json "[" "The JSON array should have started with `[`"
+            Expect.stringContains json JsonDocument.one "Document ID `one` should have been found"
+            Expect.stringContains json JsonDocument.two "Document ID `two` should have been found"
+            Expect.stringContains json JsonDocument.three "Document ID `three` should have been found"
+            Expect.stringContains json JsonDocument.four "Document ID `four` should have been found"
+            Expect.stringContains json JsonDocument.five "Document ID `five` should have been found"
+            Expect.stringEnds json "]" "The JSON array should have ended with `]`"
+        }
+        testTask "succeeds when data is not found" {
+            use! db = SqliteDb.BuildDb()
+            use conn = Configuration.dbConn ()
+            do! loadDocs conn
+
+            use stream = new MemoryStream()
+            use writer = writeStream stream
+            do! conn.writeCustomJsonArray
+                    $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value"
+                    [ SqliteParameter("@value", 100) ]
+                    writer
+                    jsonFromData
+
+            Expect.equal (streamText stream) "[]" "There should have been no documents returned"
+        }
+    ]
     testList "customSingle" [
         testTask "succeeds when a row is found" {
             use! db = SqliteDb.BuildDb()
             use conn = Configuration.dbConn ()
-            do! loadDocs ()
+            do! loadDocs conn
 
             let! doc =
                 conn.customSingle
@@ -549,7 +1158,7 @@ let integrationTests =
         testTask "succeeds when a row is not found" {
             use! db = SqliteDb.BuildDb()
             use conn = Configuration.dbConn ()
-            do! loadDocs ()
+            do! loadDocs conn
 
             let! doc =
                 conn.customSingle
@@ -559,33 +1168,37 @@ let integrationTests =
             Expect.isNone doc "There should not have been a document returned"
         }
     ]
-    testList "customList" [
-        testTask "succeeds when data is found" {
+    testList "customJsonSingle" [
+        testTask "succeeds when a row is found" {
             use! db = SqliteDb.BuildDb()
             use conn = Configuration.dbConn ()
-            do! loadDocs ()
-
-            let! docs = conn.customList (Query.find SqliteDb.TableName) [] fromData
-            Expect.hasLength docs 5 "There should have been 5 documents returned"
+            do! loadDocs conn
+
+            let! 
json = + conn.customJsonSingle + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id" + [ SqliteParameter("@id", "one") ] + jsonFromData + Expect.equal json JsonDocument.one "The JSON document is incorrect" } - testTask "succeeds when data is not found" { + testTask "succeeds when a row is not found" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () - - let! docs = - conn.customList - $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value" - [ SqliteParameter("@value", 100) ] - fromData - Expect.isEmpty docs "There should have been no documents returned" + do! loadDocs conn + + let! json = + conn.customJsonSingle + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id" + [ SqliteParameter("@id", "eighty") ] + jsonFromData + Expect.equal json "{}" "There should not have been a document returned" } ] testList "customNonQuery" [ testTask "succeeds when operating on data" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn do! conn.customNonQuery $"DELETE FROM {SqliteDb.TableName}" [] @@ -595,7 +1208,7 @@ let integrationTests = testTask "succeeds when no data matches where clause" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - do! loadDocs () + do! loadDocs conn do! conn.customNonQuery $"DELETE FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value" diff --git a/src/Tests/SqliteTests.fs b/src/Tests/SqliteTests.fs index c2c3f58..1ba12a1 100644 --- a/src/Tests/SqliteTests.fs +++ b/src/Tests/SqliteTests.fs @@ -1,5 +1,6 @@ module SqliteTests +open System.IO open System.Text.Json open BitBadger.Documents open BitBadger.Documents.Sqlite @@ -135,6 +136,19 @@ let loadDocs () = backgroundTask { for doc in testDocuments do do! insert SqliteDb.TableName doc } +/// Set up a stream writer for a test +let writeStream (stream: Stream) = + let writer = new StreamWriter(stream) + writer.AutoFlush <- true + writer + +/// Get the text of the given stream +let streamText (stream: Stream) = + stream.Position <- 0L + use reader = new StreamReader(stream) + reader.ReadToEnd() + + /// Integration tests for the Configuration module of the SQLite library let configurationTests = testList "Configuration" [ test "useConnectionString / connectionString succeed" { @@ -151,6 +165,85 @@ let configurationTests = testList "Configuration" [ /// Integration tests for the Custom module of the SQLite library let customTests = testList "Custom" [ + testList "list" [ + testTask "succeeds when data is found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! docs = Custom.list (Query.find SqliteDb.TableName) [] fromData + Expect.hasCountOf docs 5u (fun _ -> true) "There should have been 5 documents returned" + } + testTask "succeeds when data is not found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! docs = + Custom.list + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value" + [ SqliteParameter("@value", 100) ] + fromData + Expect.isEmpty docs "There should have been no documents returned" + } + ] + testList "jsonArray" [ + testTask "succeeds when data is found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! 
json = Custom.jsonArray (Query.find SqliteDb.TableName) [] jsonFromData
+            Expect.stringStarts json "[" "The JSON array should have started with `[`"
+            Expect.stringContains json JsonDocument.one "Document ID `one` should have been found"
+            Expect.stringContains json JsonDocument.two "Document ID `two` should have been found"
+            Expect.stringContains json JsonDocument.three "Document ID `three` should have been found"
+            Expect.stringContains json JsonDocument.four "Document ID `four` should have been found"
+            Expect.stringContains json JsonDocument.five "Document ID `five` should have been found"
+            Expect.stringEnds json "]" "The JSON array should have ended with `]`"
+        }
+        testTask "succeeds when data is not found" {
+            use! db = SqliteDb.BuildDb()
+            do! loadDocs ()
+
+            let! docs =
+                Custom.jsonArray
+                    $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value"
+                    [ SqliteParameter("@value", 100) ]
+                    jsonFromData
+            Expect.equal docs "[]" "There should have been no documents returned"
+        }
+    ]
+    testList "writeJsonArray" [
+        testTask "succeeds when data is found" {
+            use! db = SqliteDb.BuildDb()
+            do! loadDocs ()
+
+            use stream = new MemoryStream()
+            use writer = writeStream stream
+            do! Custom.writeJsonArray (Query.find SqliteDb.TableName) [] writer jsonFromData
+
+            let json = streamText stream
+            Expect.stringStarts json "[" "The JSON array should have started with `[`"
+            Expect.stringContains json JsonDocument.one "Document ID `one` should have been found"
+            Expect.stringContains json JsonDocument.two "Document ID `two` should have been found"
+            Expect.stringContains json JsonDocument.three "Document ID `three` should have been found"
+            Expect.stringContains json JsonDocument.four "Document ID `four` should have been found"
+            Expect.stringContains json JsonDocument.five "Document ID `five` should have been found"
+            Expect.stringEnds json "]" "The JSON array should have ended with `]`"
+        }
+        testTask "succeeds when data is not found" {
+            use! db = SqliteDb.BuildDb()
+            do! loadDocs ()
+
+            use stream = new MemoryStream()
+            use writer = writeStream stream
+            do! Custom.writeJsonArray
+                    $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value"
+                    [ SqliteParameter("@value", 100) ]
+                    writer
+                    jsonFromData
+
+            Expect.equal (streamText stream) "[]" "There should have been no documents returned"
+        }
+    ]
     testList "single" [
         testTask "succeeds when a row is found" {
             use! db = SqliteDb.BuildDb()
@@ -176,24 +269,28 @@ let customTests = testList "Custom" [
             Expect.isNone doc "There should not have been a document returned"
         }
     ]
-    testList "list" [
-        testTask "succeeds when data is found" {
+    testList "jsonSingle" [
+        testTask "succeeds when a row is found" {
             use! db = SqliteDb.BuildDb()
             do! loadDocs ()
 
-            let! docs = Custom.list (Query.find SqliteDb.TableName) [] fromData
-            Expect.hasCountOf docs 5u (fun _ -> true) "There should have been 5 documents returned"
+            let! json =
+                Custom.jsonSingle
+                    $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id"
+                    [ SqliteParameter("@id", "one") ]
+                    jsonFromData
+            Expect.equal json JsonDocument.one "The JSON document is incorrect"
        }
-        testTask "succeeds when data is not found" {
+        testTask "succeeds when a row is not found" {
             use! db = SqliteDb.BuildDb()
             do! loadDocs ()
 
-            let! docs =
-                Custom.list
-                    $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value"
-                    [ SqliteParameter("@value", 100) ]
-                    fromData
-            Expect.isEmpty docs "There should have been no documents returned"
+            let! 
json = + Custom.jsonSingle + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id" + [ SqliteParameter("@id", "eighty") ] + jsonFromData + Expect.equal json "{}" "There should not have been a document returned" } ] testList "nonQuery" [ @@ -653,6 +750,467 @@ let findTests = testList "Find" [ ] ] +/// Verify a JSON array begins with "[" and ends with "]" +let private verifyBeginEnd json = + Expect.stringStarts json "[" "The array should have started with `[`" + Expect.stringEnds json "]" "The array should have ended with `]`" + +/// Verify an empty JSON array +let private verifyEmpty json = + Expect.equal json "[]" "There should be no documents returned" + +/// Verify an empty JSON document +let private verifyNoDoc json = + Expect.equal json "{}" "There should be no document returned" + +/// Verify the presence of any of the given documents in the given JSON +let private verifyAny (json: string) (docs: string list) = + match docs |> List.tryFind json.Contains with + | Some _ -> () + | None -> + let theDocs = docs |> String.concat " | " + Expect.isTrue false $"Could not find any of |{theDocs}| in {json}" + +/// Integration tests for the Json module of the SQLite library +let jsonTests = testList "Json" [ + testList "all" [ + testTask "succeeds when there is data" { + use! db = SqliteDb.BuildDb() + + do! insert SqliteDb.TableName { Foo = "one"; Bar = "two" } + do! insert SqliteDb.TableName { Foo = "three"; Bar = "four" } + do! insert SqliteDb.TableName { Foo = "five"; Bar = "six" } + + let! json = Json.all SqliteDb.TableName + verifyBeginEnd json + Expect.stringContains json """{"Foo":"one","Bar":"two"}""" "The first document was not found" + Expect.stringContains json """{"Foo":"three","Bar":"four"}""" "The second document was not found" + Expect.stringContains json """{"Foo":"five","Bar":"six"}""" "The third document was not found" + } + testTask "succeeds when there is no data" { + use! db = SqliteDb.BuildDb() + let! json = Json.all SqliteDb.TableName + verifyEmpty json + } + ] + testList "allOrdered" [ + testTask "succeeds when ordering numerically" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.allOrdered SqliteDb.TableName [ Field.Named "n:NumValue" ] + Expect.equal + json + $"[{JsonDocument.one},{JsonDocument.three},{JsonDocument.two},{JsonDocument.four},{JsonDocument.five}]" + "The documents were not ordered correctly" + } + testTask "succeeds when ordering numerically descending" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.allOrdered SqliteDb.TableName [ Field.Named "n:NumValue DESC" ] + Expect.equal + json + $"[{JsonDocument.five},{JsonDocument.four},{JsonDocument.two},{JsonDocument.three},{JsonDocument.one}]" + "The documents were not ordered correctly" + } + testTask "succeeds when ordering alphabetically" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.allOrdered SqliteDb.TableName [ Field.Named "Id DESC" ] + Expect.equal + json + $"[{JsonDocument.two},{JsonDocument.three},{JsonDocument.one},{JsonDocument.four},{JsonDocument.five}]" + "The documents were not ordered correctly" + } + ] + testList "byId" [ + testTask "succeeds when a document is found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.byId SqliteDb.TableName "two" + Expect.equal json JsonDocument.two "The incorrect document was returned" + } + testTask "succeeds when a document is not found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! 
json = Json.byId SqliteDb.TableName "three hundred eighty-seven" + verifyNoDoc json + } + ] + testList "byFields" [ + testTask "succeeds when documents are found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.byFields SqliteDb.TableName Any [ Field.Greater "NumValue" 15 ] + verifyBeginEnd json + Expect.stringContains json JsonDocument.four "Document `four` should have been returned" + Expect.stringContains json JsonDocument.five "Document `five` should have been returned" + } + testTask "succeeds when documents are found using IN with numeric field" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.byFields SqliteDb.TableName All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] + Expect.equal json $"[{JsonDocument.three}]" "There should have been one document returned" + } + testTask "succeeds when documents are not found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.byFields SqliteDb.TableName Any [ Field.Greater "NumValue" 100 ] + verifyEmpty json + } + testTask "succeeds for InArray when matching documents exist" { + use! db = SqliteDb.BuildDb() + do! Definition.ensureTable SqliteDb.TableName + for doc in ArrayDocument.TestDocuments do do! insert SqliteDb.TableName doc + + let! json = Json.byFields SqliteDb.TableName All [ Field.InArray "Values" SqliteDb.TableName [ "c" ] ] + verifyBeginEnd json + Expect.stringContains + json """{"Id":"first","Values":["a","b","c"]}""" "Document `first` should have been returned" + Expect.stringContains + json """{"Id":"second","Values":["c","d","e"]}""" "Document `second` should have been returned" + } + testTask "succeeds for InArray when no matching documents exist" { + use! db = SqliteDb.BuildDb() + do! Definition.ensureTable SqliteDb.TableName + for doc in ArrayDocument.TestDocuments do do! insert SqliteDb.TableName doc + + let! json = Json.byFields SqliteDb.TableName All [ Field.InArray "Values" SqliteDb.TableName [ "j" ] ] + verifyEmpty json + } + ] + testList "byFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.byFieldsOrdered SqliteDb.TableName Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id" ] + Expect.equal json $"[{JsonDocument.five},{JsonDocument.four}]" "Incorrect documents were returned" + } + testTask "succeeds when sorting descending" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = + Json.byFieldsOrdered SqliteDb.TableName Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id DESC" ] + Expect.equal json $"[{JsonDocument.four},{JsonDocument.five}]" "Incorrect documents were returned" + } + testTask "succeeds when sorting case-sensitively" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = + Json.byFieldsOrdered SqliteDb.TableName All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "Value" ] + Expect.equal + json $"[{JsonDocument.three},{JsonDocument.one},{JsonDocument.two}]" "Documents not ordered correctly" + } + testTask "succeeds when sorting case-insensitively" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = + Json.byFieldsOrdered + SqliteDb.TableName All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "i:Value" ] + Expect.equal + json $"[{JsonDocument.three},{JsonDocument.two},{JsonDocument.one}]" "Documents not ordered correctly" + } + ] + testList "firstByFields" [ + testTask "succeeds when a document is found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! 
json = Json.firstByFields SqliteDb.TableName Any [ Field.Equal "Value" "another" ] + Expect.equal json JsonDocument.two "The incorrect document was returned" + } + testTask "succeeds when multiple documents are found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.firstByFields SqliteDb.TableName Any [ Field.Equal "Sub.Foo" "green" ] + Expect.notEqual json "{}" "There should have been a document returned" + verifyAny json [ JsonDocument.two; JsonDocument.four ] + } + testTask "succeeds when a document is not found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = Json.firstByFields SqliteDb.TableName Any [ Field.Equal "Value" "absent" ] + verifyNoDoc json + } + ] + testList "firstByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = + Json.firstByFieldsOrdered + SqliteDb.TableName Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar" ] + Expect.equal json JsonDocument.two "An incorrect document was returned" + } + testTask "succeeds when sorting descending" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + let! json = + Json.firstByFieldsOrdered + SqliteDb.TableName Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar DESC" ] + Expect.equal json JsonDocument.four "An incorrect document was returned" + } + ] + testList "writeAll" [ + testTask "succeeds when there is data" { + use! db = SqliteDb.BuildDb() + + do! insert SqliteDb.TableName { Foo = "one"; Bar = "two" } + do! insert SqliteDb.TableName { Foo = "three"; Bar = "four" } + do! insert SqliteDb.TableName { Foo = "five"; Bar = "six" } + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeAll SqliteDb.TableName writer + let json = streamText stream + verifyBeginEnd json + Expect.stringContains json """{"Foo":"one","Bar":"two"}""" "The first document was not found" + Expect.stringContains json """{"Foo":"three","Bar":"four"}""" "The second document was not found" + Expect.stringContains json """{"Foo":"five","Bar":"six"}""" "The third document was not found" + } + testTask "succeeds when there is no data" { + use! db = SqliteDb.BuildDb() + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeAll SqliteDb.TableName writer + verifyEmpty (streamText stream) + } + ] + testList "writeAllOrdered" [ + testTask "succeeds when ordering numerically" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeAllOrdered SqliteDb.TableName writer [ Field.Named "n:NumValue" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.one},{JsonDocument.three},{JsonDocument.two},{JsonDocument.four},{JsonDocument.five}]" + "The documents were not ordered correctly" + } + testTask "succeeds when ordering numerically descending" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeAllOrdered SqliteDb.TableName writer [ Field.Named "n:NumValue DESC" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.five},{JsonDocument.four},{JsonDocument.two},{JsonDocument.three},{JsonDocument.one}]" + "The documents were not ordered correctly" + } + testTask "succeeds when ordering alphabetically" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! 
Json.writeAllOrdered SqliteDb.TableName writer [ Field.Named "Id DESC" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.two},{JsonDocument.three},{JsonDocument.one},{JsonDocument.four},{JsonDocument.five}]" + "The documents were not ordered correctly" + } + ] + testList "writeById" [ + testTask "succeeds when a document is found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeById SqliteDb.TableName writer "two" + Expect.equal (streamText stream) JsonDocument.two "The incorrect document was returned" + } + testTask "succeeds when a document is not found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeById SqliteDb.TableName writer "three hundred eighty-seven" + verifyNoDoc (streamText stream) + } + ] + testList "writeByFields" [ + testTask "succeeds when documents are found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByFields SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] + let json = streamText stream + verifyBeginEnd json + Expect.stringContains json JsonDocument.four "Document `four` should have been returned" + Expect.stringContains json JsonDocument.five "Document `five` should have been returned" + } + testTask "succeeds when documents are found using IN with numeric field" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByFields SqliteDb.TableName writer All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] + Expect.equal (streamText stream) $"[{JsonDocument.three}]" "There should have been one document returned" + } + testTask "succeeds when documents are not found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByFields SqliteDb.TableName writer Any [ Field.Greater "NumValue" 100 ] + verifyEmpty (streamText stream) + } + testTask "succeeds for InArray when matching documents exist" { + use! db = SqliteDb.BuildDb() + do! Definition.ensureTable SqliteDb.TableName + for doc in ArrayDocument.TestDocuments do do! insert SqliteDb.TableName doc + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByFields SqliteDb.TableName writer All [ Field.InArray "Values" SqliteDb.TableName [ "c" ] ] + let json = streamText stream + verifyBeginEnd json + Expect.stringContains + json """{"Id":"first","Values":["a","b","c"]}""" "Document `first` should have been returned" + Expect.stringContains + json """{"Id":"second","Values":["c","d","e"]}""" "Document `second` should have been returned" + } + testTask "succeeds for InArray when no matching documents exist" { + use! db = SqliteDb.BuildDb() + do! Definition.ensureTable SqliteDb.TableName + for doc in ArrayDocument.TestDocuments do do! insert SqliteDb.TableName doc + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByFields SqliteDb.TableName writer All [ Field.InArray "Values" SqliteDb.TableName [ "j" ] ] + verifyEmpty (streamText stream) + } + ] + testList "writeByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! 
Json.writeByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id" ] + Expect.equal + (streamText stream) $"[{JsonDocument.five},{JsonDocument.four}]" "Incorrect documents were returned" + } + testTask "succeeds when sorting descending" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id DESC" ] + Expect.equal + (streamText stream) $"[{JsonDocument.four},{JsonDocument.five}]" "Incorrect documents were returned" + } + testTask "succeeds when sorting case-sensitively" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByFieldsOrdered + SqliteDb.TableName writer All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "Value" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.three},{JsonDocument.one},{JsonDocument.two}]" + "Documents not ordered correctly" + } + testTask "succeeds when sorting case-insensitively" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeByFieldsOrdered + SqliteDb.TableName writer All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "i:Value" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.three},{JsonDocument.two},{JsonDocument.one}]" + "Documents not ordered correctly" + } + ] + testList "writeFirstByFields" [ + testTask "succeeds when a document is found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Value" "another" ] + Expect.equal (streamText stream) JsonDocument.two "The incorrect document was returned" + } + testTask "succeeds when multiple documents are found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] + let json = streamText stream + Expect.notEqual json "{}" "There should have been a document returned" + verifyAny json [ JsonDocument.two; JsonDocument.four ] + } + testTask "succeeds when a document is not found" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Value" "absent" ] + verifyNoDoc (streamText stream) + } + ] + testList "writeFirstByFieldsOrdered" [ + testTask "succeeds when sorting ascending" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! Json.writeFirstByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar" ] + Expect.equal (streamText stream) JsonDocument.two "An incorrect document was returned" + } + testTask "succeeds when sorting descending" { + use! db = SqliteDb.BuildDb() + do! loadDocs () + + use stream = new MemoryStream() + use writer = writeStream stream + do! 
Json.writeFirstByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar DESC" ] + Expect.equal (streamText stream) JsonDocument.four "An incorrect document was returned" + } + ] +] + /// Integration tests for the Update module of the SQLite library let updateTests = testList "Update" [ testList "byId" [ @@ -682,7 +1240,7 @@ let updateTests = testList "Update" [ use! db = SqliteDb.BuildDb() do! loadDocs () - do! Update.byFunc SqliteDb.TableName (_.Id) { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } + do! Update.byFunc SqliteDb.TableName _.Id { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } let! after = Find.byId SqliteDb.TableName "one" Expect.isSome after "There should have been a document returned post-update" Expect.equal @@ -697,7 +1255,7 @@ let updateTests = testList "Update" [ Expect.isEmpty before "There should have been no documents returned" // This not raising an exception is the test - do! Update.byFunc SqliteDb.TableName (_.Id) { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } + do! Update.byFunc SqliteDb.TableName _.Id { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } } ] ] @@ -844,7 +1402,7 @@ let deleteTests = testList "Delete" [ ] /// All tests for the SQLite library -let all = testList "Sqlite" [ +let all = ftestList "Sqlite" [ testList "Unit" [ queryTests; parametersTests ] testSequenced <| testList "Integration" [ configurationTests @@ -854,6 +1412,7 @@ let all = testList "Sqlite" [ countTests existsTests findTests + jsonTests updateTests patchTests removeFieldsTests diff --git a/src/Tests/Types.fs b/src/Tests/Types.fs index bec9b16..16bd9a9 100644 --- a/src/Tests/Types.fs +++ b/src/Tests/Types.fs @@ -26,6 +26,22 @@ type JsonDocument = NumValue: int Sub: SubDocument option } +module JsonDocument = + /// The JSON for document ID `one` + let one = """{"Id":"one","Value":"FIRST!","NumValue":0,"Sub":null}""" + + /// The JSON for document ID `two` + let two = """{"Id":"two","Value":"another","NumValue":10,"Sub":{"Foo":"green","Bar":"blue"}}""" + + /// The JSON for document ID `three` + let three = """{"Id":"three","Value":"","NumValue":4,"Sub":null}""" + + /// The JSON for document ID `four` + let four = """{"Id":"four","Value":"purple","NumValue":17,"Sub":{"Foo":"green","Bar":"red"}}""" + + /// The JSON for document ID `five` + let five = """{"Id":"five","Value":"purple","NumValue":18,"Sub":null}""" + /// An empty JsonDocument let emptyDoc = { Id = ""; Value = ""; NumValue = 0; Sub = None } -- 2.47.2 From 7a18ec53e54b033a1ddc23a20599a7020525b79c Mon Sep 17 00:00:00 2001 From: "Daniel J. 
Summers" Date: Mon, 7 Apr 2025 21:37:11 -0400 Subject: [PATCH 13/22] Add C# SQLite Json tests --- .../SqliteCSharpExtensionTests.cs | 761 ++++++++++++++++-- src/Tests.CSharp/SqliteCSharpTests.cs | 625 +++++++++++++- src/Tests.CSharp/Types.cs | 18 +- src/Tests/SqliteExtensionTests.fs | 110 +-- src/Tests/SqliteTests.fs | 2 +- 5 files changed, 1394 insertions(+), 122 deletions(-) diff --git a/src/Tests.CSharp/SqliteCSharpExtensionTests.cs b/src/Tests.CSharp/SqliteCSharpExtensionTests.cs index 5d23375..6842b2b 100644 --- a/src/Tests.CSharp/SqliteCSharpExtensionTests.cs +++ b/src/Tests.CSharp/SqliteCSharpExtensionTests.cs @@ -1,17 +1,65 @@ using Expecto.CSharp; using Expecto; using BitBadger.Documents.Sqlite; +using Microsoft.Data.Sqlite; namespace BitBadger.Documents.Tests.CSharp; using static Runner; /// -/// C# tests for the extensions on the SqliteConnection class +/// C# tests for the extensions on the SqliteConnection class /// public static class SqliteCSharpExtensionTests { - private static Task LoadDocs() => SqliteCSharpTests.LoadDocs(); + private static async Task LoadDocs(SqliteConnection conn) + { + foreach (var doc in JsonDocument.TestDocuments) await conn.Insert(SqliteDb.TableName, doc); + } + + /// Verify a JSON array begins with "[" and ends with "]" + private static void VerifyBeginEnd(string json) + { + Expect.stringStarts(json, "[", "The array should have started with `[`"); + Expect.stringEnds(json, "]", "The array should have ended with `]`"); + } + + /// Verify an empty JSON array + private static void VerifyEmpty(string json) + { + Expect.equal(json, "[]", "There should be no documents returned"); + } + + /// Verify an empty JSON document + private static void VerifyNoDoc(string json) + { + Expect.equal(json, "{}", "There should be no document returned"); + } + + /// Set up a stream writer for a test + private static StreamWriter WriteStream(Stream stream) + { + StreamWriter writer = new(stream); + writer.AutoFlush = true; + return writer; + } + + /// Get the text of the given stream + private static string StreamText(Stream stream) + { + stream.Position = 0L; + using StreamReader reader = new(stream); + return reader.ReadToEnd(); + } + + /// Verify the presence of any of the given documents in the given JSON + private static void VerifyAny(string json, IEnumerable docs) + { + var theDocs = docs.ToList(); + if (theDocs.Any(json.Contains)) return; + var anyDocs = string.Join(" | ", theDocs); + Expect.isTrue(false, $"Could not find any of |{anyDocs}| in {json}"); + } /// /// Integration tests for the SQLite extension methods @@ -19,13 +67,99 @@ public static class SqliteCSharpExtensionTests [Tests] public static readonly Test Integration = TestList("Sqlite.C#.Extensions", [ + TestList("CustomList", + [ + TestCase("succeeds when data is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + var docs = await conn.CustomList(Query.Find(SqliteDb.TableName), Parameters.None, + Results.FromData); + Expect.equal(docs.Count, 5, "There should have been 5 documents returned"); + }), + TestCase("succeeds when data is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + var docs = await conn.CustomList( + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", [new("@value", 100)], + Results.FromData); + Expect.isEmpty(docs, "There should have been no 
documents returned"); + }) + ]), + TestList("CustomJsonArray", + [ + TestCase("succeeds when data is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + var json = await conn.CustomJsonArray(Query.Find(SqliteDb.TableName), [], Results.JsonFromData); + VerifyBeginEnd(json); + Expect.stringContains(json, JsonDocument.One, "Document ID `one` should have been found"); + Expect.stringContains(json, JsonDocument.Two,"Document ID `two` should have been found"); + Expect.stringContains(json, JsonDocument.Three, "Document ID `three` should have been found"); + Expect.stringContains(json, JsonDocument.Four, "Document ID `four` should have been found"); + Expect.stringContains(json, JsonDocument.Five, "Document ID `five` should have been found"); + }), + TestCase("succeeds when data is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + VerifyEmpty(await conn.CustomJsonArray( + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", + [new SqliteParameter("@value", 100)], Results.JsonFromData)); + }) + ]), + TestList("WriteCustomJsonArray", + [ + TestCase("succeeds when data is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteCustomJsonArray(Query.Find(SqliteDb.TableName), [], writer, Results.JsonFromData); + + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, JsonDocument.One, "Document ID `one` should have been found"); + Expect.stringContains(json, JsonDocument.Two,"Document ID `two` should have been found"); + Expect.stringContains(json, JsonDocument.Three, "Document ID `three` should have been found"); + Expect.stringContains(json, JsonDocument.Four, "Document ID `four` should have been found"); + Expect.stringContains(json, JsonDocument.Five, "Document ID `five` should have been found"); + }), + TestCase("succeeds when data is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteCustomJsonArray( + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", + [new SqliteParameter("@value", 100)], writer, Results.JsonFromData); + + VerifyEmpty(StreamText(stream)); + }) + ]), TestList("CustomSingle", [ TestCase("succeeds when a row is found", async () => { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.CustomSingle($"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id", [Parameters.Id("one")], Results.FromData); @@ -36,35 +170,35 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.CustomSingle($"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id", [Parameters.Id("eighty")], Results.FromData); Expect.isNull(doc, "There should not have been a document returned"); }) ]), - TestList("CustomList", + 
TestList("CustomJsonSingle", [ - TestCase("succeeds when data is found", async () => + TestCase("succeeds when a row is found", async () => { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); - var docs = await conn.CustomList(Query.Find(SqliteDb.TableName), Parameters.None, - Results.FromData); - Expect.equal(docs.Count, 5, "There should have been 5 documents returned"); + var json = await conn.CustomJsonSingle( + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id", + [new SqliteParameter("@id", "one")], Results.JsonFromData); + Expect.equal(json, JsonDocument.One, "The JSON document is incorrect"); }), - TestCase("succeeds when data is not found", async () => + TestCase("succeeds when a row is not found", async () => { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); - var docs = await conn.CustomList( - $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", [new("@value", 100)], - Results.FromData); - Expect.isEmpty(docs, "There should have been no documents returned"); + VerifyNoDoc(await conn.CustomJsonSingle( + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id", + [new SqliteParameter("@id", "eighty")], Results.JsonFromData)); }) ]), TestList("CustomNonQuery", @@ -73,7 +207,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.CustomNonQuery($"DELETE FROM {SqliteDb.TableName}", Parameters.None); @@ -84,7 +218,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.CustomNonQuery($"DELETE FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", [new("@value", 100)]); @@ -210,7 +344,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var theCount = await conn.CountAll(SqliteDb.TableName); Expect.equal(theCount, 5L, "There should have been 5 matching documents"); @@ -219,7 +353,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var theCount = await conn.CountByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Value", "purple")]); @@ -231,7 +365,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsById(SqliteDb.TableName, "three"); Expect.isTrue(exists, "There should have been an existing document"); @@ -240,7 +374,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsById(SqliteDb.TableName, "seven"); Expect.isFalse(exists, "There should not have been an existing document"); @@ -252,7 +386,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await 
using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsByFields(SqliteDb.TableName, FieldMatch.Any, [Field.GreaterOrEqual("NumValue", 10)]); @@ -262,7 +396,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var exists = await conn.ExistsByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Nothing", "none")]); @@ -297,7 +431,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var results = await conn.FindAllOrdered(SqliteDb.TableName, [Field.Named("n:NumValue")]); Expect.hasLength(results, 5, "There should have been 5 documents returned"); @@ -308,7 +442,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var results = await conn.FindAllOrdered(SqliteDb.TableName, [Field.Named("n:NumValue DESC")]); @@ -320,7 +454,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var results = await conn.FindAllOrdered(SqliteDb.TableName, [Field.Named("Id DESC")]); Expect.hasLength(results, 5, "There should have been 5 documents returned"); @@ -334,7 +468,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindById(SqliteDb.TableName, "two"); Expect.isNotNull(doc, "There should have been a document returned"); @@ -344,7 +478,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindById(SqliteDb.TableName, "eighty-seven"); Expect.isNull(doc, "There should not have been a document returned"); @@ -356,7 +490,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Greater("NumValue", 15)]); @@ -366,7 +500,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Value", "mauve")]); @@ -379,7 +513,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, [Field.Greater("NumValue", 15)], [Field.Named("Id")]); @@ -390,7 +524,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var docs = await conn.FindByFieldsOrdered(SqliteDb.TableName, 
FieldMatch.Any, [Field.Greater("NumValue", 15)], [Field.Named("Id DESC")]); @@ -404,7 +538,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Value", "another")]); @@ -415,7 +549,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Sub.Foo", "green")]); @@ -426,7 +560,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Value", "absent")]); @@ -439,7 +573,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar")]); @@ -450,7 +584,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var doc = await conn.FindFirstByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar DESC")]); @@ -458,13 +592,532 @@ public static class SqliteCSharpExtensionTests Expect.equal("four", doc!.Id, "An incorrect document was returned"); }) ]), + TestList("JsonAll", + [ + TestCase("succeeds when there is data", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + + await conn.Insert(SqliteDb.TableName, new SubDocument { Foo = "one", Bar = "two" }); + await conn.Insert(SqliteDb.TableName, new SubDocument { Foo = "three", Bar = "four" }); + await conn.Insert(SqliteDb.TableName, new SubDocument { Foo = "five", Bar = "six" }); + + var json = await conn.JsonAll(SqliteDb.TableName); + VerifyBeginEnd(json); + Expect.stringContains(json, """{"Foo":"one","Bar":"two"}""", "The first document was not found"); + Expect.stringContains(json, """{"Foo":"three","Bar":"four"}""", "The second document was not found"); + Expect.stringContains(json, """{"Foo":"five","Bar":"six"}""", "The third document was not found"); + }), + TestCase("succeeds when there is no data", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + VerifyEmpty(await conn.JsonAll(SqliteDb.TableName)); + }) + ]), + TestList("JsonAllOrdered", + [ + TestCase("succeeds when ordering numerically", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal(await conn.JsonAllOrdered(SqliteDb.TableName, [Field.Named("n:NumValue")]), + $"[{JsonDocument.One},{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.Four},{JsonDocument.Five}]", + "The documents were not ordered correctly"); + }), + TestCase("succeeds when ordering numerically descending", async () => + { + 
await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal(await conn.JsonAllOrdered(SqliteDb.TableName, [Field.Named("n:NumValue DESC")]), + $"[{JsonDocument.Five},{JsonDocument.Four},{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One}]", + "The documents were not ordered correctly"); + }), + TestCase("succeeds when ordering alphabetically", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal(await conn.JsonAllOrdered(SqliteDb.TableName, [Field.Named("Id DESC")]), + $"[{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Four},{JsonDocument.Five}]", + "The documents were not ordered correctly"); + }) + ]), + TestList("JsonById", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal(await conn.JsonById(SqliteDb.TableName, "two"), JsonDocument.Two, + "The incorrect document was returned"); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + VerifyNoDoc(await conn.JsonById(SqliteDb.TableName, "three hundred eighty-seven")); + }) + ]), + TestList("JsonByFields", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + var json = await conn.JsonByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Greater("NumValue", 15)]); + VerifyBeginEnd(json); + Expect.stringContains(json, JsonDocument.Four, "Document `four` should have been returned"); + Expect.stringContains(json, JsonDocument.Five, "Document `five` should have been returned"); + }), + TestCase("succeeds when documents are found using IN with numeric field", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal( + await conn.JsonByFields(SqliteDb.TableName, FieldMatch.All, [Field.In("NumValue", [2, 4, 6, 8])]), + $"[{JsonDocument.Three}]", "There should have been one document returned"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + VerifyEmpty(await conn.JsonByFields(SqliteDb.TableName, FieldMatch.Any, + [Field.Greater("NumValue", 100)])); + }), + TestCase("succeeds for InArray when matching documents exist", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await conn.EnsureTable(SqliteDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await conn.Insert(SqliteDb.TableName, doc); + + var json = await conn.JsonByFields(SqliteDb.TableName, FieldMatch.All, + [Field.InArray("Values", SqliteDb.TableName, ["c"])]); + VerifyBeginEnd(json); + Expect.stringContains(json, """{"Id":"first","Values":["a","b","c"]}""", + "Document `first` should have been returned"); + Expect.stringContains(json, """{"Id":"second","Values":["c","d","e"]}""", + "Document `second` should have been returned"); + }), + TestCase("succeeds for InArray when no matching 
documents exist", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await conn.EnsureTable(SqliteDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await conn.Insert(SqliteDb.TableName, doc); + VerifyEmpty(await conn.JsonByFields(SqliteDb.TableName, FieldMatch.All, + [Field.InArray("Values", SqliteDb.TableName, ["j"])])); + }) + ]), + TestList("JsonByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal( + await conn.JsonByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, [Field.Greater("NumValue", 15)], + [Field.Named("Id")]), $"[{JsonDocument.Five},{JsonDocument.Four}]", + "Incorrect documents were returned"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal( + await conn.JsonByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, [Field.Greater("NumValue", 15)], + [Field.Named("Id DESC")]), $"[{JsonDocument.Four},{JsonDocument.Five}]", + "Incorrect documents were returned"); + }), + TestCase("succeeds when sorting case-sensitively", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal( + await conn.JsonByFieldsOrdered(SqliteDb.TableName, FieldMatch.All, + [Field.LessOrEqual("NumValue", 10)], [Field.Named("Value")]), + $"[{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Two}]", "Documents not ordered correctly"); + }), + TestCase("succeeds when sorting case-insensitively", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal( + await conn.JsonByFieldsOrdered(SqliteDb.TableName, FieldMatch.All, + [Field.LessOrEqual("NumValue", 10)], [Field.Named("i:Value")]), + $"[{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.One}]", "Documents not ordered correctly"); + }) + ]), + TestList("JsonFirstByFields", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal( + await conn.JsonFirstByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Value", "another")]), + JsonDocument.Two, "The incorrect document was returned"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + var json = await conn.JsonFirstByFields(SqliteDb.TableName, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")]); + Expect.notEqual(json, "{}", "There should have been a document returned"); + VerifyAny(json, [JsonDocument.Two, JsonDocument.Four]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + VerifyNoDoc(await conn.JsonFirstByFields(SqliteDb.TableName, FieldMatch.Any, + [Field.Equal("Value", "absent")])); + }) + ]), + TestList("JsonFirstByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await 
using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal( + await conn.JsonFirstByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar")]), JsonDocument.Two, + "An incorrect document was returned"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + Expect.equal( + await conn.JsonFirstByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar DESC")]), JsonDocument.Four, + "An incorrect document was returned"); + }) + ]), + TestList("WriteJsonAll", + [ + TestCase("succeeds when there is data", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + + await conn.Insert(SqliteDb.TableName, new SubDocument { Foo = "one", Bar = "two" }); + await conn.Insert(SqliteDb.TableName, new SubDocument { Foo = "three", Bar = "four" }); + await conn.Insert(SqliteDb.TableName, new SubDocument { Foo = "five", Bar = "six" }); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonAll(SqliteDb.TableName, writer); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, """{"Foo":"one","Bar":"two"}""", "The first document was not found"); + Expect.stringContains(json, """{"Foo":"three","Bar":"four"}""", "The second document was not found"); + Expect.stringContains(json, """{"Foo":"five","Bar":"six"}""", "The third document was not found"); + }), + TestCase("succeeds when there is no data", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonAll(SqliteDb.TableName, writer); + VerifyEmpty(StreamText(stream)); + }) + ]), + TestList("WriteJsonAllOrdered", + [ + TestCase("succeeds when ordering numerically", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonAllOrdered(SqliteDb.TableName, writer, [Field.Named("n:NumValue")]); + Expect.equal(StreamText(stream), + $"[{JsonDocument.One},{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.Four},{JsonDocument.Five}]", + "The documents were not ordered correctly"); + }), + TestCase("succeeds when ordering numerically descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonAllOrdered(SqliteDb.TableName, writer, [Field.Named("n:NumValue DESC")]); + Expect.equal(StreamText(stream), + $"[{JsonDocument.Five},{JsonDocument.Four},{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One}]", + "The documents were not ordered correctly"); + }), + TestCase("succeeds when ordering alphabetically", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + await using var writer = 
WriteStream(stream); + await conn.WriteJsonAllOrdered(SqliteDb.TableName, writer, [Field.Named("Id DESC")]); + Expect.equal(StreamText(stream), + $"[{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Four},{JsonDocument.Five}]", + "The documents were not ordered correctly"); + }) + ]), + TestList("WriteJsonById", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonById(SqliteDb.TableName, writer, "two"); + Expect.equal(StreamText(stream), JsonDocument.Two, "The incorrect document was returned"); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonById(SqliteDb.TableName, writer, "three hundred eighty-seven"); + VerifyNoDoc(StreamText(stream)); + }) + ]), + TestList("WriteJsonByFields", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 15)]); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, JsonDocument.Four, "Document `four` should have been returned"); + Expect.stringContains(json, JsonDocument.Five, "Document `five` should have been returned"); + }), + TestCase("succeeds when documents are found using IN with numeric field", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByFields(SqliteDb.TableName, writer, FieldMatch.All, + [Field.In("NumValue", [2, 4, 6, 8])]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Three}]", + "There should have been one document returned"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 100)]); + VerifyEmpty(StreamText(stream)); + }), + TestCase("succeeds for InArray when matching documents exist", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await conn.EnsureTable(SqliteDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await conn.Insert(SqliteDb.TableName, doc); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByFields(SqliteDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", SqliteDb.TableName, ["c"])]); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, """{"Id":"first","Values":["a","b","c"]}""", + "Document 
`first` should have been returned"); + Expect.stringContains(json, """{"Id":"second","Values":["c","d","e"]}""", + "Document `second` should have been returned"); + }), + TestCase("succeeds for InArray when no matching documents exist", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await conn.EnsureTable(SqliteDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await conn.Insert(SqliteDb.TableName, doc); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByFields(SqliteDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", SqliteDb.TableName, ["j"])]); + VerifyEmpty(StreamText(stream)); + }) + ]), + TestList("WriteJsonByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 15)], [Field.Named("Id")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Five},{JsonDocument.Four}]", + "Incorrect documents were returned"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 15)], [Field.Named("Id DESC")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Four},{JsonDocument.Five}]", + "Incorrect documents were returned"); + }), + TestCase("succeeds when sorting case-sensitively", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.All, + [Field.LessOrEqual("NumValue", 10)], [Field.Named("Value")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Two}]", + "Documents not ordered correctly"); + }), + TestCase("succeeds when sorting case-insensitively", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.All, + [Field.LessOrEqual("NumValue", 10)], [Field.Named("i:Value")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.One}]", + "Documents not ordered correctly"); + }) + ]), + TestList("WriteJsonFirstByFields", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "another")]); + Expect.equal(StreamText(stream), JsonDocument.Two, 
"The incorrect document was returned"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")]); + var json = StreamText(stream); + Expect.notEqual(json, "{}", "There should have been a document returned"); + VerifyAny(json, [JsonDocument.Two, JsonDocument.Four]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "absent")]); + VerifyNoDoc(StreamText(stream)); + }) + ]), + TestList("WriteJsonFirstByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonFirstByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar")]); + Expect.equal(StreamText(stream), JsonDocument.Two, "An incorrect document was returned"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await using var conn = Sqlite.Configuration.DbConn(); + await LoadDocs(conn); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await conn.WriteJsonFirstByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar DESC")]); + Expect.equal(StreamText(stream), JsonDocument.Four, "An incorrect document was returned"); + }) + ]), TestList("UpdateById", [ TestCase("succeeds when a document is updated", async () => { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); var testDoc = new JsonDocument { Id = "one", Sub = new() { Foo = "blue", Bar = "red" } }; await conn.UpdateById(SqliteDb.TableName, "one", testDoc); @@ -493,7 +1146,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.UpdateByFunc(SqliteDb.TableName, doc => doc.Id, new JsonDocument { Id = "one", Value = "le un", NumValue = 1 }); @@ -522,7 +1175,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.PatchById(SqliteDb.TableName, "one", new { NumValue = 44 }); var after = await conn.FindById(SqliteDb.TableName, "one"); @@ -547,7 +1200,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.PatchByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Value", 
"purple")], new { NumValue = 77 }); @@ -572,7 +1225,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.RemoveFieldsById(SqliteDb.TableName, "two", ["Sub", "Value"]); var updated = await Find.ById(SqliteDb.TableName, "two"); @@ -584,8 +1237,8 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); - + await LoadDocs(conn); + // This not raising an exception is the test await conn.RemoveFieldsById(SqliteDb.TableName, "two", ["AFieldThatIsNotThere"]); }), @@ -593,7 +1246,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - + // This not raising an exception is the test await conn.RemoveFieldsById(SqliteDb.TableName, "two", ["Value"]); }) @@ -604,7 +1257,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.RemoveFieldsByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("NumValue", 17)], ["Sub"]); @@ -616,8 +1269,8 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); - + await LoadDocs(conn); + // This not raising an exception is the test await conn.RemoveFieldsByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("NumValue", 17)], ["Nothing"]); @@ -626,7 +1279,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - + // This not raising an exception is the test await conn.RemoveFieldsByFields(SqliteDb.TableName, FieldMatch.Any, [Field.NotEqual("Abracadabra", "apple")], ["Value"]); @@ -638,7 +1291,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteById(SqliteDb.TableName, "four"); var remaining = await conn.CountAll(SqliteDb.TableName); @@ -648,7 +1301,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteById(SqliteDb.TableName, "thirty"); var remaining = await conn.CountAll(SqliteDb.TableName); @@ -661,7 +1314,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteByFields(SqliteDb.TableName, FieldMatch.Any, [Field.NotEqual("Value", "purple")]); var remaining = await conn.CountAll(SqliteDb.TableName); @@ -671,7 +1324,7 @@ public static class SqliteCSharpExtensionTests { await using var db = await SqliteDb.BuildDb(); await using var conn = Sqlite.Configuration.DbConn(); - await LoadDocs(); + await LoadDocs(conn); await conn.DeleteByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Value", "crimson")]); var remaining = await conn.CountAll(SqliteDb.TableName); diff --git a/src/Tests.CSharp/SqliteCSharpTests.cs b/src/Tests.CSharp/SqliteCSharpTests.cs index 
14aa617..5c89aab 100644
--- a/src/Tests.CSharp/SqliteCSharpTests.cs
+++ b/src/Tests.CSharp/SqliteCSharpTests.cs
@@ -2,6 +2,7 @@
 using Expecto;
 using Microsoft.FSharp.Core;
 using BitBadger.Documents.Sqlite;
+using Microsoft.Data.Sqlite;
 
 namespace BitBadger.Documents.Tests.CSharp;
 
@@ -147,7 +148,7 @@ public static class SqliteCSharpTests
     /// <summary>
     /// Add the test documents to the database
     /// </summary>
-    internal static async Task LoadDocs()
+    private static async Task LoadDocs()
     {
         foreach (var doc in JsonDocument.TestDocuments) await Document.Insert(SqliteDb.TableName, doc);
     }
@@ -169,11 +170,135 @@ public static class SqliteCSharpTests
         }
     });
 
+    /// <summary>Verify a JSON array begins with "[" and ends with "]"</summary>
+    private static void VerifyBeginEnd(string json)
+    {
+        Expect.stringStarts(json, "[", "The array should have started with `[`");
+        Expect.stringEnds(json, "]", "The array should have ended with `]`");
+    }
+
+    /// <summary>Verify an empty JSON array</summary>
+    private static void VerifyEmpty(string json)
+    {
+        Expect.equal(json, "[]", "There should be no documents returned");
+    }
+
+    /// <summary>Verify an empty JSON document</summary>
+    private static void VerifyNoDoc(string json)
+    {
+        Expect.equal(json, "{}", "There should be no document returned");
+    }
+
+    /// <summary>Set up a stream writer for a test</summary>
+    private static StreamWriter WriteStream(Stream stream)
+    {
+        StreamWriter writer = new(stream);
+        writer.AutoFlush = true;
+        return writer;
+    }
+
+    /// <summary>Get the text of the given stream</summary>
+    private static string StreamText(Stream stream)
+    {
+        stream.Position = 0L;
+        using StreamReader reader = new(stream);
+        return reader.ReadToEnd();
+    }
+
+    /// <summary>Verify the presence of any of the given documents in the given JSON</summary>
+    private static void VerifyAny(string json, IEnumerable<string> docs)
+    {
+        var theDocs = docs.ToList();
+        if (theDocs.Any(json.Contains)) return;
+        var anyDocs = string.Join(" | ", theDocs);
+        Expect.isTrue(false, $"Could not find any of |{anyDocs}| in {json}");
+    }
+
     /// <summary>
     /// Integration tests for the Custom module of the SQLite library
     /// </summary>
     private static readonly Test CustomTests = TestList("Custom",
     [
+        TestList("List",
+        [
+            TestCase("succeeds when data is found", async () =>
+            {
+                await using var db = await SqliteDb.BuildDb();
+                await LoadDocs();
+
+                var docs = await Custom.List(Query.Find(SqliteDb.TableName), Parameters.None,
+                    Results.FromData<JsonDocument>);
+                Expect.equal(docs.Count, 5, "There should have been 5 documents returned");
+            }),
+            TestCase("succeeds when data is not found", async () =>
+            {
+                await using var db = await SqliteDb.BuildDb();
+                await LoadDocs();
+
+                var docs = await Custom.List(
+                    $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", [new("@value", 100)],
+                    Results.FromData<JsonDocument>);
+                Expect.isEmpty(docs, "There should have been no documents returned");
+            })
+        ]),
+        TestList("JsonArray",
+        [
+            TestCase("succeeds when data is found", async () =>
+            {
+                await using var db = await SqliteDb.BuildDb();
+                await LoadDocs();
+
+                var json = await Custom.JsonArray(Query.Find(SqliteDb.TableName), [], Results.JsonFromData);
+                VerifyBeginEnd(json);
+                Expect.stringContains(json, JsonDocument.One, "Document ID `one` should have been found");
+                Expect.stringContains(json, JsonDocument.Two, "Document ID `two` should have been found");
+                Expect.stringContains(json, JsonDocument.Three, "Document ID `three` should have been found");
+                Expect.stringContains(json, JsonDocument.Four, "Document ID `four` should have been found");
+                Expect.stringContains(json, JsonDocument.Five, "Document ID `five` should have been found");
+            }),
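The stream-backed tests that follow all rely on the same round trip built from the helpers above: write through an AutoFlush StreamWriter over a MemoryStream, then rewind and read the text back. Pulled out on its own, the pattern these cases exercise looks like this (a sketch using only helpers and calls already present in this file):

    // Round trip used by every Write* test below
    using MemoryStream stream = new();
    await using var writer = WriteStream(stream);   // AutoFlush, so bytes land as each row is written
    await Custom.WriteJsonArray(Query.Find(SqliteDb.TableName), [], writer, Results.JsonFromData);
    var json = StreamText(stream);                  // rewinds the stream; "[]" if nothing matched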
TestCase("succeeds when data is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + VerifyEmpty(await Custom.JsonArray( + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", + [new SqliteParameter("@value", 100)], Results.JsonFromData)); + }) + ]), + TestList("WriteJsonArray", + [ + TestCase("succeeds when data is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Custom.WriteJsonArray(Query.Find(SqliteDb.TableName), [], writer, Results.JsonFromData); + + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, JsonDocument.One, "Document ID `one` should have been found"); + Expect.stringContains(json, JsonDocument.Two,"Document ID `two` should have been found"); + Expect.stringContains(json, JsonDocument.Three, "Document ID `three` should have been found"); + Expect.stringContains(json, JsonDocument.Four, "Document ID `four` should have been found"); + Expect.stringContains(json, JsonDocument.Five, "Document ID `five` should have been found"); + }), + TestCase("succeeds when data is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Custom.WriteJsonArray( + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", + [new SqliteParameter("@value", 100)], writer, Results.JsonFromData); + + VerifyEmpty(StreamText(stream)); + }) + ]), TestList("Single", [ TestCase("succeeds when a row is found", async () => @@ -196,26 +321,24 @@ public static class SqliteCSharpTests Expect.isNull(doc, "There should not have been a document returned"); }) ]), - TestList("List", + TestList("JsonSingle", [ - TestCase("succeeds when data is found", async () => + TestCase("succeeds when a row is found", async () => { await using var db = await SqliteDb.BuildDb(); await LoadDocs(); - var docs = await Custom.List(Query.Find(SqliteDb.TableName), Parameters.None, - Results.FromData); - Expect.equal(docs.Count, 5, "There should have been 5 documents returned"); + var json = await Custom.JsonSingle($"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id", + [new SqliteParameter("@id", "one")], Results.JsonFromData); + Expect.equal(json, JsonDocument.One, "The JSON document is incorrect"); }), - TestCase("succeeds when data is not found", async () => + TestCase("succeeds when a row is not found", async () => { await using var db = await SqliteDb.BuildDb(); await LoadDocs(); - var docs = await Custom.List( - $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", [new("@value", 100)], - Results.FromData); - Expect.isEmpty(docs, "There should have been no documents returned"); + VerifyNoDoc(await Custom.JsonSingle($"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id", + [new SqliteParameter("@id", "eighty")], Results.JsonFromData)); }) ]), TestList("NonQuery", @@ -757,6 +880,485 @@ public static class SqliteCSharpTests ]) ]); + /// Integration tests for the Json module of the SQLite library + private static readonly Test JsonTests = TestList("Json", + [ + TestList("All", + [ + TestCase("succeeds when there is data", async () => + { + await using var db = await SqliteDb.BuildDb(); + + await Document.Insert(SqliteDb.TableName, new SubDocument { Foo = "one", Bar = "two" }); + await 
+    /// <summary>Integration tests for the Json module of the SQLite library</summary>
+    private static readonly Test JsonTests = TestList("Json",
+    [
+        TestList("All",
+        [
+            TestCase("succeeds when there is data", async () =>
+            {
+                await using var db = await SqliteDb.BuildDb();
+
+                await Document.Insert(SqliteDb.TableName, new SubDocument { Foo = "one", Bar = "two" });
+                await Document.Insert(SqliteDb.TableName, new SubDocument { Foo = "three", Bar = "four" });
+                await Document.Insert(SqliteDb.TableName, new SubDocument { Foo = "five", Bar = "six" });
+
+                var json = await Json.All(SqliteDb.TableName);
+                VerifyBeginEnd(json);
+                Expect.stringContains(json, """{"Foo":"one","Bar":"two"}""", "The first document was not found");
+                Expect.stringContains(json, """{"Foo":"three","Bar":"four"}""", "The second document was not found");
+                Expect.stringContains(json, """{"Foo":"five","Bar":"six"}""", "The third document was not found");
+            }),
+            TestCase("succeeds when there is no data", async () =>
+            {
+                await using var db = await SqliteDb.BuildDb();
+                VerifyEmpty(await Json.All(SqliteDb.TableName));
+            })
+        ]),
+        TestList("AllOrdered",
+        [
+            TestCase("succeeds when ordering numerically", async () =>
+            {
+                await using var db = await SqliteDb.BuildDb();
+                await LoadDocs();
+                Expect.equal(await Json.AllOrdered(SqliteDb.TableName, [Field.Named("n:NumValue")]),
+                    $"[{JsonDocument.One},{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.Four},{JsonDocument.Five}]",
+                    "The documents were not ordered correctly");
+            }),
+            TestCase("succeeds when ordering numerically descending", async () =>
+            {
+                await using var db = await SqliteDb.BuildDb();
+                await LoadDocs();
+                Expect.equal(await Json.AllOrdered(SqliteDb.TableName, [Field.Named("n:NumValue DESC")]),
+                    $"[{JsonDocument.Five},{JsonDocument.Four},{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One}]",
+                    "The documents were not ordered correctly");
+            }),
+            TestCase("succeeds when ordering alphabetically", async () =>
+            {
+                await using var db = await SqliteDb.BuildDb();
+                await LoadDocs();
+                Expect.equal(await Json.AllOrdered(SqliteDb.TableName, [Field.Named("Id DESC")]),
+                    $"[{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Four},{JsonDocument.Five}]",
+                    "The documents were not ordered correctly");
+            })
+        ]),
+        TestList("ById",
+        [
+            TestCase("succeeds when a document is found", async () =>
+            {
+                await using var db = await SqliteDb.BuildDb();
+                await LoadDocs();
+                Expect.equal(await Json.ById(SqliteDb.TableName, "two"), JsonDocument.Two,
+                    "The incorrect document was returned");
+            }),
+            TestCase("succeeds when a document is not found", async () =>
+            {
+                await using var db = await SqliteDb.BuildDb();
+                await LoadDocs();
+                VerifyNoDoc(await Json.ById(SqliteDb.TableName, "three hundred eighty-seven"));
+            })
+        ]),
+        TestList("ByFields",
+        [
+            TestCase("succeeds when documents are found", async () =>
+            {
+                await using var db = await SqliteDb.BuildDb();
+                await LoadDocs();
+
+                var json = await Json.ByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Greater("NumValue", 15)]);
+                VerifyBeginEnd(json);
+                Expect.stringContains(json, JsonDocument.Four, "Document `four` should have been returned");
+                Expect.stringContains(json, JsonDocument.Five, "Document `five` should have been returned");
+            }),
+            TestCase("succeeds when documents are found using IN with numeric field", async () =>
+            {
+                await using var db = await SqliteDb.BuildDb();
+                await LoadDocs();
+                Expect.equal(
+                    await Json.ByFields(SqliteDb.TableName, FieldMatch.All, [Field.In("NumValue", [2, 4, 6, 8])]),
+                    $"[{JsonDocument.Three}]", "There should have been one document returned");
+            }),
+            TestCase("succeeds when documents are not found", async () =>
+            {
+                await using var db = await SqliteDb.BuildDb();
+                await LoadDocs();
+                VerifyEmpty(await Json.ByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Greater("NumValue", 100)]));
+            }),
+            TestCase("succeeds
for InArray when matching documents exist", async () => + { + await using var db = await SqliteDb.BuildDb(); + await Definition.EnsureTable(SqliteDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(SqliteDb.TableName, doc); + + var json = await Json.ByFields(SqliteDb.TableName, FieldMatch.All, + [Field.InArray("Values", SqliteDb.TableName, ["c"])]); + VerifyBeginEnd(json); + Expect.stringContains(json, """{"Id":"first","Values":["a","b","c"]}""", + "Document `first` should have been returned"); + Expect.stringContains(json, """{"Id":"second","Values":["c","d","e"]}""", + "Document `second` should have been returned"); + }), + TestCase("succeeds for InArray when no matching documents exist", async () => + { + await using var db = await SqliteDb.BuildDb(); + await Definition.EnsureTable(SqliteDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(SqliteDb.TableName, doc); + VerifyEmpty(await Json.ByFields(SqliteDb.TableName, FieldMatch.All, + [Field.InArray("Values", SqliteDb.TableName, ["j"])])); + }) + ]), + TestList("ByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + Expect.equal( + await Json.ByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, [Field.Greater("NumValue", 15)], + [Field.Named("Id")]), $"[{JsonDocument.Five},{JsonDocument.Four}]", + "Incorrect documents were returned"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + Expect.equal( + await Json.ByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, [Field.Greater("NumValue", 15)], + [Field.Named("Id DESC")]), $"[{JsonDocument.Four},{JsonDocument.Five}]", + "Incorrect documents were returned"); + }), + TestCase("succeeds when sorting case-sensitively", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + Expect.equal( + await Json.ByFieldsOrdered(SqliteDb.TableName, FieldMatch.All, [Field.LessOrEqual("NumValue", 10)], + [Field.Named("Value")]), + $"[{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Two}]", "Documents not ordered correctly"); + }), + TestCase("succeeds when sorting case-insensitively", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + Expect.equal( + await Json.ByFieldsOrdered(SqliteDb.TableName, FieldMatch.All, [Field.LessOrEqual("NumValue", 10)], + [Field.Named("i:Value")]), + $"[{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.One}]", "Documents not ordered correctly"); + }) + ]), + TestList("FirstByFields", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + Expect.equal( + await Json.FirstByFields(SqliteDb.TableName, FieldMatch.Any, [Field.Equal("Value", "another")]), + JsonDocument.Two, "The incorrect document was returned"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + var json = await Json.FirstByFields(SqliteDb.TableName, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")]); + Expect.notEqual(json, "{}", "There should have been a document returned"); + VerifyAny(json, [JsonDocument.Two, JsonDocument.Four]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + VerifyNoDoc(await 
Json.FirstByFields(SqliteDb.TableName, FieldMatch.Any, + [Field.Equal("Value", "absent")])); + }) + ]), + TestList("FirstByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + Expect.equal( + await Json.FirstByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar")]), JsonDocument.Two, + "An incorrect document was returned"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + Expect.equal( + await Json.FirstByFieldsOrdered(SqliteDb.TableName, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar DESC")]), JsonDocument.Four, + "An incorrect document was returned"); + }) + ]), + TestList("WriteAll", + [ + TestCase("succeeds when there is data", async () => + { + await using var db = await SqliteDb.BuildDb(); + + await Document.Insert(SqliteDb.TableName, new SubDocument { Foo = "one", Bar = "two" }); + await Document.Insert(SqliteDb.TableName, new SubDocument { Foo = "three", Bar = "four" }); + await Document.Insert(SqliteDb.TableName, new SubDocument { Foo = "five", Bar = "six" }); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteAll(SqliteDb.TableName, writer); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, """{"Foo":"one","Bar":"two"}""", "The first document was not found"); + Expect.stringContains(json, """{"Foo":"three","Bar":"four"}""", "The second document was not found"); + Expect.stringContains(json, """{"Foo":"five","Bar":"six"}""", "The third document was not found"); + }), + TestCase("succeeds when there is no data", async () => + { + await using var db = await SqliteDb.BuildDb(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteAll(SqliteDb.TableName, writer); + VerifyEmpty(StreamText(stream)); + }) + ]), + TestList("WriteAllOrdered", + [ + TestCase("succeeds when ordering numerically", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteAllOrdered(SqliteDb.TableName, writer, [Field.Named("n:NumValue")]); + Expect.equal(StreamText(stream), + $"[{JsonDocument.One},{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.Four},{JsonDocument.Five}]", + "The documents were not ordered correctly"); + }), + TestCase("succeeds when ordering numerically descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteAllOrdered(SqliteDb.TableName, writer, [Field.Named("n:NumValue DESC")]); + Expect.equal(StreamText(stream), + $"[{JsonDocument.Five},{JsonDocument.Four},{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One}]", + "The documents were not ordered correctly"); + }), + TestCase("succeeds when ordering alphabetically", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteAllOrdered(SqliteDb.TableName, writer, [Field.Named("Id DESC")]); + Expect.equal(StreamText(stream), + 
$"[{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Four},{JsonDocument.Five}]", + "The documents were not ordered correctly"); + }) + ]), + TestList("WriteById", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteById(SqliteDb.TableName, writer, "two"); + Expect.equal(StreamText(stream), JsonDocument.Two, "The incorrect document was returned"); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteById(SqliteDb.TableName, writer, "three hundred eighty-seven"); + VerifyNoDoc(StreamText(stream)); + }) + ]), + TestList("WriteByFields", + [ + TestCase("succeeds when documents are found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByFields(SqliteDb.TableName, writer, FieldMatch.Any, [Field.Greater("NumValue", 15)]); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, JsonDocument.Four, "Document `four` should have been returned"); + Expect.stringContains(json, JsonDocument.Five, "Document `five` should have been returned"); + }), + TestCase("succeeds when documents are found using IN with numeric field", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByFields(SqliteDb.TableName, writer, FieldMatch.All, + [Field.In("NumValue", [2, 4, 6, 8])]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Three}]", + "There should have been one document returned"); + }), + TestCase("succeeds when documents are not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByFields(SqliteDb.TableName, writer, FieldMatch.Any, [Field.Greater("NumValue", 100)]); + VerifyEmpty(StreamText(stream)); + }), + TestCase("succeeds for InArray when matching documents exist", async () => + { + await using var db = await SqliteDb.BuildDb(); + await Definition.EnsureTable(SqliteDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(SqliteDb.TableName, doc); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByFields(SqliteDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", SqliteDb.TableName, ["c"])]); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, """{"Id":"first","Values":["a","b","c"]}""", + "Document `first` should have been returned"); + Expect.stringContains(json, """{"Id":"second","Values":["c","d","e"]}""", + "Document `second` should have been returned"); + }), + TestCase("succeeds for InArray when no matching documents exist", async () => + { + await using var db = await SqliteDb.BuildDb(); + await Definition.EnsureTable(SqliteDb.TableName); + foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(SqliteDb.TableName, doc); + + using MemoryStream stream = new(); + await using var writer = 
WriteStream(stream); + await Json.WriteByFields(SqliteDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", SqliteDb.TableName, ["j"])]); + VerifyEmpty(StreamText(stream)); + }) + ]), + TestList("WriteByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 15)], [Field.Named("Id")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Five},{JsonDocument.Four}]", + "Incorrect documents were returned"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 15)], [Field.Named("Id DESC")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Four},{JsonDocument.Five}]", + "Incorrect documents were returned"); + }), + TestCase("succeeds when sorting case-sensitively", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.All, + [Field.LessOrEqual("NumValue", 10)], [Field.Named("Value")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Two}]", + "Documents not ordered correctly"); + }), + TestCase("succeeds when sorting case-insensitively", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.All, + [Field.LessOrEqual("NumValue", 10)], [Field.Named("i:Value")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.One}]", + "Documents not ordered correctly"); + }) + ]), + TestList("WriteFirstByFields", + [ + TestCase("succeeds when a document is found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "another")]); + Expect.equal(StreamText(stream), JsonDocument.Two, "The incorrect document was returned"); + }), + TestCase("succeeds when multiple documents are found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")]); + var json = StreamText(stream); + Expect.notEqual(json, "{}", "There should have been a document returned"); + VerifyAny(json, [JsonDocument.Two, JsonDocument.Four]); + }), + TestCase("succeeds when a document is not found", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "absent")]); 
+ VerifyNoDoc(StreamText(stream)); + }) + ]), + TestList("WriteFirstByFieldsOrdered", + [ + TestCase("succeeds when sorting ascending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteFirstByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar")]); + Expect.equal(StreamText(stream), JsonDocument.Two, "An incorrect document was returned"); + }), + TestCase("succeeds when sorting descending", async () => + { + await using var db = await SqliteDb.BuildDb(); + await LoadDocs(); + + using MemoryStream stream = new(); + await using var writer = WriteStream(stream); + await Json.WriteFirstByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar DESC")]); + Expect.equal(StreamText(stream), JsonDocument.Four, "An incorrect document was returned"); + }) + ]) + ]); + /// /// Integration tests for the Update module of the SQLite library /// @@ -1006,6 +1608,7 @@ public static class SqliteCSharpTests CountTests, ExistsTests, FindTests, + JsonTests, UpdateTests, PatchTests, RemoveFieldsTests, diff --git a/src/Tests.CSharp/Types.cs b/src/Tests.CSharp/Types.cs index 3acb0d0..e388084 100644 --- a/src/Tests.CSharp/Types.cs +++ b/src/Tests.CSharp/Types.cs @@ -18,7 +18,7 @@ public class JsonDocument public string Value { get; set; } = ""; public int NumValue { get; set; } = 0; public SubDocument? Sub { get; set; } = null; - + /// /// A set of documents used for integration tests /// @@ -30,6 +30,22 @@ public class JsonDocument new() { Id = "four", Value = "purple", NumValue = 17, Sub = new() { Foo = "green", Bar = "red" } }, new() { Id = "five", Value = "purple", NumValue = 18 } ]; + + /// The JSON for document ID `one` + public static string One = """{"Id":"one","Value":"FIRST!","NumValue":0,"Sub":null}"""; + + /// The JSON for document ID `two` + public static string Two = """{"Id":"two","Value":"another","NumValue":10,"Sub":{"Foo":"green","Bar":"blue"}}"""; + + /// The JSON for document ID `three` + public static string Three = """{"Id":"three","Value":"","NumValue":4,"Sub":null}"""; + + /// The JSON for document ID `four` + public static string Four = """{"Id":"four","Value":"purple","NumValue":17,"Sub":{"Foo":"green","Bar":"red"}}"""; + + /// The JSON for document ID `five` + public static string Five = """{"Id":"five","Value":"purple","NumValue":18,"Sub":null}"""; + } public class ArrayDocument diff --git a/src/Tests/SqliteExtensionTests.fs b/src/Tests/SqliteExtensionTests.fs index b17c19e..44dca44 100644 --- a/src/Tests/SqliteExtensionTests.fs +++ b/src/Tests/SqliteExtensionTests.fs @@ -14,7 +14,7 @@ let integrationTests = let loadDocs (conn: SqliteConnection) = backgroundTask { for doc in testDocuments do do! conn.insert SqliteDb.TableName doc } - + /// Set up a stream writer for a test let writeStream (stream: Stream) = let writer = new StreamWriter(stream) @@ -48,7 +48,7 @@ let integrationTests = let theDocs = docs |> String.concat " | " Expect.isTrue false $"Could not find any of |{theDocs}| in {json}" - ftestList "Sqlite.Extensions" [ + testList "Sqlite.Extensions" [ testTask "ensureTable succeeds" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () @@ -57,12 +57,12 @@ let integrationTests = $"SELECT EXISTS (SELECT 1 FROM {SqliteDb.Catalog} WHERE name = @name) AS it" [ SqliteParameter("@name", name) ] toExists - + let! 
exists = itExists "ensured" let! alsoExists = itExists "idx_ensured_key" Expect.isFalse exists "The table should not exist already" Expect.isFalse alsoExists "The key index should not exist already" - + do! conn.ensureTable "ensured" let! exists' = itExists "ensured" let! alsoExists' = itExists "idx_ensured_key" @@ -77,10 +77,10 @@ let integrationTests = $"SELECT EXISTS (SELECT 1 FROM {SqliteDb.Catalog} WHERE name = 'idx_ensured_test') AS it" [] toExists - + let! exists = indexExists () Expect.isFalse exists "The index should not exist already" - + do! conn.ensureTable "ensured" do! conn.ensureFieldIndex "ensured" "test" [ "Name"; "Age" ] let! exists' = indexExists () @@ -92,7 +92,7 @@ let integrationTests = use conn = Configuration.dbConn () let! before = conn.findAll SqliteDb.TableName Expect.equal before [] "There should be no documents in the table" - + let testDoc = { emptyDoc with Id = "turkey"; Sub = Some { Foo = "gobble"; Bar = "gobble" } } do! conn.insert SqliteDb.TableName testDoc let! after = conn.findAll SqliteDb.TableName @@ -116,7 +116,7 @@ let integrationTests = use conn = Configuration.dbConn () let! before = conn.findAll SqliteDb.TableName Expect.equal before [] "There should be no documents in the table" - + let testDoc = { emptyDoc with Id = "test"; Sub = Some { Foo = "a"; Bar = "b" } } do! conn.save SqliteDb.TableName testDoc let! after = conn.findAll SqliteDb.TableName @@ -127,11 +127,11 @@ let integrationTests = use conn = Configuration.dbConn () let testDoc = { emptyDoc with Id = "test"; Sub = Some { Foo = "a"; Bar = "b" } } do! conn.insert SqliteDb.TableName testDoc - + let! before = conn.findById SqliteDb.TableName "test" if Option.isNone before then Expect.isTrue false "There should have been a document returned" Expect.equal before.Value testDoc "The document is not correct" - + let upd8Doc = { testDoc with Sub = Some { Foo = "c"; Bar = "d" } } do! conn.save SqliteDb.TableName upd8Doc let! after = conn.findById SqliteDb.TableName "test" @@ -144,7 +144,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let! theCount = conn.countAll SqliteDb.TableName Expect.equal theCount 5L "There should have been 5 matching documents" } @@ -152,7 +152,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let! theCount = conn.countByFields SqliteDb.TableName Any [ Field.Equal "Value" "purple" ] Expect.equal theCount 2L "There should have been 2 matching documents" } @@ -161,7 +161,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let! exists = conn.existsById SqliteDb.TableName "three" Expect.isTrue exists "There should have been an existing document" } @@ -169,7 +169,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let! exists = conn.existsById SqliteDb.TableName "seven" Expect.isFalse exists "There should not have been an existing document" } @@ -179,7 +179,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let! exists = conn.existsByFields SqliteDb.TableName Any [ Field.Equal "NumValue" 10 ] Expect.isTrue exists "There should have been existing documents" } @@ -187,7 +187,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let! 
exists = conn.existsByFields SqliteDb.TableName Any [ Field.Equal "Nothing" "none" ] Expect.isFalse exists "There should not have been any existing documents" } @@ -196,11 +196,11 @@ let integrationTests = testTask "succeeds when there is data" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - + do! insert SqliteDb.TableName { Foo = "one"; Bar = "two" } do! insert SqliteDb.TableName { Foo = "three"; Bar = "four" } do! insert SqliteDb.TableName { Foo = "five"; Bar = "six" } - + let! results = conn.findAll SqliteDb.TableName let expected = [ { Foo = "one"; Bar = "two" } @@ -221,7 +221,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let! results = conn.findAllOrdered SqliteDb.TableName [ Field.Named "n:NumValue" ] Expect.hasLength results 5 "There should have been 5 documents returned" Expect.equal @@ -233,7 +233,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let! results = conn.findAllOrdered SqliteDb.TableName [ Field.Named "n:NumValue DESC" ] Expect.hasLength results 5 "There should have been 5 documents returned" Expect.equal @@ -245,7 +245,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let! results = conn.findAllOrdered SqliteDb.TableName [ Field.Named "Id DESC" ] Expect.hasLength results 5 "There should have been 5 documents returned" Expect.equal @@ -259,7 +259,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let! doc = conn.findById SqliteDb.TableName "two" Expect.isSome doc "There should have been a document returned" Expect.equal doc.Value.Id "two" "The incorrect document was returned" @@ -268,7 +268,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let! doc = conn.findById SqliteDb.TableName "three hundred eighty-seven" Expect.isNone doc "There should not have been a document returned" } @@ -278,7 +278,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let! docs = conn.findByFields SqliteDb.TableName Any [ Field.Equal "Sub.Foo" "green" ] Expect.hasLength docs 2 "There should have been two documents returned" } @@ -286,7 +286,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let! docs = conn.findByFields SqliteDb.TableName Any [ Field.Equal "Value" "mauve" ] Expect.isEmpty docs "There should have been no documents returned" } @@ -320,7 +320,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let! doc = conn.findFirstByFields SqliteDb.TableName Any [ Field.Equal "Value" "another" ] Expect.isSome doc "There should have been a document returned" Expect.equal doc.Value.Id "two" "The incorrect document was returned" @@ -329,7 +329,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let! doc = conn.findFirstByFields SqliteDb.TableName Any [ Field.Equal "Sub.Foo" "green" ] Expect.isSome doc "There should have been a document returned" Expect.contains [ "two"; "four" ] doc.Value.Id "An incorrect document was returned" @@ -338,7 +338,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let! 
doc = conn.findFirstByFields SqliteDb.TableName Any [ Field.Equal "Value" "absent" ] Expect.isNone doc "There should not have been a document returned" } @@ -861,7 +861,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let testDoc = { emptyDoc with Id = "one"; Sub = Some { Foo = "blue"; Bar = "red" } } do! conn.updateById SqliteDb.TableName "one" testDoc let! after = conn.findById SqliteDb.TableName "one" @@ -872,10 +872,10 @@ let integrationTests = testTask "succeeds when no document is updated" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - + let! before = conn.findAll SqliteDb.TableName Expect.isEmpty before "There should have been no documents returned" - + // This not raising an exception is the test do! conn.updateById SqliteDb.TableName @@ -888,7 +888,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + do! conn.updateByFunc SqliteDb.TableName _.Id { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } let! after = conn.findById SqliteDb.TableName "one" @@ -902,10 +902,10 @@ let integrationTests = testTask "succeeds when no document is updated" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - + let! before = conn.findAll SqliteDb.TableName Expect.isEmpty before "There should have been no documents returned" - + // This not raising an exception is the test do! conn.updateByFunc SqliteDb.TableName _.Id { Id = "one"; Value = "le un"; NumValue = 1; Sub = None } @@ -916,7 +916,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + do! conn.patchById SqliteDb.TableName "one" {| NumValue = 44 |} let! after = conn.findById SqliteDb.TableName "one" if Option.isNone after then @@ -926,10 +926,10 @@ let integrationTests = testTask "succeeds when no document is updated" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - + let! before = conn.findAll SqliteDb.TableName Expect.isEmpty before "There should have been no documents returned" - + // This not raising an exception is the test do! conn.patchById SqliteDb.TableName "test" {| Foo = "green" |} } @@ -939,7 +939,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + do! conn.patchByFields SqliteDb.TableName Any [ Field.Equal "Value" "purple" ] {| NumValue = 77 |} let! after = conn.countByFields SqliteDb.TableName Any [ Field.Equal "NumValue" 77 ] Expect.equal after 2L "There should have been 2 documents returned" @@ -947,10 +947,10 @@ let integrationTests = testTask "succeeds when no document is updated" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - + let! before = conn.findAll SqliteDb.TableName Expect.isEmpty before "There should have been no documents returned" - + // This not raising an exception is the test do! conn.patchByFields SqliteDb.TableName Any [ Field.Equal "Value" "burgundy" ] {| Foo = "green" |} } @@ -960,7 +960,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + do! conn.removeFieldsById SqliteDb.TableName "two" [ "Sub"; "Value" ] try let! _ = conn.findById SqliteDb.TableName "two" @@ -973,14 +973,14 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + // This not raising an exception is the test do! 
conn.removeFieldsById SqliteDb.TableName "two" [ "AFieldThatIsNotThere" ] } testTask "succeeds when no document is matched" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - + // This not raising an exception is the test do! conn.removeFieldsById SqliteDb.TableName "two" [ "Value" ] } @@ -990,7 +990,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + do! conn.removeFieldsByFields SqliteDb.TableName Any [ Field.Equal "NumValue" 17 ] [ "Sub" ] try let! _ = conn.findById SqliteDb.TableName "four" @@ -1003,14 +1003,14 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + // This not raising an exception is the test do! conn.removeFieldsByFields SqliteDb.TableName Any [ Field.Equal "NumValue" 17 ] [ "Nothing" ] } testTask "succeeds when no document is matched" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - + // This not raising an exception is the test do! conn.removeFieldsByFields SqliteDb.TableName Any [ Field.NotEqual "Abracadabra" "apple" ] [ "Value" ] @@ -1021,7 +1021,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + do! conn.deleteById SqliteDb.TableName "four" let! remaining = conn.countAll SqliteDb.TableName Expect.equal remaining 4L "There should have been 4 documents remaining" @@ -1030,7 +1030,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + do! conn.deleteById SqliteDb.TableName "thirty" let! remaining = conn.countAll SqliteDb.TableName Expect.equal remaining 5L "There should have been 5 documents remaining" @@ -1041,7 +1041,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + do! conn.deleteByFields SqliteDb.TableName Any [ Field.NotEqual "Value" "purple" ] let! remaining = conn.countAll SqliteDb.TableName Expect.equal remaining 2L "There should have been 2 documents remaining" @@ -1050,7 +1050,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + do! conn.deleteByFields SqliteDb.TableName Any [ Field.Equal "Value" "crimson" ] let! remaining = conn.countAll SqliteDb.TableName Expect.equal remaining 5L "There should have been 5 documents remaining" @@ -1061,7 +1061,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let! docs = conn.customList (Query.find SqliteDb.TableName) [] fromData Expect.hasLength docs 5 "There should have been 5 documents returned" } @@ -1069,7 +1069,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let! docs = conn.customList $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value" @@ -1146,7 +1146,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let! doc = conn.customSingle $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id" @@ -1159,7 +1159,7 @@ let integrationTests = use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () do! loadDocs conn - + let! doc = conn.customSingle $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'Id' = @id" @@ -1221,7 +1221,7 @@ let integrationTests = testTask "customScalar succeeds" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () - + let! 
nbr = conn.customScalar "SELECT 5 AS test_value" [] _.GetInt32(0) Expect.equal nbr 5 "The query should have returned the number 5" } diff --git a/src/Tests/SqliteTests.fs b/src/Tests/SqliteTests.fs index 1ba12a1..15e5d1d 100644 --- a/src/Tests/SqliteTests.fs +++ b/src/Tests/SqliteTests.fs @@ -1402,7 +1402,7 @@ let deleteTests = testList "Delete" [ ] /// All tests for the SQLite library -let all = ftestList "Sqlite" [ +let all = testList "Sqlite" [ testList "Unit" [ queryTests; parametersTests ] testSequenced <| testList "Integration" [ configurationTests -- 2.47.2 From 98f216d6dbdc7b242743bf2d7a8d7daae6f50e1a Mon Sep 17 00:00:00 2001 From: "Daniel J. Summers" Date: Tue, 8 Apr 2025 07:19:00 -0400 Subject: [PATCH 14/22] Edit docs for common project --- src/Common/Library.fs | 278 ++++++++++++------------ src/Tests.CSharp/CommonCSharpTests.cs | 6 +- src/Tests.CSharp/PostgresCSharpTests.cs | 98 ++++----- 3 files changed, 191 insertions(+), 191 deletions(-) diff --git a/src/Common/Library.fs b/src/Common/Library.fs index 8bae732..c5668b7 100644 --- a/src/Common/Library.fs +++ b/src/Common/Library.fs @@ -5,40 +5,40 @@ open System.Security.Cryptography /// The types of comparisons available for JSON fields /// type Comparison = - - /// Equals (=) + + /// Equals (=) | Equal of Value: obj - - /// Greater Than (>) + + /// Greater Than (>) | Greater of Value: obj - - /// Greater Than or Equal To (>=) + + /// Greater Than or Equal To (>=) | GreaterOrEqual of Value: obj - - /// Less Than (<) + + /// Less Than (<) | Less of Value: obj - - /// Less Than or Equal To (<=) - | LessOrEqual of Value: obj - - /// Not Equal to (<>) + + /// Less Than or Equal To (<=) + | LessOrEqual of Value: obj + + /// Not Equal to (<>) | NotEqual of Value: obj - - /// Between (BETWEEN) + + /// Between (BETWEEN) | Between of Min: obj * Max: obj - - /// In (IN) + + /// In (IN) | In of Values: obj seq - - /// In Array (PostgreSQL: |?, SQLite: EXISTS / json_each / IN) + + /// In Array (PostgreSQL: |?, SQLite: EXISTS / json_each / IN) | InArray of Table: string * Values: obj seq - - /// Exists (IS NOT NULL) + + /// Exists (IS NOT NULL) | Exists - - /// Does Not Exist (IS NULL) + + /// Does Not Exist (IS NULL) | NotExists - + /// The operator SQL for this comparison member this.OpSql = match this with @@ -50,7 +50,7 @@ type Comparison = | NotEqual _ -> "<>" | Between _ -> "BETWEEN" | In _ -> "IN" - | InArray _ -> "?|" // PostgreSQL only; SQL needs a subquery for this + | InArray _ -> "?|" // PostgreSQL only; SQL needs a subquery for this | Exists -> "IS NOT NULL" | NotExists -> "IS NULL" @@ -62,120 +62,120 @@ type Dialect = | SQLite -/// The format in which an element of a JSON field should be extracted +/// The format in which an element of a JSON field should be extracted [] type FieldFormat = - + /// - /// Use ->> or #>>; extracts a text (PostgreSQL) or SQL (SQLite) value + /// Use ->> or #>>; extracts a text (PostgreSQL) or SQL (SQLite) value /// | AsSql - - /// Use -> or #>; extracts a JSONB (PostgreSQL) or JSON (SQLite) value + + /// Use -> or #>; extracts a JSONB (PostgreSQL) or JSON (SQLite) value | AsJson -/// Criteria for a field WHERE clause +/// Criteria for a field WHERE clause type Field = { - + /// The name of the field Name: string - + /// The comparison for the field Comparison: Comparison - + /// The name of the parameter for this field ParameterName: string option - + /// The table qualifier for this field Qualifier: string option } with - + /// Create a comparison against a field /// The name of the field 
against which the comparison should be applied /// The comparison for the given field - /// A new Field instance implementing the given comparison + /// A new Field instance implementing the given comparison static member Where name (comparison: Comparison) = { Name = name; Comparison = comparison; ParameterName = None; Qualifier = None } - - /// Create an equals (=) field criterion + + /// Create an equals (=) field criterion /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member Equal<'T> name (value: 'T) = Field.Where name (Equal value) - - /// Create an equals (=) field criterion (alias) + + /// Create an equals (=) field criterion (alias) /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member EQ<'T> name (value: 'T) = Field.Equal name value - - /// Create a greater than (>) field criterion + + /// Create a greater than (>) field criterion /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member Greater<'T> name (value: 'T) = Field.Where name (Greater value) - - /// Create a greater than (>) field criterion (alias) + + /// Create a greater than (>) field criterion (alias) /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member GT<'T> name (value: 'T) = Field.Greater name value - - /// Create a greater than or equal to (>=) field criterion + + /// Create a greater than or equal to (>=) field criterion /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member GreaterOrEqual<'T> name (value: 'T) = Field.Where name (GreaterOrEqual value) - - /// Create a greater than or equal to (>=) field criterion (alias) + + /// Create a greater than or equal to (>=) field criterion (alias) /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member GE<'T> name (value: 'T) = Field.GreaterOrEqual name value - - /// Create a less than (<) field criterion + + /// Create a less than (<) field criterion /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member Less<'T> name (value: 'T) = Field.Where name (Less value) - - /// Create a less than (<) field criterion (alias) + + /// Create a less than (<) field criterion (alias) /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member LT<'T> name (value: 'T) = Field.Less name value - - /// Create a less than or equal to (<=) field criterion + + /// Create a less than or equal to (<=) field criterion /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member LessOrEqual<'T> name (value: 'T) = Field.Where name (LessOrEqual value) - - /// Create a less than or equal to (<=) field criterion (alias) + + /// Create a less than or equal to (<=) field criterion (alias) /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member LE<'T> name (value: 'T) = Field.LessOrEqual name value - - /// Create a not equals (<>) field criterion + + /// Create a not equals (<>) field criterion /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static 
member NotEqual<'T> name (value: 'T) = Field.Where name (NotEqual value) - - /// Create a not equals (<>) field criterion (alias) + + /// Create a not equals (<>) field criterion (alias) /// The name of the field to be compared /// The value for the comparison /// A field with the given comparison static member NE<'T> name (value: 'T) = Field.NotEqual name value - + /// Create a Between field criterion /// The name of the field to be compared /// The minimum value for the comparison range @@ -183,27 +183,27 @@ type Field = { /// A field with the given comparison static member Between<'T> name (min: 'T) (max: 'T) = Field.Where name (Between(min, max)) - + /// Create a Between field criterion (alias) /// The name of the field to be compared /// The minimum value for the comparison range /// The maximum value for the comparison range /// A field with the given comparison static member BT<'T> name (min: 'T) (max: 'T) = Field.Between name min max - + /// Create an In field criterion /// The name of the field to be compared /// The values for the comparison /// A field with the given comparison static member In<'T> name (values: 'T seq) = Field.Where name (In (Seq.map box values)) - + /// Create an In field criterion (alias) /// The name of the field to be compared /// The values for the comparison /// A field with the given comparison static member IN<'T> name (values: 'T seq) = Field.In name values - + /// Create an InArray field criterion /// The name of the field to be compared /// The name of the table in which the field's documents are stored @@ -211,34 +211,34 @@ type Field = { /// A field with the given comparison static member InArray<'T> name tableName (values: 'T seq) = Field.Where name (InArray(tableName, Seq.map box values)) - - /// Create an exists (IS NOT NULL) field criterion + + /// Create an exists (IS NOT NULL) field criterion /// The name of the field to be compared /// A field with the given comparison static member Exists name = Field.Where name Exists - - /// Create an exists (IS NOT NULL) field criterion (alias) + + /// Create an exists (IS NOT NULL) field criterion (alias) /// The name of the field to be compared /// A field with the given comparison static member EX name = Field.Exists name - - /// Create a not exists (IS NULL) field criterion + + /// Create a not exists (IS NULL) field criterion /// The name of the field to be compared /// A field with the given comparison static member NotExists name = Field.Where name NotExists - - /// Create a not exists (IS NULL) field criterion (alias) + + /// Create a not exists (IS NULL) field criterion (alias) /// The name of the field to be compared /// A field with the given comparison static member NEX name = Field.NotExists name - - /// Transform a field name (a.b.c) to a path for the given SQL dialect + + /// Transform a field name (a.b.c) to a path for the given SQL dialect /// The name of the field in dotted format /// The SQL dialect to use when converting the name to nested path format /// Whether to reference this path as a JSON value or a SQL value - /// A string with the path required to address the nested document value + /// A string with the path required to address the nested document value static member NameToPath (name: string) dialect format = let path = if name.Contains '.' 
then @@ -254,19 +254,19 @@ type Field = { else match format with AsJson -> $"->'{name}'" | AsSql -> $"->>'{name}'" $"data{path}" - + /// Create a field with a given name, but no other properties filled /// The field name, along with any other qualifications if used in a sorting context - /// Comparison will be Equal, value will be an empty string + /// Comparison will be Equal, value will be an empty string static member Named name = Field.Where name (Equal "") - + /// Specify the name of the parameter for this field - /// The parameter name (including : or @) + /// The parameter name (including : or @) /// A field with the given parameter name specified member this.WithParameterName name = { this with ParameterName = Some name } - + /// Specify a qualifier (alias) for the table from which this field will be referenced /// The table alias for this field comparison /// A field with the given qualifier specified @@ -276,7 +276,7 @@ type Field = { /// Get the qualified path to the field /// The SQL dialect to use when converting the name to nested path format /// Whether to reference this path as a JSON value or a SQL value - /// A string with the qualified path required to address the nested document value + /// A string with the qualified path required to address the nested document value member this.Path dialect format = (this.Qualifier |> Option.map (fun q -> $"{q}.") |> Option.defaultValue "") + Field.NameToPath this.Name dialect format @@ -285,13 +285,13 @@ type Field = { /// How fields should be matched [] type FieldMatch = - - /// Any field matches (OR) + + /// Any field matches (OR) | Any - - /// All fields match (AND) + + /// All fields match (AND) | All - + /// The SQL value implementing each matching strategy override this.ToString() = match this with Any -> "OR" | All -> "AND" @@ -299,10 +299,10 @@ type FieldMatch = /// Derive parameter names (each instance wraps a counter to uniquely name anonymous fields) type ParameterName() = - + /// The counter for the next field value let mutable currentIdx = -1 - + /// /// Return the specified name for the parameter, or an anonymous parameter name if none is specified /// @@ -319,30 +319,30 @@ type ParameterName() = /// Automatically-generated document ID strategies [] type AutoId = - + /// No automatic IDs will be generated | Disabled - + /// Generate a MAX-plus-1 numeric value for documents | Number - - /// Generate a GUID for each document (as a lowercase, no-dashes, 32-character string) + + /// Generate a GUID for each document (as a lowercase, no-dashes, 32-character string) | Guid - + /// Generate a random string of hexadecimal characters for each document | RandomString with - /// Generate a GUID string - /// A GUID string + /// Generate a GUID string + /// A GUID string static member GenerateGuid() = System.Guid.NewGuid().ToString "N" - + /// Generate a string of random hexadecimal characters /// The number of characters to generate /// A string of the given length with random hexadecimal characters static member GenerateRandomString(length: int) = RandomNumberGenerator.GetHexString(length, lowercase = true) - + /// Does the given document need an automatic ID generated? 
/// The auto-ID strategy currently in use /// The document being inserted @@ -387,26 +387,26 @@ with /// The required document serialization implementation type IDocumentSerializer = - + /// Serialize an object to a JSON string abstract Serialize<'T> : 'T -> string - + /// Deserialize a JSON string into an object abstract Deserialize<'T> : string -> 'T /// Document serializer defaults module DocumentSerializer = - + open System.Text.Json open System.Text.Json.Serialization - + /// The default JSON serializer options to use with the stock serializer let private jsonDefaultOpts = let o = JsonSerializerOptions() o.Converters.Add(JsonFSharpConverter()) o - + /// The default JSON serializer [] let ``default`` = @@ -424,7 +424,7 @@ module Configuration = /// The serializer to use for document manipulation let mutable private serializerValue = DocumentSerializer.``default`` - + /// Register a serializer to use for translating documents to domain types /// The serializer to use when manipulating documents [] @@ -436,46 +436,46 @@ module Configuration = [] let serializer () = serializerValue - + /// The serialized name of the ID field for documents let mutable private idFieldValue = "Id" - + /// Specify the name of the ID field for documents /// The name of the ID field for documents [] let useIdField it = idFieldValue <- it - + /// Retrieve the currently configured ID field for documents /// The currently configured ID field [] let idField () = idFieldValue - + /// The automatic ID strategy used by the library let mutable private autoIdValue = Disabled - + /// Specify the automatic ID generation strategy used by the library /// The automatic ID generation strategy to use [] let useAutoIdStrategy it = autoIdValue <- it - + /// Retrieve the currently configured automatic ID generation strategy /// The current automatic ID generation strategy [] let autoIdStrategy () = autoIdValue - + /// The length of automatically generated random strings let mutable private idStringLengthValue = 16 - + /// Specify the length of automatically generated random strings /// The length of automatically generated random strings [] let useIdStringLength length = idStringLengthValue <- length - + /// Retrieve the currently configured length of automatically generated random strings /// The current length of automatically generated random strings [] @@ -486,31 +486,31 @@ module Configuration = /// Query construction functions [] module Query = - - /// Combine a query (SELECT, UPDATE, etc.) and a WHERE clause + + /// Combine a query (SELECT, UPDATE, etc.) and a WHERE clause /// The first part of the statement - /// The WHERE clause for the statement - /// The two parts of the query combined with WHERE + /// The WHERE clause for the statement + /// The two parts of the query combined with WHERE [] let statementWhere statement where = $"%s{statement} WHERE %s{where}" - + /// Queries to define tables and indexes module Definition = - + /// SQL statement to create a document table /// The name of the table to create (may include schema) - /// The type of data for the column (JSON, JSONB, etc.) + /// The type of data for the column (JSON, JSONB, etc.) /// A query to create a document table [] let ensureTableFor name dataType = $"CREATE TABLE IF NOT EXISTS %s{name} (data %s{dataType} NOT NULL)" - + /// Split a schema and table name let private splitSchemaAndTable (tableName: string) = let parts = tableName.Split '.' 
if Array.length parts = 1 then "", tableName else parts[0], parts[1] - + /// SQL statement to create an index on one or more fields in a JSON document /// The table on which an index should be created (may include schema) /// The name of the index to be created @@ -537,7 +537,7 @@ module Query = [] let ensureKey tableName dialect = (ensureIndexOn tableName "key" [ Configuration.idField () ] dialect).Replace("INDEX", "UNIQUE INDEX") - + /// Query to insert a document /// The table into which to insert (may include schema) /// A query to insert a document @@ -554,48 +554,48 @@ module Query = let save tableName = sprintf "INSERT INTO %s VALUES (@data) ON CONFLICT ((data->>'%s')) DO UPDATE SET data = EXCLUDED.data" - tableName (Configuration.idField ()) - + tableName (Configuration.idField ()) + /// Query to count documents in a table /// The table in which to count documents (may include schema) /// A query to count documents - /// This query has no WHERE clause + /// This query has no WHERE clause [] let count tableName = $"SELECT COUNT(*) AS it FROM %s{tableName}" - + /// Query to check for document existence in a table /// The table in which existence should be checked (may include schema) - /// The WHERE clause with the existence criteria + /// The WHERE clause with the existence criteria /// A query to check document existence [] let exists tableName where = $"SELECT EXISTS (SELECT 1 FROM %s{tableName} WHERE %s{where}) AS it" - + /// Query to select documents from a table /// The table from which documents should be found (may include schema) /// A query to retrieve documents - /// This query has no WHERE clause + /// This query has no WHERE clause [] let find tableName = $"SELECT data FROM %s{tableName}" - + /// Query to update (replace) a document /// The table in which documents should be replaced (may include schema) /// A query to update documents - /// This query has no WHERE clause + /// This query has no WHERE clause [] let update tableName = $"UPDATE %s{tableName} SET data = @data" - + /// Query to delete documents from a table /// The table in which documents should be deleted (may include schema) /// A query to delete documents - /// This query has no WHERE clause + /// This query has no WHERE clause [] let delete tableName = $"DELETE FROM %s{tableName}" - + /// Create a SELECT clause to retrieve the document data from the given table /// The table from which documents should be found (may include schema) /// A query to retrieve documents @@ -603,11 +603,11 @@ module Query = [] let selectFromTable tableName = find tableName - - /// Create an ORDER BY clause for the given fields + + /// Create an ORDER BY clause for the given fields /// One or more fields by which to order /// The SQL dialect for the generated clause - /// An ORDER BY clause for the given fields + /// An ORDER BY clause for the given fields [] let orderBy fields dialect = if Seq.isEmpty fields then "" diff --git a/src/Tests.CSharp/CommonCSharpTests.cs b/src/Tests.CSharp/CommonCSharpTests.cs index 911b732..90cbcba 100644 --- a/src/Tests.CSharp/CommonCSharpTests.cs +++ b/src/Tests.CSharp/CommonCSharpTests.cs @@ -16,7 +16,7 @@ internal class TestSerializer : IDocumentSerializer } /// -/// C# Tests for common functionality in BitBadger.Documents +/// C# Tests for common functionality in BitBadger.Documents /// public static class CommonCSharpTests { @@ -417,7 +417,7 @@ public static class CommonCSharpTests }) ]) ]); - + /// /// Unit tests for the Configuration static class /// @@ -647,7 +647,7 @@ public static 
class CommonCSharpTests }) ]) ]); - + /// /// Unit tests /// diff --git a/src/Tests.CSharp/PostgresCSharpTests.cs b/src/Tests.CSharp/PostgresCSharpTests.cs index df8ab6d..b0b3393 100644 --- a/src/Tests.CSharp/PostgresCSharpTests.cs +++ b/src/Tests.CSharp/PostgresCSharpTests.cs @@ -344,6 +344,55 @@ public static class PostgresCSharpTests return reader.ReadToEnd(); } + /// Verify a JSON array begins with "[" and ends with "]" + private static void VerifyBeginEnd(string json) + { + Expect.stringStarts(json, "[", "The array should have started with `[`"); + Expect.stringEnds(json, "]", "The array should have ended with `]`"); + } + + /// Verify the presence of a document by its ID + private static void VerifyDocById(string json, string docId) + { + Expect.stringContains(json, $"{{\"Id\": \"{docId}\",", $"Document `{docId}` not present"); + } + + /// Verify the presence of a document by its ID + private static void VerifySingleById(string json, string docId) + { + VerifyBeginEnd(json); + Expect.stringContains(json, $"{{\"Id\": \"{docId}\",", $"Document `{docId}` not present"); + } + + /// Verify the presence of any of the given document IDs in the given JSON + private static void VerifyAnyById(string json, IEnumerable docIds) + { + var theIds = docIds.ToList(); + if (theIds.Any(it => json.Contains($"{{\"Id\": \"{it}\""))) return; + var ids = string.Join(", ", theIds); + Expect.isTrue(false, $"Could not find any of IDs {ids} in {json}"); + } + + /// Verify the JSON for `all` returning data + private static void VerifyAllData(string json) + { + VerifyBeginEnd(json); + IEnumerable ids = ["one", "two", "three", "four", "five"]; + foreach (var docId in ids) VerifyDocById(json, docId); + } + + /// Verify an empty JSON array + private static void VerifyEmpty(string json) + { + Expect.equal(json, "[]", "There should be no documents returned"); + } + + /// Verify an empty JSON document + private static void VerifyNoDoc(string json) + { + Expect.equal(json, "{}", "There should be no document returned"); + } + /// /// Integration tests for the Configuration module of the PostgreSQL library /// @@ -1291,55 +1340,6 @@ public static class PostgresCSharpTests ]) ]); - /// Verify a JSON array begins with "[" and ends with "]" - private static void VerifyBeginEnd(string json) - { - Expect.stringStarts(json, "[", "The array should have started with `[`"); - Expect.stringEnds(json, "]", "The array should have ended with `]`"); - } - - /// Verify the presence of a document by its ID - private static void VerifyDocById(string json, string docId) - { - Expect.stringContains(json, $"{{\"Id\": \"{docId}\",", $"Document `{docId}` not present"); - } - - /// Verify the presence of a document by its ID - private static void VerifySingleById(string json, string docId) - { - VerifyBeginEnd(json); - Expect.stringContains(json, $"{{\"Id\": \"{docId}\",", $"Document `{docId}` not present"); - } - - /// Verify the presence of any of the given document IDs in the given JSON - private static void VerifyAnyById(string json, IEnumerable docIds) - { - var theIds = docIds.ToList(); - if (theIds.Any(it => json.Contains($"{{\"Id\": \"{it}\""))) return; - var ids = string.Join(", ", theIds); - Expect.isTrue(false, $"Could not find any of IDs {ids} in {json}"); - } - - /// Verify the JSON for `all` returning data - private static void VerifyAllData(string json) - { - VerifyBeginEnd(json); - IEnumerable ids = ["one", "two", "three", "four", "five"]; - foreach (var docId in ids) VerifyDocById(json, docId); - } - - /// Verify an empty JSON 
array - private static void VerifyEmpty(string json) - { - Expect.equal(json, "[]", "There should be no documents returned"); - } - - /// Verify an empty JSON document - private static void VerifyNoDoc(string json) - { - Expect.equal(json, "{}", "There should be no document returned"); - } - /// Verify the JSON for an ordered query private static void VerifyExpectedOrder(string json, string idFirst, string idSecond, string? idThird = null, string? idFourth = null, string? idFifth = null) -- 2.47.2 From 004f91bc014381ed6101798ea3414df1230c9488 Mon Sep 17 00:00:00 2001 From: "Daniel J. Summers" Date: Tue, 8 Apr 2025 18:53:34 -0400 Subject: [PATCH 15/22] Add shell of docfx files --- .gitignore | 4 ++++ docfx.json | 48 +++++++++++++++++++++++++++++++++++++++++ docs/getting-started.md | 1 + docs/introduction.md | 1 + docs/toc.yml | 4 ++++ index.md | 11 ++++++++++ toc.yml | 4 ++++ 7 files changed, 73 insertions(+) create mode 100644 docfx.json create mode 100644 docs/getting-started.md create mode 100644 docs/introduction.md create mode 100644 docs/toc.yml create mode 100644 index.md create mode 100644 toc.yml diff --git a/.gitignore b/.gitignore index 06998cb..47c72c2 100644 --- a/.gitignore +++ b/.gitignore @@ -400,3 +400,7 @@ FodyWeavers.xsd # Test run files src/*-tests.txt + +# Documentation builds and intermediate files +_site/ +api/ diff --git a/docfx.json b/docfx.json new file mode 100644 index 0000000..a99a721 --- /dev/null +++ b/docfx.json @@ -0,0 +1,48 @@ +{ + "$schema": "https://raw.githubusercontent.com/dotnet/docfx/main/schemas/docfx.schema.json", + "metadata": [ + { + "src": [ + { + "src": "./src", + "files": [ + "Common/bin/Release/net9.0/*.dll", + "Postgres/bin/Release/net9.0/*.dll", + "Sqlite/bin/Release/net9.0/*.dll" + ] + } + ], + "dest": "api" + } + ], + "build": { + "content": [ + { + "files": [ + "api/*.{md,yml}" + ], + "exclude": [ + "_site/**" + ] + } + ], + "resource": [ + { + "files": [ + "images/**" + ] + } + ], + "output": "_site", + "template": [ + "default", + "modern" + ], + "globalMetadata": { + "_appName": "BitBadger.Documents", + "_appTitle": "BitBadger.Documents", + "_enableSearch": true, + "pdf": false + } + } +} \ No newline at end of file diff --git a/docs/getting-started.md b/docs/getting-started.md new file mode 100644 index 0000000..8b3a794 --- /dev/null +++ b/docs/getting-started.md @@ -0,0 +1 @@ +# Getting Started \ No newline at end of file diff --git a/docs/introduction.md b/docs/introduction.md new file mode 100644 index 0000000..f6ecaa6 --- /dev/null +++ b/docs/introduction.md @@ -0,0 +1 @@ +# Introduction \ No newline at end of file diff --git a/docs/toc.yml b/docs/toc.yml new file mode 100644 index 0000000..d7e9ea8 --- /dev/null +++ b/docs/toc.yml @@ -0,0 +1,4 @@ +- name: Introduction + href: introduction.md +- name: Getting Started + href: getting-started.md \ No newline at end of file diff --git a/index.md b/index.md new file mode 100644 index 0000000..f9859f8 --- /dev/null +++ b/index.md @@ -0,0 +1,11 @@ +--- +_layout: landing +--- + +# This is the **HOMEPAGE**. + +Refer to [Markdown](http://daringfireball.net/projects/markdown/) for how to write markdown files. + +## Quick Start Notes: + +1. Add images to the *images* folder if the file is referencing an image. 
\ No newline at end of file
diff --git a/toc.yml b/toc.yml
new file mode 100644
index 0000000..061acc6
--- /dev/null
+++ b/toc.yml
@@ -0,0 +1,4 @@
+- name: Docs
+  href: docs/
+- name: API
+  href: api/
\ No newline at end of file
-- 
2.47.2


From 9ea10cc6dbf8ef7eb612672b9328121d6dbd5321 Mon Sep 17 00:00:00 2001
From: "Daniel J. Summers" 
Date: Wed, 9 Apr 2025 07:14:21 -0400
Subject: [PATCH 16/22] WIP on proof of concept w/docfx

---
 docfx.json              |   2 +-
 docs/basic-usage.md     | 148 ++++++++++++++++++++++++++++++
 docs/getting-started.md | 188 +++++++++++++++++++++++++++++++++++++++-
 docs/introduction.md    |   1 -
 docs/toc.yml            |   6 +-
 index.md                |  94 +++++++++++++++++++-
 toc.yml                 |   2 +-
 7 files changed, 430 insertions(+), 11 deletions(-)
 create mode 100644 docs/basic-usage.md
 delete mode 100644 docs/introduction.md

diff --git a/docfx.json b/docfx.json
index a99a721..312c800 100644
--- a/docfx.json
+++ b/docfx.json
@@ -19,7 +19,7 @@
     "content": [
       {
         "files": [
-          "api/*.{md,yml}"
+          "**/*.{md,yml}"
         ],
         "exclude": [
           "_site/**"
diff --git a/docs/basic-usage.md b/docs/basic-usage.md
new file mode 100644
index 0000000..55bc9ca
--- /dev/null
+++ b/docs/basic-usage.md
@@ -0,0 +1,148 @@
+# Basic Usage
+
+_Documentation pages for `BitBadger.Npgsql.Documents` redirect here. This library replaced it as of v3; see project home if this applies to you._
+
+## Overview
+
+There are several categories of operations that can be accomplished against documents.
+
+- **Count** returns the number of documents matching some criteria
+- **Exists** returns true if any documents match the given criteria
+- **Insert** adds a new document, failing if the ID field is not unique
+- **Save** adds a new document, updating an existing one if the ID is already present ("upsert")
+- **Update** updates an existing document, doing nothing if no documents satisfy the criteria
+- **Patch** updates a portion of an existing document, doing nothing if no documents satisfy the criteria
+- **Find** returns the documents matching some criteria as domain objects
+- **Json** returns or writes documents matching some criteria as JSON text
+- **RemoveFields** removes fields from documents matching some criteria
+- **Delete** removes documents matching some criteria
+
+`Insert` and `Save` are the only two that don't mention criteria. For the others, "some criteria" can be defined a few different ways:
+- **All** references all documents in the table; applies to Count and Find
+- **ById** looks for a single document on which to operate; applies to all but Count
+- **ByFields** uses JSON field comparisons to select documents for further processing (PostgreSQL will use a numeric comparison if the field value is numeric, or a string comparison otherwise; SQLite will do its usual [best-guess on types][]{target=_blank rel=noopener}); applies to all but Update
+- **ByContains** (PostgreSQL only) uses a JSON containment query (the `@>` operator) to find documents where the given sub-document occurs (think of this as an `=` comparison based on one or more properties in the document; looking for hotels with `{ "Country": "USA", "Rating": 4 }` would find all hotels with a rating of 4 in the United States); applies to all but Update
+- **ByJsonPath** (PostgreSQL only) uses a JSON Path match query (the `@?` operator) to make specific queries against a document's structure (it also supports more operators than a containment query; to find all hotels rated 4 _or higher_ in the United States, we could query for `"$ ? (@.Country == \"USA\" && @.Rating >= 4)"`); applies to all but Update
+
+Finally, `Find` and `Json` also have `FirstBy*` implementations for all supported criteria types, and `Find*Ordered` implementations to sort the results in the database.
+
+## Saving Documents
+
+The library provides three different ways to save data. The first equates to a SQL `INSERT` statement, and adds a single document to the repository.
+
+```csharp
+// C#, All
+    var room = new Room(/* ... */);
+    // Parameters are table name and document
+    await Document.Insert("room", room);
+```
+
+```fsharp
+// F#, All
+    let room = { Room.empty with (* ... *) }
+    do! insert "room" room
+```
+
+The second is `Save`, which inserts the data if it does not exist and replaces the document if it does exist (what some call an "upsert"). It utilizes the `ON CONFLICT` syntax to ensure an atomic statement. Its parameters are the same as those for `Insert`.
+
+The third equates to a SQL `UPDATE` statement. `Update` applies to a full document and is usually used by ID, while `Patch` is used for partial updates and may be done by field comparison, JSON containment, or JSON Path match. For a few examples, let's begin with a query that may back the "edit hotel" page. This page lets the user update nearly all the details for the hotel, so updating the entire document would be appropriate.
+
+```csharp
+// C#, All
+    var hotel = await Document.Find.ById<string, Hotel>("hotel", hotelId);
+    if (!(hotel is null))
+    {
+        // update hotel properties from the posted form
+        await Update.ById("hotel", hotel.Id, hotel);
+    }
+```
+
+```fsharp
+// F#, All
+    match! Find.byId<string, Hotel> "hotel" hotelId with
+    | Some hotel ->
+        let updated = { hotel with (* properties from posted form *) }
+        do! Update.byId "hotel" hotel.Id updated
+    | None -> ()
+```
+
+For the next example, suppose we are upgrading our hotel, and need to take rooms 221-240 out of service*. We can utilize a patch via JSON Path** to accomplish this.
+
+```csharp
+// C#, PostgreSQL
+    await Patch.ByJsonPath("room",
+        "$ ? (@.HotelId == \"abc\" && (@.RoomNumber >= 221 && @.RoomNumber <= 240))",
+        new { InService = false });
+```
+
+```fsharp
+// F#, PostgreSQL
+    do! Patch.byJsonPath "room"
+            "$ ? (@.HotelId == \"abc\" && (@.RoomNumber >= 221 && @.RoomNumber <= 240))"
+            {| InService = false |}
+```
+
+_* - we are ignoring the current reservations, end date, etc. This is a very naïve example!_
+
+\** - Both PostgreSQL and SQLite can also accomplish this using the `Between` comparison and a `ByFields` query:
+
+```csharp
+// C#, Both
+    await Patch.ByFields("room", FieldMatch.Any, [Field.Between("RoomNumber", 221, 240)],
+        new { InService = false });
+```
+
+```fsharp
+// F#, Both
+    do! Patch.byFields "room" Any [ Field.Between "RoomNumber" 221 240 ] {| InService = false |}
+```
+
+This could also be done with `All`/`FieldMatch.All` and `GreaterOrEqual` and `LessOrEqual` field comparisons, or even a custom query; these are fully explained in the [Advanced Usage][] section.
+
+> There is an `Update.ByFunc` variant that takes an ID extraction function run against the document instead of its ID. This is detailed in the [Advanced Usage][] section.
+
+## Finding Documents as Domain Items
+
+Functions to find documents start with `Find.`. There are variants to find all documents in a table, find by ID, find by JSON field comparisons, find by JSON containment, or find by JSON Path. The hotel update example above utilizes an ID lookup; the descriptions of JSON containment and JSON Path show examples of the criteria used to retrieve using those techniques.
+
+`Find` methods and functions are generic; specifying the return type is crucial. Additionally, `ById` will need the type of the key being passed. In C#, `ById` and the `FirstBy*` methods will return `TDoc?`, with the value if it was found or `null` if it was not; `All` and other `By*` methods return `List<TDoc>` (from `System.Collections.Generic`). In F#, `byId` and the `firstBy*` functions will return `'TDoc option`; `all` and other `by*` functions return `'TDoc list`.
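+
+To make that concrete, here is a minimal F# sketch; the `Room` record type, table name, key value, and `task` context are assumptions for illustration, and the explicit type arguments shown are one way to tell the compiler what to return:
+
+```fsharp
+// F#, Both
+task {
+    // one document by its ID; returns Room option
+    let! room = Find.byId<string, Room> "room" "my-room-key"
+    // every document matching a JSON field comparison; returns Room list
+    let! forHotel = Find.byFields<Room> "room" Any [ Field.Equal "HotelId" "abc" ]
+    return (Option.isSome room, List.length forHotel)
+}
+```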
The hotel update example above utilizes an ID lookup; the descriptions of JSON containment and JSON Path show examples of the criteria used to retrieve using those techniques. + +`Find` methods and functions are generic; specifying the return type is crucial. Additionally, `ById` will need the type of the key being passed. In C#, `ById` and the `FirstBy*` methods will return `TDoc?`, with the value if it was found or `null` if it was not; `All` and other `By*` methods return `List` (from `System.Collections.Generic`). In F#, `byId` and the `firstBy*` functions will return `'TDoc option`; `all` and other `by*` functions return `'TDoc list`. + +`Find*Ordered` methods and function append an `ORDER BY` clause to the query that will sort the results in the database. These take, as their last parameter, a sequence of `Field` items; a `.Named` method allows for field creation for these names. Within these names, prefixing the name with `n:` will tell PostgreSQL to sort this field numerically rather than alphabetically; it has no effect in SQLite (it does its own [type coercion][best-guess on types]). Adding " DESC" at the end will sort high-to-low instead of low-to-high. + +## Finding Documents as JSON + +All `Find` methods and functions have two corresponding `Json` functions. + +* The first set return the expected document(s) as a `string`, and will always return valid JSON. Single-document queries with nothing found will return `{}`, while zero-to-many queries will return `[]` if no documents match the given criteria. +* The second set are prefixed with `Write`, and take a `StreamWriter` immediately after the table name parameter. These functions write results to the given stream instead of returning them, which can be useful for JSON API scenarios. + +## Deleting Documents + +Functions to delete documents start with `Delete.`. Document deletion is supported by ID, JSON field comparison, JSON containment, or JSON Path match. The pattern is the same as for finding or partially updating. _(There is no library method provided to delete all documents, though deleting by JSON field comparison where a non-existent field is null would accomplish this.)_ + +## Counting Documents + +Functions to count documents start with `Count.`. Documents may be counted by a table in its entirety, by JSON field comparison, by JSON containment, or by JSON Path match. _(Counting by ID is an existence check!)_ + +## Document Existence + +Functions to check for existence start with `Exists.`. Documents may be checked for existence by ID, JSON field comparison, JSON containment, or JSON Path match. + +## What / How Cross-Reference + +The table below shows which commands are available for each access method. (X = supported for both, P = PostgreSQL only) + +Operation | `All` | `ById` | `ByFields` | `ByContains` | `ByJsonPath` | `FirstByFields` | `FirstByContains` | `FirstByJsonPath` +----------|:-----:|:------:|:---------:|:------------:|:------------:|:--------------:|:-----------------:|:----------------:| +`Count` | X | | X | P | P | +`Exists` | | X | X | P | P | +`Find` | X | X | X | P | P | X | P | P | +`Patch` | | X | X | P | P | +`RemoveFields` | | X | X | P | P | +`Delete` | | X | X | P | P | + +`Insert`, `Save`, and `Update.*` operate on single documents. 
+ +[best-guess on types]: https://sqlite.org/datatype3.html "Datatypes in SQLite • SQLite" +[JSON Path]: https://www.postgresql.org/docs/15/functions-json.html#FUNCTIONS-SQLJSON-PATH "JSON Functions and Operators • PostgreSQL Documentation" +[Advanced Usage]: /open-source/relational-documents/dotnet/advanced-usage.html "Advanced Usage • BitBadger.Documents • Bit Badger Solutions" diff --git a/docs/getting-started.md b/docs/getting-started.md index 8b3a794..b8acfa9 100644 --- a/docs/getting-started.md +++ b/docs/getting-started.md @@ -1 +1,187 @@ -# Getting Started \ No newline at end of file +# Getting Started +## Overview + +Each library has three different ways to execute commands: +- Functions/methods that have no connection parameter at all; for these, each call obtains a new connection. _(Connection pooling greatly reduced this overhead and churn on the database)_ +- Functions/methods that take a connection as the last parameter; these use the given connection to execute the commands. +- Extensions on the `NpgsqlConnection` or `SqliteConnection` type (native for both C# and F#); these are the same as the prior ones, and the names follow a similar pattern (ex. `Count.All()` is exposed as `conn.CountAll()`). + +This provides flexibility in how connections are managed. If your application does not care about it, configuring the library is all that is required. If your application generally does not care, but needs a connection on occasion, one can be obtained from the library and used as required. If you are developing a web application, and want to use one connection per request, you can register the library's connection functions as a factory, and have that connection injected. We will cover the how-to below for each scenario, but it is worth considering before getting started. + +> A note on functions: the F# functions use `camelCase`, while C# calls use `PascalCase`. To cut down on the noise, this documentation will generally use the C# `Count.All` form; know that this is `Count.all` for F#, `conn.CountAll()` for the C# extension method, and `conn.countAll` for the F# extension. + +## Namespaces + +### C# + +```csharp +using BitBadger.Documents; +using BitBadger.Documents.[Postgres|Sqlite]; +``` + +### F# + +```fsharp +open BitBadger.Documents +open BitBadger.Documents.[Postgres|Sqlite] +``` + +For F#, this order is significant; both namespaces have modules that share names, and this order will control which one shadows the other. + +## Configuring the Connection + +### The Connection String + +Both PostgreSQL and SQLite use the standard ADO.NET connection string format ([`Npgsql` docs][], [`Microsoft.Data.Sqlite` docs][]). The usual location for these is an `appsettings.json` file, which is then parsed into an `IConfiguration` instance. For SQLite, all the library needs is a connection string: + +```csharp +// C#, SQLite +// ... + var config = ...; // parsed IConfiguration + Sqlite.Configuration.UseConnectionString(config.GetConnectionString("SQLite")); +// ... +``` + +```fsharp +// F#, SQLite +// ... + let config = ...; // parsed IConfiguration + Configuration.useConnectionString (config.GetConnectionString("SQLite")) +// ... +``` + +For PostgreSQL, the library needs an `NpgsqlDataSource` instead. There is a builder that takes a connection string and creates it, so it still is not a lot of code: _(although this implements `IDisposable`, do not declare it with `using` or `use`; the library handles disposal if required)_ + +```csharp +// C#, PostgreSQL +// ... 
+
+## Configuring Document IDs
+
+### Field Name
+
+A common .NET pattern when naming unique identifiers for entities / documents / etc. is the name `Id`. By default, this library assumes that this field is the identifier for your documents. If your code follows this pattern, you will be happy with the default behavior. If you use a different property, or [implement a custom serializer][ser] to modify the JSON representation of your documents' IDs, though, you will need to configure that field name before you begin calling other functions or methods. A great spot for this is just after you configure the connection string or data source (above). If you have decided that the field "Name" is the unique identifier for your documents, your setup would look something like...
+
+```csharp
+// C#, All
+Configuration.UseIdField("Name");
+```
+
+```fsharp
+// F#, All
+Configuration.useIdField "Name"
+```
+
+Setting this will make `EnsureTable` create the unique index on that field when it creates a table, and will make all the `ById` functions and methods look for `data->>'Name'` instead of `data->>'Id'`. JSON is case-sensitive, so if the JSON is camel-cased, this should be configured to be `id` instead of `Id` (or `name` to follow the example above).
+
+### Generation Strategy
+
+The library can also generate IDs if they are missing. There are three different types of IDs, and each case of the `AutoId` enumeration/discriminated union can be passed to `Configuration.UseAutoIdStrategy()` to configure the library (a configuration sketch follows below).
+
+- `Number` generates a "max ID plus 1" query based on the current values of the table.
+- `Guid` generates a 32-character string from a Globally Unique Identifier (GUID), lowercase with no dashes.
+- `RandomString` generates random bytes and converts them to a lowercase hexadecimal string. By default, the string is 16 characters, but this can be changed via `Configuration.UseIdStringLength()`. _(You can also use `AutoId.GenerateRandomString(length)` to generate these strings for other purposes; they make good salts, transient keys, etc.)_
+
+All of these are off by default (the `Disabled` case). Even when ID generation is configured, though, only IDs of 0 (for `Number`) or empty strings (for `Guid` and `RandomString`) will be generated. IDs are only generated on `Insert`.
+
+> Numeric IDs are a one-time decision. In PostgreSQL, once a document has a non-numeric ID, attempts to insert an automatic number will fail. One could switch from numbers to strings, and the IDs would be treated as such (`"33"` instead of `33`, for example). SQLite does a best-guess typing of columns, but once a string ID is there, the "max + 1" algorithm will not return the expected results.
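+
+Configuring a strategy is a one-liner per setting. A minimal F# sketch using the random-string strategy (the C# calls follow the PascalCase convention noted earlier, e.g. `Configuration.UseAutoIdStrategy(AutoId.RandomString)`):
+
+```fsharp
+// F#, All
+Configuration.useAutoIdStrategy RandomString
+Configuration.useIdStringLength 32
+
+// the generator is also available directly; here, a 64-character hex string
+let salt = AutoId.GenerateRandomString 64
+```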
+
+```csharp
+// C#, PostgreSQL
+    builder.Services.AddScoped(svcProvider =>
+        Postgres.Configuration.DataSource().OpenConnection());
+// C#, SQLite
+    builder.Services.AddScoped(svcProvider => Sqlite.Configuration.DbConn());
+```
+
+```fsharp
+// F#, PostgreSQL
+    let _ = builder.Services.AddScoped(fun sp -> Configuration.dataSource().OpenConnection())
+// F#, SQLite
+    let _ = builder.Services.AddScoped(fun sp -> Configuration.dbConn ())
+```
+
+After registering, this connection will be available on the request context and can be injected in the constructor for things like Razor Pages or MVC Controllers.
+
+## Configuring Document IDs
+
+### Field Name
+
+A common .NET pattern when naming unique identifiers for entities, documents, etc. is the name `Id`. By default, this library assumes that this field is the identifier for your documents. If your code follows this pattern, you will be happy with the default behavior. If you use a different property, or [implement a custom serializer][ser] to modify the JSON representation of your documents' IDs, though, you will need to configure that field name before you begin calling other functions or methods. A great spot for this is just after you configure the connection string or data source (above). If you have decided that the field "Name" is the unique identifier for your documents, your setup would look something like...
+
+```csharp
+// C#, All
+Configuration.UseIdField("Name");
+```
+
+```fsharp
+// F#, All
+Configuration.useIdField "Name"
+```
+
+Setting this will make `EnsureTable` create the unique index on that field when it creates a table, and will make all the `ById` functions and methods look for `data->>'Name'` instead of `data->>'Id'`. JSON is case-sensitive, so if the JSON is camel-cased, this should be configured to be `id` instead of `Id` (or `name` to follow the example above).
+
+### Generation Strategy
+
+The library can also generate IDs if they are missing. There are three different types of IDs, and each case of the `AutoId` enumeration/discriminated union can be passed to `Configuration.UseAutoIdStrategy()` to configure the library.
+
+- `Number` generates a "max ID plus 1" query based on the current values of the table.
+- `Guid` generates a 32-character string from a Globally Unique Identifier (GUID), lowercase with no dashes.
+- `RandomString` generates random bytes and converts them to a lowercase hexadecimal string. By default, the string is 16 characters, but this can be changed via `Configuration.UseIdStringLength()`. _(You can also use `AutoId.GenerateRandomString(length)` to generate these strings for other purposes; they make good salts, transient keys, etc.)_
+
+All of these are off by default (the `Disabled` case). Even when ID generation is configured, though, IDs are only generated on `Insert`, and only when the incoming document's ID is 0 (for `Number`) or an empty string (for `Guid` and `RandomString`).
+
+> Numeric IDs are a one-time decision. In PostgreSQL, once a document has a non-numeric ID, attempts to insert an automatic number will fail. One could switch from numbers to strings, and the IDs would be treated as such (`"33"` instead of `33`, for example). SQLite does a best-guess typing of columns, but once a string ID is there, the "max + 1" algorithm will not return the expected results.
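+
+Putting these settings together, configuring 32-character random-string IDs might look like this _(a minimal sketch, using the camelCase F# forms implied by the naming convention noted earlier)_:
+
+```fsharp
+// F#, All
+open BitBadger.Documents
+
+// generate an ID whenever a document is inserted with an empty string ID
+Configuration.useAutoIdStrategy AutoId.RandomString
+// lengthen generated strings from the 16-character default
+Configuration.useIdStringLength 32
+```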
+
+## Ensuring Tables and Indexes Exist
+
+Both PostgreSQL and SQLite store data in tables and can utilize indexes to retrieve that data efficiently. Each application will need to determine the tables and indexes it expects.
+
+To illustrate these concepts, let's consider a naive example of a hotel chain; they have several hotels, and each hotel has several rooms. While each hotel could have its rooms as part of a `Hotel` document, there would likely be a lot of contention with concurrent updates to rooms, so we will put rooms in their own table. The hotel will store attributes like name, address, etc., while each room will have the hotel's ID (named `HotelId`), along with things like room number, floor, and a list of date ranges where the room is not available. (This could be for customer reservations, maintenance, etc.)
+
+_(Note that all "ensure" methods/functions below use the `IF NOT EXISTS` clause; they are safe to run each time the application starts up, and will do nothing if the tables or indexes already exist.)_
+
+### PostgreSQL
+
+We have a few options when it comes to indexing our documents. We can index a specific JSON field (in fact, each table's primary key is implemented as a unique index on the configured ID field). We can also use a GIN index to index the entire document, and that index can even be [optimized for a subset of JSON Path operators][json-index].
+
+Let's create a general-purpose index on hotels, a "HotelId" index on rooms, and an optimized document index on rooms.
+
+```csharp
+// C#, PostgreSQL
+    await Definition.EnsureTable("hotel");
+    await Definition.EnsureDocumentIndex("hotel", DocumentIndex.Full);
+    await Definition.EnsureTable("room");
+    // parameters are table name, index name, and fields to be indexed
+    await Definition.EnsureFieldIndex("room", "hotel_id", new[] { "HotelId" });
+    await Definition.EnsureDocumentIndex("room", DocumentIndex.Optimized);
+```
+
+```fsharp
+// F#, PostgreSQL
+    do! Definition.ensureTable "hotel"
+    do! Definition.ensureDocumentIndex "hotel" Full
+    do! Definition.ensureTable "room"
+    do! Definition.ensureFieldIndex "room" "hotel_id" [ "HotelId" ]
+    do! Definition.ensureDocumentIndex "room" Optimized
+```
+
+### SQLite
+
+For SQLite, the only option for JSON indexes (outside some quite complex techniques) is an index on fields. Just as with traditional relational indexes, these fields can be specified in expected query order; in our example, if we index our rooms on hotel ID and room number, that index can also be used for efficient retrieval by hotel ID alone.
+
+Let's create hotel and room tables, then index rooms by hotel ID and room number.
+
+```csharp
+// C#, SQLite
+    await Definition.EnsureTable("hotel");
+    await Definition.EnsureTable("room");
+    await Definition.EnsureIndex("room", "hotel_and_nbr", new[] { "HotelId", "RoomNumber" });
+```
+
+```fsharp
+// F#, SQLite
+    do! Definition.ensureTable "hotel"
+    do! Definition.ensureTable "room"
+    do! Definition.ensureIndex "room" "hotel_and_nbr" [ "HotelId"; "RoomNumber" ]
+```
+
+Now that we have tables, let's [use them][]!
+
+[`Npgsql` docs]: https://www.npgsql.org/doc/connection-string-parameters "Connection String Parameter • Npgsql"
+[`Microsoft.Data.Sqlite` docs]: https://learn.microsoft.com/en-us/dotnet/standard/data/sqlite/connection-strings "Connection Strings • Microsoft.Data.Sqlite • Microsoft Learn"
+[ser]: ./advanced/custom-serialization.html "Advanced Usage: Custom Serialization • BitBadger.Documents"
+[json-index]: https://www.postgresql.org/docs/current/datatype-json.html#JSON-INDEXING
+[use them]: ./basic-usage.html "Basic Usage • BitBadger.Documents"
diff --git a/docs/introduction.md b/docs/introduction.md
deleted file mode 100644
index f6ecaa6..0000000
--- a/docs/introduction.md
+++ /dev/null
@@ -1 +0,0 @@
-# Introduction
\ No newline at end of file
diff --git a/docs/toc.yml b/docs/toc.yml
index d7e9ea8..016a104 100644
--- a/docs/toc.yml
+++ b/docs/toc.yml
@@ -1,4 +1,4 @@
-- name: Introduction
-  href: introduction.md
 - name: Getting Started
-  href: getting-started.md
\ No newline at end of file
+  href: getting-started.md
+- name: Basic Usage
+  href: basic-usage.md
\ No newline at end of file
diff --git a/index.md b/index.md
index f9859f8..fbe5dcd 100644
--- a/index.md
+++ b/index.md
@@ -2,10 +2,96 @@
 _layout: landing
 ---
 
-# This is the **HOMEPAGE**.
+BitBadger.Documents provides a lightweight document-style interface over [PostgreSQL][]'s and [SQLite][]'s JSON storage capabilities, with first-class support for both C# and F# programs. _(It is developed by the community; it is not officially affiliated with either project.)_
 
-Refer to [Markdown](http://daringfireball.net/projects/markdown/) for how to write markdown files.
+> NOTE: v4.1 is the latest version. See below for upgrading.
 
-## Quick Start Notes:
+> Expecting `BitBadger.Npgsql.Documents`? This library replaced it as of v3.
 
-1. Add images to the *images* folder if the file is referencing an image.
\ No newline at end of file
+## Installing
+
+### PostgreSQL
+
+[![Nuget (with prereleases)][pkg-shield-pgsql]][pkg-link-pgsql]
+
+```
+dotnet add package BitBadger.Documents.Postgres
+```
+
+### SQLite
+
+[![Nuget (with prereleases)][pkg-shield-sqlite]][pkg-link-sqlite]
+
+```
+dotnet add package BitBadger.Documents.Sqlite
+```
+
+## Using
+
+- **[Getting Started][]** provides an overview of the libraries' functions, how to provide connection details, and how to ensure required tables and indexes exist.
+- **[Basic Usage][]** details document-level retrieval, persistence, and deletion.
+- **[Advanced Usage][]** demonstrates how to use the building blocks provided by this library to write slightly more complex queries.
+
+## Upgrading Major Versions
+
+* [v3 to v4][v3v4] ([Release][v4rel]) - Multiple field queries, ordering support, and automatic IDs
+* [v2 to v3][v2v3] ([Release][v3rel]; upgrade from `BitBadger.Npgsql.Documents`) - Namespace / project change
+* [v1 to v2][v1v2] ([Release][v2rel]) - Data storage format change
+
+## Why Documents?
+
+Document databases usually store JSON objects (as their "documents") to provide schemaless persistence of data; they also provide fault-tolerant ways to query that possibly-unstructured data. [MongoDB][] was the pioneer and is the leader in this space, but several others provide their own take on it, each with its own programming API. They also usually have some sort of clustering, replication, and sharding solution that allows them to be scaled out (horizontally) to handle a large amount of traffic.
+
+As a mature relational database, PostgreSQL has a long history of robust data access from the .NET environment; Npgsql is actively developed, and provides both ADO.NET and EF Core APIs. PostgreSQL also has well-established, battle-tested horizontal scaling options. Additionally, the [Npgsql.FSharp][] project provides a functional API over Npgsql's ADO.NET data access. These three factors make PostgreSQL an excellent choice for document storage, and its relational nature can help in areas where traditional document databases become more complex.
+
+SQLite is another mature relational database, implemented as a single file with its access run in-process with the calling application. It works very nicely on its own, with caching and write-ahead logging options; a companion project called [Litestream][] allows these files to be continuously streamed elsewhere, providing the point-in-time recovery capabilities one would expect from a relational database. Microsoft provides ADO.NET (and EF Core) drivers for SQLite as part of .NET. These combine to make SQLite a compelling choice, and the hybrid relational/document model allows users to select the data model that best fits each part of their application.
+
+In both cases, the document access functions provided by this library are dead simple. For more complex queries, it also provides the building blocks to construct these with minimal code.
+
+## Why Not [something else]?
+
+We are blessed to live in a time when there are many good data storage options that are more than efficient enough for the majority of use cases. Rather than speaking ill of other projects, here is the vision of the benefits these libraries aim to provide:
+
+### PostgreSQL
+
+PostgreSQL is the most popular non-WordPress database for good reason.
+
+- **Quality** - PostgreSQL's reputation is one of a rock-solid, well-maintained, and continually evolving database.
+- **Availability** - Nearly every cloud database provider offers PostgreSQL, and for custom servers, it is a package install away from being up and running.
+- **Efficiency** - PostgreSQL is very efficient, and its indexing of JSONB allows for quick access via any field in a document.
+- **Maintainability** - The terms "separation of concerns" and "locality of behavior" often compete within a code base, and separation of concerns often wins out; cluttering your logic with SQL can be less than optimal. Using this library, though, may separate the concerns enough that the calls can be placed directly in the regular logic, providing one fewer place that must be looked up when tracing through the code.
+- **Simplicity** - SQL is a familiar language; even when writing manual queries against the data store created by this library, everything one knows about SQL applies, with [a few operators added][json-ops].
+- **Reliability** - The library has a full suite of tests against both the C# and F# APIs, [run against every supported PostgreSQL version][tests] to ensure the functionality provided is what is advertised. + +### SQLite + +The [SQLite "About" page][sqlite-about] has a short description of the project and its strengths. Simplicity, flexibility, and a large install base speak for themselves. A lot of people believe they will need a lot of features offered by server-based relational databases, and live with that complexity even when the project is small. A smarter move may be to build with SQLite; if the need arises for something more, the project is very likely a success! + +Many of the benefits listed for PostgreSQL apply here as well, including its test coverage - but SQLite removes the requirement to run it as a server! + +## Support + +Issues can be filed on the project's GitHub repository. + + +[PostgreSQL]: https://www.postgresql.org/ "PostgreSQL" +[SQLite]: https://sqlite.org/ "SQLite" +[pkg-shield-pgsql]: https://img.shields.io/nuget/vpre/BitBadger.Documents.Postgres +[pkg-link-pgsql]: https://www.nuget.org/packages/BitBadger.Documents.Postgres/ "BitBadger.Documents.Postgres • NuGet" +[pkg-shield-sqlite]: https://img.shields.io/nuget/vpre/BitBadger.Documents.Sqlite +[pkg-link-sqlite]: https://www.nuget.org/packages/BitBadger.Documents.Sqlite/ "BitBadger.Documents.Sqlite • NuGet" +[Getting Started]: docs/getting-started.html "Getting Started • BitBadger.Documents" +[Basic Usage]: /open-source/relational-documents/dotnet/basic-usage.html "Basic Usage • BitBadger.Documents • Bit Badger Solutions" +[Advanced Usage]: /open-source/relational-documents/dotnet/advanced-usage.html "Advanced Usage • BitBadger.Documents • Bit Badger Solutions" +[v3v4]: /open-source/relational-documents/dotnet/upgrade-v3-to-v4.html "Upgrade from v3 to v4 • BitBadger.Documents • Bit Badger Solutions" +[v4rel]: https://git.bitbadger.solutions/bit-badger/BitBadger.Documents/releases/tag/v4 "Version 4 • Releases • BitBadger.Documents • Bit Badger Solutions Git" +[v2v3]: /open-source/relational-documents/dotnet/upgrade-v2-to-v3.html "Upgrade from v2 to v3 • BitBadger.Documents • Bit Badger Solutions" +[v3rel]: https://git.bitbadger.solutions/bit-badger/BitBadger.Documents/releases/tag/v3 "Version 3 • Releases • BitBadger.Documents • Bit Badger Solutions Git" +[v1v2]: /open-source/relational-documents/dotnet/upgrade-v1-to-v2.html "Upgrade from v1 to v2 • BitBadger.Npgsql.Documents • Bit Badger Solutions" +[v2rel]: https://github.com/bit-badger/BitBadger.Npgsql.Documents/releases/tag/v2 "Version 2 • Releases • BitBadger.Npgsql.Documents • GitHub" +[MongoDB]: https://www.mongodb.com/ "MongoDB" +[Npgsql.FSharp]: https://zaid-ajaj.github.io/Npgsql.FSharp/#/ "Npgsql.FSharp" +[Litestream]: https://litestream.io/ "Litestream" +[sqlite-about]: https://sqlite.org/about.html "About • SQLite" +[json-ops]: https://www.postgresql.org/docs/15/functions-json.html#FUNCTIONS-JSON-OP-TABLE "JSON Functions and Operators • Documentation • PostgreSQL" +[tests]: https://github.com/bit-badger/BitBadger.Documents/actions/workflows/ci.yml "Actions • BitBadger.Documents • GitHub" diff --git a/toc.yml b/toc.yml index 061acc6..630fb84 100644 --- a/toc.yml +++ b/toc.yml @@ -1,4 +1,4 @@ - name: Docs - href: docs/ + href: docs/getting-started.md - name: API href: api/ \ No newline at end of file -- 2.47.2 From 5eee3ce92a8cab3f8acb1a35c2a9ada311287aee Mon Sep 17 00:00:00 2001 From: "Daniel J. 
Summers"
Date: Thu, 10 Apr 2025 22:55:01 -0400
Subject: [PATCH 17/22] WIP on StreamWriter to PipeWriter change

---
 src/Common/Library.fs                           |  45 ++
 src/Postgres/Extensions.fs                      | 136 ++--
 src/Postgres/Functions.fs                       |  72 +-
 src/Postgres/Library.fs                         |  26 +-
 src/Postgres/WithProps.fs                       | 120 ++--
 src/Sqlite/Extensions.fs                        |  66 +-
 src/Sqlite/Functions.fs                         |  36 +-
 src/Sqlite/Library.fs                           |  25 +-
 src/Sqlite/WithConn.fs                          |  67 +-
 src/Tests.CSharp/CommonCSharpTests.cs           | 110 ++++
 .../PostgresCSharpExtensionTests.cs             | 613 +++++++++++++-----
 src/Tests.CSharp/PostgresCSharpTests.cs         | 607 ++++++++++++-----
 .../SqliteCSharpExtensionTests.cs               | 413 ++++++++----
 src/Tests.CSharp/SqliteCSharpTests.cs           | 413 ++++++++----
 src/Tests/CommonTests.fs                        |  78 +++
 src/Tests/PostgresExtensionTests.fs             | 434 ++++++++-----
 src/Tests/PostgresTests.fs                      | 431 +++++++-----
 src/Tests/SqliteExtensionTests.fs               | 323 +++++----
 src/Tests/SqliteTests.fs                        | 313 +++++----
 19 files changed, 2884 insertions(+), 1444 deletions(-)

diff --git a/src/Common/Library.fs b/src/Common/Library.fs
index c5668b7..fe55d12 100644
--- a/src/Common/Library.fs
+++ b/src/Common/Library.fs
@@ -1,6 +1,7 @@
 namespace BitBadger.Documents
 
 open System.Security.Cryptography
+open System.Text
 
 /// The types of comparisons available for JSON fields
 ///
@@ -631,3 +632,47 @@ module Query =
         |> function path -> path + defaultArg direction "")
         |> String.concat ", "
         |> function it -> $" ORDER BY {it}"
+
+
+open System.IO.Pipelines
+
+/// Functions that manipulate PipeWriters
+[]
+module PipeWriter =
+
+    /// Write a UTF-8 string to this pipe
+    /// The PipeWriter to which the string should be written
+    /// The string to be written to the pipe
+    /// true if the pipe is still open, false if not
+    []
+    let writeString (writer: PipeWriter) (text: string) = backgroundTask {
+        try
+            let! writeResult = writer.WriteAsync(Encoding.UTF8.GetBytes text)
+            let! flushResult = writer.FlushAsync()
+            return not (writeResult.IsCompleted || flushResult.IsCompleted)
+        with :? System.ObjectDisposedException -> return false
+    }
+
+    /// Write an array of strings, abandoning the sequence if the pipe is closed
+    /// The PipeWriter to which the strings should be written
+    /// The strings to be written
+    /// (the closing bracket is only written if the pipe remains open)
+    []
+    let writeStrings writer items = backgroundTask {
+        let rec writeNext idx = backgroundTask {
+            match items |> Seq.tryItem idx with
+            | Some item ->
+                if idx > 0 then
+                    let! _ = writeString writer ","
+                    ()
+                match! writeString writer item with
+                | true -> return! writeNext (idx + 1)
+                | false -> return false
+            | None -> return true
+        }
+        let! _ = writeString writer "["
+        let! isCleanFinish = writeNext 0
+        if isCleanFinish then
+            let!
_ = writeString writer "]" + () + } diff --git a/src/Postgres/Extensions.fs b/src/Postgres/Extensions.fs index da0d24d..ebd9229 100644 --- a/src/Postgres/Extensions.fs +++ b/src/Postgres/Extensions.fs @@ -26,10 +26,10 @@ module Extensions = member conn.customJsonArray query parameters mapFunc = Custom.jsonArray query parameters mapFunc (Sql.existingConnection conn) - /// Execute a query, writing its results to the given StreamWriter + /// Execute a query, writing its results to the given PipeWriter /// The query to retrieve the results /// Parameters to use for the query - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The mapping function to extract the document member conn.writeCustomJsonArray query parameters writer mapFunc = Custom.writeJsonArray query parameters writer mapFunc (Sql.existingConnection conn) @@ -298,9 +298,9 @@ module Extensions = member conn.jsonAll tableName = Json.all tableName (Sql.existingConnection conn) - /// Write all documents in the given table to the given StreamWriter + /// Write all documents in the given table to the given PipeWriter /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written member conn.writeJsonAll tableName writer = Json.writeAll tableName writer (Sql.existingConnection conn) @@ -314,11 +314,11 @@ module Extensions = Json.allOrdered tableName orderFields (Sql.existingConnection conn) /// - /// Write all documents in the given table to the given StreamWriter, ordered by the given fields in the + /// Write all documents in the given table to the given PipeWriter, ordered by the given fields in the /// document /// /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// Fields by which the results should be ordered member conn.writeJsonAllOrdered tableName writer orderFields = Json.writeAllOrdered tableName writer orderFields (Sql.existingConnection conn) @@ -330,9 +330,9 @@ module Extensions = member conn.jsonById<'TKey> tableName (docId: 'TKey) = Json.byId tableName docId (Sql.existingConnection conn) - /// Write a JSON document to the given StreamWriter by its ID + /// Write a JSON document to the given PipeWriter by its ID /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The ID of the document to retrieve member conn.writeJsonById<'TKey> tableName writer (docId: 'TKey) = Json.writeById tableName writer docId (Sql.existingConnection conn) @@ -346,11 +346,11 @@ module Extensions = Json.byFields tableName howMatched fields (Sql.existingConnection conn) /// - /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, /// etc.) 
/// /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// Whether to match any or all of the field conditions /// The field conditions to match member conn.writeJsonByFields tableName writer howMatched fields = @@ -369,11 +369,11 @@ module Extensions = Json.byFieldsOrdered tableName howMatched queryFields orderFields (Sql.existingConnection conn) /// - /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, /// etc.) ordered by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered @@ -388,10 +388,10 @@ module Extensions = Json.byContains tableName criteria (Sql.existingConnection conn) /// - /// Write JSON documents to the given StreamWriter matching a JSON containment query (@>) + /// Write JSON documents to the given PipeWriter matching a JSON containment query (@>) /// /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The document to match with the containment query member conn.writeJsonByContains tableName writer (criteria: obj) = Json.writeByContains tableName writer criteria (Sql.existingConnection conn) @@ -408,11 +408,11 @@ module Extensions = Json.byContainsOrdered tableName criteria orderFields (Sql.existingConnection conn) /// - /// Write JSON documents to the given StreamWriter matching a JSON containment query (@>) - /// ordered by the given fields in the document + /// Write JSON documents to the given PipeWriter matching a JSON containment query (@>) ordered + /// by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The document to match with the containment query /// Fields by which the results should be ordered member conn.writeJsonByContainsOrdered tableName writer (criteria: obj) orderFields = @@ -426,10 +426,10 @@ module Extensions = Json.byJsonPath tableName jsonPath (Sql.existingConnection conn) /// - /// Write JSON documents to the given StreamWriter matching a JSON Path match query (@?) + /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?) /// /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The JSON Path expression to match member conn.writeJsonByJsonPath tableName writer jsonPath = Json.writeByJsonPath tableName writer jsonPath (Sql.existingConnection conn) @@ -446,11 +446,11 @@ module Extensions = Json.byJsonPathOrdered tableName jsonPath orderFields (Sql.existingConnection conn) /// - /// Write JSON documents to the given StreamWriter matching a JSON Path match query (@?) 
ordered - /// by the given fields in the document + /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?) ordered by + /// the given fields in the document /// /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The JSON Path expression to match /// Fields by which the results should be ordered member conn.writeJsonByJsonPathOrdered tableName writer jsonPath orderFields = @@ -467,11 +467,11 @@ module Extensions = Json.firstByFields tableName howMatched fields (Sql.existingConnection conn) /// - /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons /// (->> =, etc.) /// /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// Whether to match any or all of the field conditions /// The field conditions to match member conn.writeJsonFirstByFields tableName writer howMatched fields = @@ -490,11 +490,11 @@ module Extensions = Json.firstByFieldsOrdered tableName howMatched queryFields orderFields (Sql.existingConnection conn) /// - /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons /// (->> =, etc.) ordered by the given fields in the document /// /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered @@ -510,11 +510,11 @@ module Extensions = Json.firstByContains tableName criteria (Sql.existingConnection conn) /// - /// Write the first JSON document to the given StreamWriter matching a JSON containment query + /// Write the first JSON document to the given PipeWriter matching a JSON containment query /// (@>) /// /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The document to match with the containment query member conn.writeJsonFirstByContains tableName writer (criteria: obj) = Json.writeFirstByContains tableName writer criteria (Sql.existingConnection conn) @@ -531,11 +531,11 @@ module Extensions = Json.firstByContainsOrdered tableName criteria orderFields (Sql.existingConnection conn) /// - /// Write the first JSON document to the given StreamWriter matching a JSON containment query + /// Write the first JSON document to the given PipeWriter matching a JSON containment query /// (@>) ordered by the given fields in the document /// /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The document to match with the containment query /// Fields by which the results should be ordered member conn.writeJsonFirstByContainsOrdered tableName writer (criteria: obj) orderFields = @@ -549,10 +549,10 @@ module Extensions = Json.firstByJsonPath 
tableName jsonPath (Sql.existingConnection conn) /// - /// Write the first JSON document to the given StreamWriter matching a JSON Path match query (@?) + /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?) /// /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The JSON Path expression to match member conn.writeJsonFirstByJsonPath tableName writer jsonPath = Json.writeFirstByJsonPath tableName writer jsonPath (Sql.existingConnection conn) @@ -569,11 +569,11 @@ module Extensions = Json.firstByJsonPathOrdered tableName jsonPath orderFields (Sql.existingConnection conn) /// - /// Write the first JSON document to the given StreamWriter matching a JSON Path match query (@?) + /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?) /// ordered by the given fields in the document /// /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The JSON Path expression to match /// Fields by which the results should be ordered member conn.writeJsonFirstByJsonPathOrdered tableName writer jsonPath orderFields = @@ -710,11 +710,11 @@ type NpgsqlConnectionCSharpExtensions = static member inline CustomJsonArray(conn, query, parameters, mapFunc) = Custom.JsonArray(query, parameters, mapFunc, Sql.existingConnection conn) - /// Execute a query, writing its results to the given StreamWriter + /// Execute a query, writing its results to the given PipeWriter /// The NpgsqlConnection on which to run the query /// The query to retrieve the results /// Parameters to use for the query - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The mapping function to extract the document [] static member inline WriteCustomJsonArray(conn, query, parameters, writer, mapFunc) = @@ -1048,10 +1048,10 @@ type NpgsqlConnectionCSharpExtensions = static member inline JsonAll(conn, tableName) = Json.all tableName (Sql.existingConnection conn) - /// Write all documents in the given table to the given StreamWriter + /// Write all documents in the given table to the given PipeWriter /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written [] static member inline WriteJsonAll(conn, tableName, writer) = Json.writeAll tableName writer (Sql.existingConnection conn) @@ -1068,12 +1068,12 @@ type NpgsqlConnectionCSharpExtensions = Json.allOrdered tableName orderFields (Sql.existingConnection conn) /// - /// Write all documents in the given table to the given StreamWriter, ordered by the given fields in the + /// Write all documents in the given table to the given PipeWriter, ordered by the given fields in the /// document /// /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// Fields by which the results should be ordered [] static member inline WriteJsonAllOrdered(conn, tableName, writer, orderFields) = @@ 
-1088,10 +1088,10 @@ type NpgsqlConnectionCSharpExtensions = static member inline JsonById<'TKey>(conn, tableName, docId: 'TKey) = Json.byId tableName docId (Sql.existingConnection conn) - /// Write a JSON document to the given StreamWriter by its ID + /// Write a JSON document to the given PipeWriter by its ID /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The ID of the document to retrieve [] static member inline WriteJsonById<'TKey>(conn, tableName, writer, docId) = @@ -1108,11 +1108,11 @@ type NpgsqlConnectionCSharpExtensions = Json.byFields tableName howMatched fields (Sql.existingConnection conn) /// - /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.) + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.) /// /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// Whether to match any or all of the field conditions /// The field conditions to match [] @@ -1134,12 +1134,12 @@ type NpgsqlConnectionCSharpExtensions = Json.byFieldsOrdered tableName howMatched queryFields orderFields (Sql.existingConnection conn) /// - /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.) + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.) /// ordered by the given fields in the document /// /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered @@ -1157,11 +1157,11 @@ type NpgsqlConnectionCSharpExtensions = Json.byContains tableName criteria (Sql.existingConnection conn) /// - /// Write JSON documents to the given StreamWriter matching a JSON containment query (@>) + /// Write JSON documents to the given PipeWriter matching a JSON containment query (@>) /// /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The document to match with the containment query [] static member inline WriteJsonByContains(conn, tableName, writer, criteria: obj) = @@ -1181,12 +1181,12 @@ type NpgsqlConnectionCSharpExtensions = Json.byContainsOrdered tableName criteria orderFields (Sql.existingConnection conn) /// - /// Write JSON documents to the given StreamWriter matching a JSON containment query (@>) ordered - /// by the given fields in the document + /// Write JSON documents to the given PipeWriter matching a JSON containment query (@>) ordered by + /// the given fields in the document /// /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be 
written /// The document to match with the containment query /// Fields by which the results should be ordered [] @@ -1203,11 +1203,11 @@ type NpgsqlConnectionCSharpExtensions = Json.byJsonPath tableName jsonPath (Sql.existingConnection conn) /// - /// Write JSON documents to the given StreamWriter matching a JSON Path match query (@?) + /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?) /// /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The JSON Path expression to match [] static member inline WriteJsonByJsonPath(conn, tableName, writer, jsonPath) = @@ -1226,12 +1226,12 @@ type NpgsqlConnectionCSharpExtensions = Json.byJsonPathOrdered tableName jsonPath orderFields (Sql.existingConnection conn) /// - /// Write JSON documents to the given StreamWriter matching a JSON Path match query (@?) ordered by - /// the given fields in the document + /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?) ordered by the + /// given fields in the document /// /// The NpgsqlConnection on which to run the query /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The JSON Path expression to match /// Fields by which the results should be ordered [] @@ -1249,12 +1249,12 @@ type NpgsqlConnectionCSharpExtensions = Json.firstByFields tableName howMatched fields (Sql.existingConnection conn) /// - /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons /// (->> =, etc.) /// /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// Whether to match any or all of the field conditions /// The field conditions to match [] @@ -1276,12 +1276,12 @@ type NpgsqlConnectionCSharpExtensions = Json.firstByFieldsOrdered tableName howMatched queryFields orderFields (Sql.existingConnection conn) /// - /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons /// (->> =, etc.) 
ordered by the given fields in the document /// /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered @@ -1300,11 +1300,11 @@ type NpgsqlConnectionCSharpExtensions = Json.firstByContains tableName criteria (Sql.existingConnection conn) /// - /// Write the first JSON document to the given StreamWriter matching a JSON containment query (@>) + /// Write the first JSON document to the given PipeWriter matching a JSON containment query (@>) /// /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The document to match with the containment query [] static member inline WriteJsonFirstByContains(conn, tableName, writer, criteria: obj) = @@ -1324,12 +1324,12 @@ type NpgsqlConnectionCSharpExtensions = Json.firstByContainsOrdered tableName criteria orderFields (Sql.existingConnection conn) /// - /// Write the first JSON document to the given StreamWriter matching a JSON containment query (@>) + /// Write the first JSON document to the given PipeWriter matching a JSON containment query (@>) /// ordered by the given fields in the document /// /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The document to match with the containment query /// Fields by which the results should be ordered [] @@ -1346,11 +1346,11 @@ type NpgsqlConnectionCSharpExtensions = Json.firstByJsonPath tableName jsonPath (Sql.existingConnection conn) /// - /// Write the first JSON document to the given StreamWriter matching a JSON Path match query (@?) + /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?) /// /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The JSON Path expression to match [] static member inline WriteJsonFirstByJsonPath(conn, tableName, writer, jsonPath) = @@ -1370,12 +1370,12 @@ type NpgsqlConnectionCSharpExtensions = Json.firstByJsonPathOrdered tableName jsonPath orderFields (Sql.existingConnection conn) /// - /// Write the first JSON document to the given StreamWriter matching a JSON Path match query (@?) + /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?) 
/// ordered by the given fields in the document /// /// The NpgsqlConnection on which to run the query /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The JSON Path expression to match /// Fields by which the results should be ordered [] diff --git a/src/Postgres/Functions.fs b/src/Postgres/Functions.fs index f9f0eae..cf4d9f6 100644 --- a/src/Postgres/Functions.fs +++ b/src/Postgres/Functions.fs @@ -38,19 +38,19 @@ module Custom = let JsonArray(query, parameters, mapFunc) = WithProps.Custom.JsonArray(query, parameters, mapFunc, fromDataSource ()) - /// Execute a query, writing its results to the given StreamWriter + /// Execute a query, writing its results to the given PipeWriter /// The query to retrieve the results /// Parameters to use for the query - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The mapping function to extract the document [] let writeJsonArray query parameters writer mapFunc = WithProps.Custom.writeJsonArray query parameters writer mapFunc (fromDataSource ()) - /// Execute a query, writing its results to the given StreamWriter + /// Execute a query, writing its results to the given PipeWriter /// The query to retrieve the results /// Parameters to use for the query - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The mapping function to extract the document let WriteJsonArray(query, parameters, writer, mapFunc) = WithProps.Custom.WriteJsonArray(query, parameters, writer, mapFunc, fromDataSource ()) @@ -527,9 +527,9 @@ module Json = let all tableName = WithProps.Json.all tableName (fromDataSource ()) - /// Write all documents in the given table to the given StreamWriter + /// Write all documents in the given table to the given PipeWriter /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written [] let writeAll tableName writer = WithProps.Json.writeAll tableName writer (fromDataSource ()) @@ -545,11 +545,11 @@ module Json = WithProps.Json.allOrdered tableName orderFields (fromDataSource ()) /// - /// Write all documents in the given table to the given StreamWriter, ordered by the given fields in the + /// Write all documents in the given table to the given PipeWriter, ordered by the given fields in the /// document /// /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// Fields by which the results should be ordered [] let writeAllOrdered tableName writer orderFields = @@ -563,9 +563,9 @@ module Json = let byId<'TKey> tableName (docId: 'TKey) = WithProps.Json.byId tableName docId (fromDataSource ()) - /// Write a JSON document to the given StreamWriter by its ID + /// Write a JSON document to the given PipeWriter by its ID /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The ID of the document to retrieve [] let writeById<'TKey> tableName writer (docId: 'TKey) = @@ -581,10 +581,10 @@ module Json = WithProps.Json.byFields tableName 
howMatched fields (fromDataSource ()) /// - /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.) + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.) /// /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// Whether to match any or all of the field conditions /// The field conditions to match [] @@ -605,11 +605,11 @@ module Json = WithProps.Json.byFieldsOrdered tableName howMatched queryFields orderFields (fromDataSource ()) /// - /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.) + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.) /// ordered by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered @@ -626,10 +626,10 @@ module Json = WithProps.Json.byContains tableName criteria (fromDataSource ()) /// - /// Write JSON documents to the given StreamWriter matching a JSON containment query (@>) + /// Write JSON documents to the given PipeWriter matching a JSON containment query (@>) /// /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The document to match with the containment query [] let writeByContains tableName writer (criteria: obj) = @@ -648,11 +648,11 @@ module Json = WithProps.Json.byContainsOrdered tableName criteria orderFields (fromDataSource ()) /// - /// Write JSON documents to the given StreamWriter matching a JSON containment query (@>) ordered - /// by the given fields in the document + /// Write JSON documents to the given PipeWriter matching a JSON containment query (@>) ordered by + /// the given fields in the document /// /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The document to match with the containment query /// Fields by which the results should be ordered [] @@ -668,10 +668,10 @@ module Json = WithProps.Json.byJsonPath tableName jsonPath (fromDataSource ()) /// - /// Write JSON documents to the given StreamWriter matching a JSON Path match query (@?) + /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?) /// /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The JSON Path expression to match [] let writeByJsonPath tableName writer jsonPath = @@ -689,11 +689,11 @@ module Json = WithProps.Json.byJsonPathOrdered tableName jsonPath orderFields (fromDataSource ()) /// - /// Write JSON documents to the given StreamWriter matching a JSON Path match query (@?) ordered by - /// the given fields in the document + /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?) 
ordered by the + /// given fields in the document /// /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The JSON Path expression to match /// Fields by which the results should be ordered [] @@ -710,11 +710,11 @@ module Json = WithProps.Json.firstByFields tableName howMatched fields (fromDataSource ()) /// - /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons /// (->> =, etc.) /// /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// Whether to match any or all of the field conditions /// The field conditions to match [] @@ -735,11 +735,11 @@ module Json = WithProps.Json.firstByFieldsOrdered tableName howMatched queryFields orderFields (fromDataSource ()) /// - /// Write the first JSON document to the given StreamWriter matching JSON field comparisons + /// Write the first JSON document to the given PipeWriter matching JSON field comparisons /// (->> =, etc.) ordered by the given fields in the document /// /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered @@ -756,10 +756,10 @@ module Json = WithProps.Json.firstByContains tableName criteria (fromDataSource ()) /// - /// Write the first JSON document to the given StreamWriter matching a JSON containment query (@>) + /// Write the first JSON document to the given PipeWriter matching a JSON containment query (@>) /// /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The document to match with the containment query [] let writeFirstByContains tableName writer (criteria: obj) = @@ -778,11 +778,11 @@ module Json = WithProps.Json.firstByContainsOrdered tableName criteria orderFields (fromDataSource ()) /// - /// Write the first JSON document to the given StreamWriter matching a JSON containment query (@>) + /// Write the first JSON document to the given PipeWriter matching a JSON containment query (@>) /// ordered by the given fields in the document /// /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The document to match with the containment query /// Fields by which the results should be ordered [] @@ -798,10 +798,10 @@ module Json = WithProps.Json.firstByJsonPath tableName jsonPath (fromDataSource ()) /// - /// Write the first JSON document to the given StreamWriter matching a JSON Path match query (@?) + /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?) 
/// /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The JSON Path expression to match [] let writeFirstByJsonPath tableName writer jsonPath = @@ -820,11 +820,11 @@ module Json = WithProps.Json.firstByJsonPathOrdered tableName jsonPath orderFields (fromDataSource ()) /// - /// Write the first JSON document to the given StreamWriter matching a JSON Path match query (@?) + /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?) /// ordered by the given fields in the document /// /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The JSON Path expression to match /// Fields by which the results should be ordered [] diff --git a/src/Postgres/Library.fs b/src/Postgres/Library.fs index 7a5520e..230c7f2 100644 --- a/src/Postgres/Library.fs +++ b/src/Postgres/Library.fs @@ -1,9 +1,5 @@ namespace BitBadger.Documents.Postgres -open System.IO -open System.Text -open System.Threading.Tasks - /// The type of index to generate for the document [] type DocumentIndex = @@ -268,6 +264,8 @@ module Query = Query.statementWhere statement (whereJsonPathMatches "@path") +open System.Text + /// Functions for dealing with results [] module Results = @@ -337,24 +335,16 @@ module Results = let ToJsonArray(mapFunc: System.Func, sqlProps) = toJsonArray mapFunc.Invoke sqlProps - /// Write a JSON array of items for the results of a query to the given StreamWriter - /// The StreamWriter to which results should be written + /// Write a JSON array of items for the results of a query to the given PipeWriter + /// The PipeWriter to which results should be written /// The mapping function to extract JSON from the query's results /// The query from which JSON should be extracted [] - let writeJsonArray (writer: StreamWriter) (mapFunc: RowReader -> string) sqlProps = backgroundTask { - let await (it: Task) = it.ConfigureAwait(false).GetAwaiter().GetResult() - do! writer.WriteAsync "[" - let mutable isFirst = true - do! sqlProps - |> Sql.iterAsync (fun it -> - if isFirst then isFirst <- false else await (writer.WriteAsync ",") - (mapFunc >> writer.WriteAsync >> await) it) - do! 
writer.WriteAsync "]" - } + let writeJsonArray writer (mapFunc: RowReader -> string) sqlProps = + sqlProps |> Sql.toSeq mapFunc |> PipeWriter.writeStrings writer - /// Write a JSON array of items for the results of a query to the given StreamWriter - /// The StreamWriter to which results should be written + /// Write a JSON array of items for the results of a query to the given PipeWriter + /// The PipeWriter to which results should be written /// The mapping function to extract JSON from the query's results /// The query from which JSON should be extracted let WriteJsonArray(writer, mapFunc: System.Func, sqlProps) = diff --git a/src/Postgres/WithProps.fs b/src/Postgres/WithProps.fs index 2a06c96..16901ff 100644 --- a/src/Postgres/WithProps.fs +++ b/src/Postgres/WithProps.fs @@ -1,7 +1,6 @@ /// Versions of queries that accept SqlProps as the last parameter module BitBadger.Documents.Postgres.WithProps -open System.IO open BitBadger.Documents open Npgsql.FSharp @@ -55,10 +54,10 @@ module Custom = let JsonArray(query, parameters, mapFunc: System.Func, sqlProps) = jsonArray query parameters mapFunc.Invoke sqlProps - /// Execute a query, writing its results to the given StreamWriter + /// Execute a query, writing its results to the given PipeWriter /// The query to retrieve the results /// Parameters to use for the query - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The mapping function to extract the document /// The SqlProps to use to execute the query [] @@ -67,10 +66,10 @@ module Custom = |> Sql.parameters (FSharpList.ofSeq parameters) |> writeJsonArray writer mapFunc - /// Execute a query, writing its results to the given StreamWriter + /// Execute a query, writing its results to the given PipeWriter /// The query to retrieve the results /// Parameters to use for the query - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The mapping function to extract the document /// The SqlProps to use to execute the query let WriteJsonArray(query, parameters, writer, mapFunc: System.Func, sqlProps) = @@ -724,9 +723,9 @@ module Json = let all tableName sqlProps = Custom.jsonArray (Query.find tableName) [] jsonFromData sqlProps - /// Write all documents in the given table to the given StreamWriter + /// Write all documents in the given table to the given PipeWriter /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The SqlProps to use to execute the query [] let writeAll tableName writer sqlProps = @@ -744,11 +743,11 @@ module Json = Custom.jsonArray (Query.find tableName + Query.orderBy orderFields PostgreSQL) [] jsonFromData sqlProps /// - /// Write all documents in the given table to the given StreamWriter, ordered by the given fields in the + /// Write all documents in the given table to the given PipeWriter, ordered by the given fields in the /// document /// /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// Fields by which the results should be ordered /// The SqlProps to use to execute the query [] @@ -765,15 +764,16 @@ module Json = let byId<'TKey> tableName (docId: 'TKey) sqlProps = Custom.jsonSingle (Query.byId (Query.find tableName) docId) [ 
idParam docId ] jsonFromData sqlProps - /// Write a JSON document to the given StreamWriter by its ID + /// Write a JSON document to the given PipeWriter by its ID /// The table from which a document should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The ID of the document to retrieve /// The SqlProps to use to execute the query [] - let writeById<'TKey> tableName (writer: StreamWriter) (docId: 'TKey) sqlProps = backgroundTask { + let writeById<'TKey> tableName writer (docId: 'TKey) sqlProps = backgroundTask { let! json = byId tableName docId sqlProps - do! writer.WriteAsync json + let! _ = PipeWriter.writeString writer json + () } /// Retrieve JSON documents matching JSON field comparisons (->> =, etc.) @@ -788,10 +788,10 @@ module Json = (Query.byFields (Query.find tableName) howMatched fields) (addFieldParams fields []) jsonFromData sqlProps /// - /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.) + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.) /// /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// Whether to match any or all of the field conditions /// The field conditions to match /// The SqlProps to use to execute the query @@ -823,11 +823,11 @@ module Json = sqlProps /// - /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.) + /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.) /// ordered by the given fields in the document /// /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// Whether to match any or all of the field conditions /// The field conditions to match /// Fields by which the results should be ordered @@ -852,10 +852,10 @@ module Json = (Query.byContains (Query.find tableName)) [ jsonParam "@criteria" criteria ] jsonFromData sqlProps /// - /// Write JSON documents to the given StreamWriter matching a JSON containment query (@>) + /// Write JSON documents to the given PipeWriter matching a JSON containment query (@>) /// /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The document to match with the containment query /// The SqlProps to use to execute the query [] @@ -881,11 +881,11 @@ module Json = sqlProps /// - /// Write JSON documents to the given StreamWriter matching a JSON containment query (@>) ordered - /// by the given fields in the document + /// Write JSON documents to the given PipeWriter matching a JSON containment query (@>) ordered by + /// the given fields in the document /// /// The table from which documents should be retrieved (may include schema) - /// The StreamWriter to which the results should be written + /// The PipeWriter to which the results should be written /// The document to match with the containment query /// Fields by which the results should be ordered /// The SqlProps to use to execute the query @@ -909,10 +909,10 @@ module Json = (Query.byPathMatch (Query.find tableName)) [ "@path", Sql.string 
             (Query.byPathMatch (Query.find tableName)) [ "@path", Sql.string jsonPath ] jsonFromData sqlProps
 
     ///
-    /// Write JSON documents to the given StreamWriter matching a JSON Path match query (@?)
+    /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?)
     ///
     /// The table from which documents should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// The JSON Path expression to match
     /// The SqlProps to use to execute the query
     []
@@ -937,11 +937,11 @@ module Json =
             sqlProps
 
     ///
-    /// Write JSON documents to the given StreamWriter matching a JSON Path match query (@?) ordered by
-    /// the given fields in the document
+    /// Write JSON documents to the given PipeWriter matching a JSON Path match query (@?) ordered by the
+    /// given fields in the document
     ///
     /// The table from which documents should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// The JSON Path expression to match
     /// Fields by which the results should be ordered
     /// The SqlProps to use to execute the query
@@ -966,18 +966,19 @@ module Json =
             (Query.byFields (Query.find tableName) howMatched fields) (addFieldParams fields []) jsonFromData sqlProps
 
     ///
-    /// Write the first JSON document to the given StreamWriter matching JSON field comparisons
-    /// (->> =, etc.)
+    /// Write the first JSON document to the given PipeWriter matching JSON field comparisons (->> =,
+    /// etc.)
     ///
     /// The table from which a document should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// Whether to match any or all of the field conditions
     /// The field conditions to match
     /// The SqlProps to use to execute the query
     []
-    let writeFirstByFields tableName (writer: StreamWriter) howMatched fields sqlProps = backgroundTask {
+    let writeFirstByFields tableName writer howMatched fields sqlProps = backgroundTask {
         let! json = firstByFields tableName howMatched fields sqlProps
-        do! writer.WriteAsync json
+        let! _ = PipeWriter.writeString writer json
+        ()
     }
 
     ///
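[Usage aside, not part of the patch: these write* functions pair naturally with ASP.NET Core, whose HttpResponse.BodyWriter is already a PipeWriter. The sketch below is hedged: the table name "customer", the field value, and the handler shape are illustrative placeholders, not code from this series.]

    open Microsoft.AspNetCore.Http
    open Npgsql.FSharp
    open BitBadger.Documents
    open BitBadger.Documents.Postgres

    // Stream the first matching document straight to the HTTP response;
    // no intermediate string is built ("{}" is written when nothing matches).
    let firstPurple (conn: Npgsql.NpgsqlConnection) (ctx: HttpContext) = task {
        do! WithProps.Json.writeFirstByFields
                "customer" ctx.Response.BodyWriter FieldMatch.Any
                [ Field.Equal "Value" "purple" ] (Sql.existingConnection conn)
    }
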
@@ -999,21 +1000,21 @@ module Json =
             sqlProps
 
     ///
-    /// Write the first JSON document to the given StreamWriter matching JSON field comparisons
+    /// Write the first JSON document to the given PipeWriter matching JSON field comparisons
     /// (->> =, etc.) ordered by the given fields in the document
     ///
     /// The table from which a document should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// Whether to match any or all of the field conditions
     /// The field conditions to match
     /// Fields by which the results should be ordered
     /// The SqlProps to use to execute the query
     []
-    let writeFirstByFieldsOrdered tableName (writer: StreamWriter) howMatched queryFields orderFields sqlProps =
-        backgroundTask {
-            let! json = firstByFieldsOrdered tableName howMatched queryFields orderFields sqlProps
-            do! writer.WriteAsync json
-        }
+    let writeFirstByFieldsOrdered tableName writer howMatched queryFields orderFields sqlProps = backgroundTask {
+        let! json = firstByFieldsOrdered tableName howMatched queryFields orderFields sqlProps
+        let! _ = PipeWriter.writeString writer json
+        ()
+    }
 
     /// Retrieve the first JSON document matching a JSON containment query (@>)
     /// The table from which a document should be retrieved (may include schema)
     /// The document to match with the containment query
     /// The SqlProps to use to execute the query
     []
@@ -1026,16 +1027,17 @@ module Json =
             (Query.byContains (Query.find tableName)) [ jsonParam "@criteria" criteria ] jsonFromData sqlProps
 
     ///
-    /// Write the first JSON document to the given StreamWriter matching a JSON containment query (@>)
+    /// Write the first JSON document to the given PipeWriter matching a JSON containment query (@>)
     ///
     /// The table from which a document should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// The document to match with the containment query
     /// The SqlProps to use to execute the query
     []
-    let writeFirstByContains tableName (writer: StreamWriter) (criteria: obj) sqlProps = backgroundTask {
+    let writeFirstByContains tableName writer (criteria: obj) sqlProps = backgroundTask {
         let! json = firstByContains tableName criteria sqlProps
-        do! writer.WriteAsync json
+        let! _ = PipeWriter.writeString writer json
+        ()
     }
 
     ///
@@ -1056,20 +1058,20 @@ module Json =
             sqlProps
 
     ///
-    /// Write the first JSON document to the given StreamWriter matching a JSON containment query (@>)
+    /// Write the first JSON document to the given PipeWriter matching a JSON containment query (@>)
     /// ordered by the given fields in the document
     ///
     /// The table from which a document should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// The document to match with the containment query
     /// Fields by which the results should be ordered
     /// The SqlProps to use to execute the query
     []
-    let writeFirstByContainsOrdered tableName (writer: StreamWriter) (criteria: obj) orderFields sqlProps =
-        backgroundTask {
-            let! json = firstByContainsOrdered tableName criteria orderFields sqlProps
-            do! writer.WriteAsync json
-        }
+    let writeFirstByContainsOrdered tableName writer (criteria: obj) orderFields sqlProps = backgroundTask {
+        let! json = firstByContainsOrdered tableName criteria orderFields sqlProps
+        let! _ = PipeWriter.writeString writer json
+        ()
+    }
 
     /// Retrieve the first JSON document matching a JSON Path match query (@?)
     /// The table from which a document should be retrieved (may include schema)
     /// The JSON Path expression to match
     /// The SqlProps to use to execute the query
     []
@@ -1082,16 +1084,17 @@ module Json =
             (Query.byPathMatch (Query.find tableName)) [ "@path", Sql.string jsonPath ] jsonFromData sqlProps
 
     ///
-    /// Write the first JSON document to the given StreamWriter matching a JSON Path match query (@?)
+    /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?)
     ///
     /// The table from which a document should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// The JSON Path expression to match
     /// The SqlProps to use to execute the query
     []
-    let writeFirstByJsonPath tableName (writer: StreamWriter) jsonPath sqlProps = backgroundTask {
+    let writeFirstByJsonPath tableName writer jsonPath sqlProps = backgroundTask {
         let! json = firstByJsonPath tableName jsonPath sqlProps
-        do! writer.WriteAsync json
+        let! _ = PipeWriter.writeString writer json
+        ()
     }
 
     ///
@@ -1112,18 +1115,19 @@ module Json =
             sqlProps
 
     ///
-    /// Write the first JSON document to the given StreamWriter matching a JSON Path match query (@?)
+    /// Write the first JSON document to the given PipeWriter matching a JSON Path match query (@?)
     /// ordered by the given fields in the document
     ///
     /// The table from which a document should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// The JSON Path expression to match
     /// Fields by which the results should be ordered
     /// The SqlProps to use to execute the query
     []
-    let writeFirstByJsonPathOrdered tableName (writer: StreamWriter) jsonPath orderFields sqlProps = backgroundTask {
+    let writeFirstByJsonPathOrdered tableName writer jsonPath orderFields sqlProps = backgroundTask {
         let! json = firstByJsonPathOrdered tableName jsonPath orderFields sqlProps
-        do! writer.WriteAsync json
+        let! _ = PipeWriter.writeString writer json
+        ()
     }
 
 /// Commands to update documents
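[One clarifying contrast before the SQLite side of the change (a sketch against the API as it stands at this point in the series; "customer" is a placeholder): the json* functions buffer the entire array into one string, while their write* counterparts push each row through the PipeWriter as the query yields it — which is the point of this migration.]

    open System.IO.Pipelines
    open BitBadger.Documents.Postgres

    // Buffered: the whole JSON array is materialized before anything is sent
    let buffered sqlProps : string =
        WithProps.Json.all "customer" sqlProps

    // Streaming: rows flow through the pipe as the data reader produces them
    let streaming (writer: PipeWriter) sqlProps =
        WithProps.Json.writeAll "customer" writer sqlProps
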
diff --git a/src/Sqlite/Extensions.fs b/src/Sqlite/Extensions.fs
index 30160ae..c6d192d 100644
--- a/src/Sqlite/Extensions.fs
+++ b/src/Sqlite/Extensions.fs
@@ -25,10 +25,10 @@ module Extensions =
         member conn.customJsonArray query parameters mapFunc =
             Custom.jsonArray query parameters mapFunc conn
 
-        /// Execute a query, writing its results to the given StreamWriter
+        /// Execute a query, writing its results to the given PipeWriter
        /// The query to retrieve the results
         /// Parameters to use for the query
-        /// The StreamWriter to which the results should be written
+        /// The PipeWriter to which the results should be written
         /// The mapping function to extract the document
         member conn.writeCustomJsonArray query parameters writer mapFunc =
             Custom.writeJsonArray query parameters writer mapFunc conn
 
@@ -242,46 +242,46 @@ module Extensions =
         member conn.jsonFirstByFieldsOrdered tableName howMatched queryFields orderFields =
             Json.firstByFieldsOrdered tableName howMatched queryFields orderFields conn
 
-        /// Write all JSON documents in the given table to the given StreamWriter
+        /// Write all JSON documents in the given table to the given PipeWriter
         /// The table from which documents should be retrieved (may include schema)
-        /// The StreamWriter to which the results should be written
+        /// The PipeWriter to which the results should be written
         member conn.writeJsonAll tableName writer =
             Json.writeAll tableName writer conn
 
        ///
-        /// Write all JSON all documents in the given table to the given StreamWriter, ordered by the given
-        /// fields in the document
+        /// Write all JSON documents in the given table to the given PipeWriter, ordered by the given fields
+        /// in the document
         ///
         /// The table from which documents should be retrieved (may include schema)
-        /// The StreamWriter to which the results should be written
+        /// The PipeWriter to which the results should be written
         /// Fields by which the results should be ordered
         member conn.writeJsonAllOrdered tableName writer orderFields =
             Json.writeAllOrdered tableName writer orderFields conn
 
-        /// Write a JSON document to the given StreamWriter by its ID
+        /// Write a JSON document to the given PipeWriter by its ID
         /// The table from which a document should be retrieved (may include schema)
-        /// The StreamWriter to which the results should be written
+        /// The PipeWriter to which the results should be written
         /// The ID of the document to retrieve
         member conn.writeJsonById<'TKey> tableName writer (docId: 'TKey) =
             Json.writeById tableName writer docId conn
 
         ///
-        /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =,
+        /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =,
         /// etc.)
         ///
         /// The table from which documents should be retrieved (may include schema)
-        /// The StreamWriter to which the results should be written
+        /// The PipeWriter to which the results should be written
         /// Whether to match any or all of the field conditions
         /// The field conditions to match
         member conn.writeJsonByFields tableName writer howMatched fields =
             Json.writeByFields tableName writer howMatched fields conn
 
         ///
-        /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =,
+        /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =,
         /// etc.) ordered by the given fields in the document
         ///
         /// The table from which documents should be retrieved (may include schema)
-        /// The StreamWriter to which the results should be written
+        /// The PipeWriter to which the results should be written
         /// Whether to match any or all of the field conditions
         /// The field conditions to match
         /// Fields by which the results should be ordered
@@ -289,22 +289,22 @@ module Extensions =
             Json.writeByFieldsOrdered tableName writer howMatched queryFields orderFields conn
 
         ///
-        /// Write the first JSON document to the given StreamWriter matching JSON field comparisons
+        /// Write the first JSON document to the given PipeWriter matching JSON field comparisons
         /// (->> =, etc.)
         ///
         /// The table from which a document should be retrieved (may include schema)
-        /// The StreamWriter to which the results should be written
+        /// The PipeWriter to which the results should be written
         /// Whether to match any or all of the field conditions
         /// The field conditions to match
         member conn.writeJsonFirstByFields tableName writer howMatched fields =
             Json.writeFirstByFields tableName writer howMatched fields conn
 
         ///
-        /// Write the first JSON document to the given StreamWriter matching JSON field comparisons
+        /// Write the first JSON document to the given PipeWriter matching JSON field comparisons
         /// (->> =, etc.) ordered by the given fields in the document
         ///
         /// The table from which a document should be retrieved (may include schema)
-        /// The StreamWriter to which the results should be written
+        /// The PipeWriter to which the results should be written
         /// Whether to match any or all of the field conditions
         /// The field conditions to match
         /// Fields by which the results should be ordered
         member conn.writeJsonFirstByFieldsOrdered tableName writer howMatched queryFields orderFields =
             Json.writeFirstByFieldsOrdered tableName writer howMatched queryFields orderFields conn
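[For orientation between the F# and C# halves of this file, an illustrative sketch (not from the patch; the table name and stream source are placeholders): the extension members are thin delegates to WithConn, so capturing streamed JSON from SQLite looks like this.]

    open System.IO
    open System.IO.Pipelines
    open Microsoft.Data.Sqlite
    open BitBadger.Documents.Sqlite

    // Dump a table's documents to any Stream via a PipeWriter
    let dumpTable (conn: SqliteConnection) (stream: Stream) = task {
        let writer = PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true))
        try
            // extension member added above; delegates to WithConn.Json.writeAll
            do! conn.writeJsonAll "customer" writer
        finally
            writer.Complete() // flush and release the pipe, leaving the stream open
    }
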
@@ -399,11 +399,11 @@ type SqliteConnectionCSharpExtensions =
    static member inline CustomJsonArray(conn, query, parameters, mapFunc) =
        Custom.JsonArray(query, parameters, mapFunc, conn)
 
-    /// Execute a query, writing its results to the given StreamWriter
+    /// Execute a query, writing its results to the given PipeWriter
     /// The SqliteConnection on which to run the query
     /// The query to retrieve the results
     /// Parameters to use for the query
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// The mapping function to extract the document
     []
     static member inline WriteCustomJsonArray(conn, query, parameters, writer, mapFunc) =
@@ -668,41 +668,41 @@ type SqliteConnectionCSharpExtensions =
     static member inline JsonFirstByFieldsOrdered(conn, tableName, howMatched, queryFields, orderFields) =
         Json.firstByFieldsOrdered tableName howMatched queryFields orderFields conn
 
-    /// Write all JSON documents in the given table to the given StreamWriter
+    /// Write all JSON documents in the given table to the given PipeWriter
     /// The SqliteConnection on which to run the query
     /// The table from which documents should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     []
     static member inline WriteJsonAll(conn, tableName, writer) =
         Json.writeAll tableName writer conn
 
     ///
-    /// Write all JSON all documents in the given table to the given StreamWriter, ordered by the given fields in
+    /// Write all JSON documents in the given table to the given PipeWriter, ordered by the given fields in
     /// the document
     ///
     /// The SqliteConnection on which to run the query
     /// The table from which documents should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// Fields by which the results should be ordered
     []
     static member inline WriteJsonAllOrdered(conn, tableName, writer, orderFields) =
         Json.writeAllOrdered tableName writer orderFields conn
 
-    /// Write a JSON document to the given StreamWriter by its ID
+    /// Write a JSON document to the given PipeWriter by its ID
     /// The SqliteConnection on which to run the query
     /// The table from which a document should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// The ID of the document to retrieve
     []
     static member inline WriteJsonById<'TKey>(conn, tableName, writer, docId: 'TKey) =
         Json.writeById tableName writer docId conn
 
     ///
-    /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.)
+    /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.)
     ///
     /// The SqliteConnection on which to run the query
     /// The table from which documents should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// Whether to match any or all of the field conditions
     /// The field conditions to match
     []
@@ -710,12 +710,12 @@ type SqliteConnectionCSharpExtensions =
         Json.writeByFields tableName writer howMatched fields conn
 
     ///
-    /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.)
+    /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.)
     /// ordered by the given fields in the document
     ///
     /// The SqliteConnection on which to run the query
     /// The table from which documents should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// Whether to match any or all of the field conditions
     /// The field conditions to match
     /// Fields by which the results should be ordered
@@ -724,12 +724,12 @@ type SqliteConnectionCSharpExtensions =
         Json.writeByFieldsOrdered tableName writer howMatched queryFields orderFields conn
 
     ///
-    /// Write the first JSON document to the given StreamWriter matching JSON field comparisons
+    /// Write the first JSON document to the given PipeWriter matching JSON field comparisons
     /// (->> =, etc.)
     ///
     /// The SqliteConnection on which to run the query
     /// The table from which a document should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// Whether to match any or all of the field conditions
     /// The field conditions to match
     []
@@ -737,12 +737,12 @@ type SqliteConnectionCSharpExtensions =
         Json.writeFirstByFields tableName writer howMatched fields conn
 
     ///
-    /// Write the first JSON document to the given StreamWriter matching JSON field comparisons
+    /// Write the first JSON document to the given PipeWriter matching JSON field comparisons
     /// (->> =, etc.) ordered by the given fields in the document
     ///
     /// The SqliteConnection on which to run the query
     /// The table from which a document should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// Whether to match any or all of the field conditions
     /// The field conditions to match
     /// Fields by which the results should be ordered
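[A quick reminder of how the Functions variants changed below are meant to be called (hedged sketch; the connection string and table name are placeholders, and useConnectionString is assumed from this library's Configuration module): no connection is passed, because each call opens its own via Configuration.dbConn ().]

    open BitBadger.Documents.Sqlite

    // One-time setup; afterward the Functions variants manage their own connections
    Configuration.useConnectionString "Data Source=./documents.db"

    let streamEverything (writer: System.IO.Pipelines.PipeWriter) =
        // opens (and disposes) its own SqliteConnection internally
        Json.writeAll "customer" writer
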
diff --git a/src/Sqlite/Functions.fs b/src/Sqlite/Functions.fs
index 2b0ec65..c147f9a 100644
--- a/src/Sqlite/Functions.fs
+++ b/src/Sqlite/Functions.fs
@@ -44,20 +44,20 @@ module Custom =
         use conn = Configuration.dbConn ()
         WithConn.Custom.JsonArray(query, parameters, mapFunc, conn)
 
-    /// Execute a query, writing its results to the given StreamWriter
+    /// Execute a query, writing its results to the given PipeWriter
     /// The query to retrieve the results
     /// Parameters to use for the query
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// The mapping function to extract the document
     []
     let writeJsonArray query parameters writer mapFunc =
         use conn = Configuration.dbConn ()
         WithConn.Custom.writeJsonArray query parameters writer mapFunc conn
 
-    /// Execute a query, writing its results to the given StreamWriter
+    /// Execute a query, writing its results to the given PipeWriter
     /// The query to retrieve the results
     /// Parameters to use for the query
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// The mapping function to extract the document
     let WriteJsonArray(query, parameters, writer, mapFunc) =
         use conn = Configuration.dbConn ()
@@ -446,29 +446,29 @@ module Json =
         use conn = Configuration.dbConn ()
         WithConn.Json.firstByFieldsOrdered tableName howMatched queryFields orderFields conn
 
-    /// Write all JSON documents in the given table to the given StreamWriter
+    /// Write all JSON documents in the given table to the given PipeWriter
     /// The table from which documents should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     []
     let writeAll tableName writer =
         use conn = Configuration.dbConn ()
         WithConn.Json.writeAll tableName writer conn
 
     ///
-    /// Write all JSON all documents in the given table to the given StreamWriter, ordered by the given fields in
+    /// Write all JSON documents in the given table to the given PipeWriter, ordered by the given fields in
     /// the document
     ///
     /// The table from which documents should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// Fields by which the results should be ordered
     []
     let writeAllOrdered tableName writer orderFields =
         use conn = Configuration.dbConn ()
         WithConn.Json.writeAllOrdered tableName writer orderFields conn
 
-    /// Write a JSON document to the given StreamWriter by its ID
+    /// Write a JSON document to the given PipeWriter by its ID
     /// The table from which a document should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// The ID of the document to retrieve
     []
     let writeById<'TKey> tableName writer (docId: 'TKey) =
         use conn = Configuration.dbConn ()
         WithConn.Json.writeById tableName writer docId conn
 
     ///
-    /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.)
+    /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.)
     ///
     /// The table from which documents should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// Whether to match any or all of the field conditions
     /// The field conditions to match
     []
@@ -488,11 +488,11 @@ module Json =
         WithConn.Json.writeByFields tableName writer howMatched fields conn
 
     ///
-    /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.)
+    /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.)
     /// ordered by the given fields in the document
     ///
     /// The table from which documents should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// Whether to match any or all of the field conditions
     /// The field conditions to match
     /// Fields by which the results should be ordered
@@ -502,11 +502,11 @@ module Json =
         WithConn.Json.writeByFieldsOrdered tableName writer howMatched queryFields orderFields conn
 
     ///
-    /// Write the first JSON document to the given StreamWriter matching JSON field comparisons
+    /// Write the first JSON document to the given PipeWriter matching JSON field comparisons
     /// (->> =, etc.)
     ///
     /// The table from which a document should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// Whether to match any or all of the field conditions
     /// The field conditions to match
     []
@@ -515,11 +515,11 @@ module Json =
         WithConn.Json.writeFirstByFields tableName writer howMatched fields conn
 
     ///
-    /// Write the first JSON document to the given StreamWriter matching JSON field comparisons
+    /// Write the first JSON document to the given PipeWriter matching JSON field comparisons
     /// (->> =, etc.) ordered by the given fields in the document
     ///
     /// The table from which a document should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// Whether to match any or all of the field conditions
     /// The field conditions to match
     /// Fields by which the results should be ordered
diff --git a/src/Sqlite/Library.fs b/src/Sqlite/Library.fs
index 8e74554..0b44915 100644
--- a/src/Sqlite/Library.fs
+++ b/src/Sqlite/Library.fs
@@ -1,8 +1,5 @@
 namespace BitBadger.Documents.Sqlite
 
-open System.IO
-open System.Text
-open BitBadger.Documents
 open Microsoft.Data.Sqlite
 
 /// Configuration for document handling
@@ -34,6 +31,8 @@ module Configuration =
         | None -> invalidOp "Please provide a connection string before attempting data access"
 
 
+open BitBadger.Documents
+
 /// Query definitions
 []
 module Query =
@@ -183,6 +182,8 @@ module Parameters =
         Seq.empty
 
 
+open System.Text
+
 /// Helper functions for handling results
 []
 module Results =
@@ -294,22 +295,18 @@ module Results =
     /// The StreamWriter to which results should be written
     /// The mapping function to extract JSON from the query's results
     []
-    let writeJsonArray (cmd: SqliteCommand) (writer: StreamWriter) (mapFunc: SqliteDataReader -> string) =
-        backgroundTask {
-            use! rdr = cmd.ExecuteReaderAsync()
-            do! writer.WriteAsync "["
-            let mutable isFirst = true
-            while! rdr.ReadAsync() do
-                if isFirst then isFirst <- false else do! writer.WriteAsync ","
-                do! writer.WriteAsync(mapFunc rdr)
-            do! writer.WriteAsync "]"
-        }
+    let writeJsonArray (cmd: SqliteCommand) writer (mapFunc: SqliteDataReader -> string) = backgroundTask {
+        use! rdr = cmd.ExecuteReaderAsync()
+        return!
+            seq { while rdr.Read() do yield mapFunc rdr }
+            |> PipeWriter.writeStrings writer
+    }
 
     /// Write a JSON array of items for the results of a query to the given StreamWriter
     /// The command to execute
     /// The StreamWriter to which results should be written
     /// The mapping function to extract JSON from the query's results
-    let WriteJsonArray (cmd: SqliteCommand) (writer: StreamWriter) (mapFunc: System.Func) =
+    let WriteJsonArray (cmd: SqliteCommand) writer (mapFunc: System.Func) =
         writeJsonArray cmd writer mapFunc.Invoke
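[One subtlety worth spelling out for the implementation above (a distilled sketch, with consume standing in for PipeWriter.writeStrings): the seq over the data reader is lazy, so the write must be awaited inside the same backgroundTask — hence return! rather than return — or use! would dispose the reader while rows are still being pulled.]

    open System.Threading.Tasks
    open Microsoft.Data.Sqlite

    let copyRows (cmd: SqliteCommand) (consume: string seq -> Task) = backgroundTask {
        use! rdr = cmd.ExecuteReaderAsync()
        // lazy: rows are read only while `consume` enumerates this seq
        let rows = seq { while rdr.Read() do yield rdr.GetString 0 }
        // awaiting here keeps the reader alive for the full enumeration
        return! consume rows
    }
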
diff --git a/src/Sqlite/WithConn.fs b/src/Sqlite/WithConn.fs
index 1006a3f..d0deeb9 100644
--- a/src/Sqlite/WithConn.fs
+++ b/src/Sqlite/WithConn.fs
@@ -1,7 +1,6 @@
 /// Versions of queries that accept a SqliteConnection as the last parameter
 module BitBadger.Documents.Sqlite.WithConn
 
-open System.IO
 open BitBadger.Documents
 open Microsoft.Data.Sqlite
 
@@ -64,10 +63,10 @@ module Custom =
     let JsonArray(query, parameters, mapFunc: System.Func, conn) =
         jsonArray query parameters mapFunc.Invoke conn
 
-    /// Execute a query, writing its results to the given StreamWriter
+    /// Execute a query, writing its results to the given PipeWriter
     /// The query to retrieve the results
     /// Parameters to use for the query
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// The mapping function to extract the document
     /// The SqliteConnection to use to execute the query
     []
@@ -82,10 +81,10 @@ module Custom =
         cmd.Parameters.AddRange parameters
         writeJsonArray cmd writer mapFunc
 
-    /// Execute a query, writing its results to the given StreamWriter
+    /// Execute a query, writing its results to the given PipeWriter
     /// The query to retrieve the results
     /// Parameters to use for the query
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// The mapping function to extract the document
     /// The SqliteConnection to use to execute the query
     let WriteJsonArray(query, parameters, writer, mapFunc: System.Func, conn) =
@@ -554,42 +553,43 @@ module Json =
             jsonFromData
             conn
 
-    /// Write all JSON documents in the given table to the given StreamWriter
+    /// Write all JSON documents in the given table to the given PipeWriter
     /// The table from which documents should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// The SqliteConnection to use to execute the query
     []
     let writeAll tableName writer conn =
         Custom.writeJsonArray (Query.find tableName) [] writer jsonFromData conn
 
     ///
-    /// Write all JSON all documents in the given table to the given StreamWriter, ordered by the given fields in
+    /// Write all JSON documents in the given table to the given PipeWriter, ordered by the given fields in
     /// the document
     ///
     /// The table from which documents should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// Fields by which the results should be ordered
     /// The SqliteConnection to use to execute the query
     []
     let writeAllOrdered tableName writer orderFields conn =
         Custom.writeJsonArray (Query.find tableName + Query.orderBy orderFields SQLite) [] writer jsonFromData conn
 
-    /// Write a JSON document to the given StreamWriter by its ID
+    /// Write a JSON document to the given PipeWriter by its ID
     /// The table from which a document should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// The ID of the document to retrieve
     /// The SqliteConnection to use to execute the query
     []
-    let writeById<'TKey> tableName (writer: StreamWriter) (docId: 'TKey) conn = backgroundTask {
+    let writeById<'TKey> tableName writer (docId: 'TKey) conn = backgroundTask {
         let! json = Custom.jsonSingle (Query.byId (Query.find tableName) docId) [ idParam docId ] jsonFromData conn
-        do! writer.WriteAsync json
+        let! _ = PipeWriter.writeString writer json
+        ()
     }
 
     ///
-    /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.)
+    /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.)
     ///
     /// The table from which documents should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// Whether to match any or all of the field conditions
     /// The field conditions to match
     /// The SqliteConnection to use to execute the query
@@ -603,11 +603,11 @@ module Json =
         conn
 
     ///
-    /// Write JSON documents to the given StreamWriter matching JSON field comparisons (->> =, etc.)
+    /// Write JSON documents to the given PipeWriter matching JSON field comparisons (->> =, etc.)
     /// ordered by the given fields in the document
     ///
     /// The table from which documents should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// Whether to match any or all of the field conditions
     /// The field conditions to match
     /// Fields by which the results should be ordered
@@ -622,43 +622,44 @@ module Json =
         conn
 
     ///
-    /// Write the first JSON document to the given StreamWriter matching JSON field comparisons
+    /// Write the first JSON document to the given PipeWriter matching JSON field comparisons
     /// (->> =, etc.)
     ///
     /// The table from which a document should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// Whether to match any or all of the field conditions
     /// The field conditions to match
     /// The SqliteConnection to use to execute the query
     []
-    let writeFirstByFields tableName (writer: StreamWriter) howMatched fields conn = backgroundTask {
+    let writeFirstByFields tableName writer howMatched fields conn = backgroundTask {
         let! json =
             Custom.jsonSingle
                 (Query.byFields (Query.find tableName) howMatched fields) (addFieldParams fields []) jsonFromData conn
-        do! writer.WriteAsync json
+        let! _ = PipeWriter.writeString writer json
+        ()
     }
 
     ///
-    /// Write the first JSON document to the given StreamWriter matching JSON field comparisons
+    /// Write the first JSON document to the given PipeWriter matching JSON field comparisons
     /// (->> =, etc.) ordered by the given fields in the document
     ///
     /// The table from which a document should be retrieved (may include schema)
-    /// The StreamWriter to which the results should be written
+    /// The PipeWriter to which the results should be written
     /// Whether to match any or all of the field conditions
     /// The field conditions to match
     /// Fields by which the results should be ordered
     /// The SqliteConnection to use to execute the query
     []
-    let writeFirstByFieldsOrdered tableName (writer: StreamWriter) howMatched queryFields orderFields conn =
-        backgroundTask {
-            let! json =
-                Custom.jsonSingle
-                    (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields SQLite)
-                    (addFieldParams queryFields [])
-                    jsonFromData
-                    conn
-            do! writer.WriteAsync json
-        }
+    let writeFirstByFieldsOrdered tableName writer howMatched queryFields orderFields conn = backgroundTask {
+        let! json =
+            Custom.jsonSingle
+                (Query.byFields (Query.find tableName) howMatched queryFields + Query.orderBy orderFields SQLite)
+                (addFieldParams queryFields [])
+                jsonFromData
+                conn
+        let! _ = PipeWriter.writeString writer json
+        ()
+    }
 
 
 /// Commands to update documents
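[The tests added below pin down the contract of the PipeWriter helper module this series leans on but whose source lies outside this excerpt. The sketch that follows is a hedged reconstruction from that observable behavior, not the shipped module: writeString reports false once the writer has completed, and writeStrings emits a bracketed, comma-separated array, stopping as soon as a write fails.]

    module PipeWriter =
        open System.IO.Pipelines
        open System.Text

        /// Write one string; false if the writer no longer accepts writes
        let writeString (writer: PipeWriter) (text: string) = backgroundTask {
            try
                let! (result: FlushResult) =
                    writer.WriteAsync(ReadOnlyMemory(Encoding.UTF8.GetBytes text))
                return not result.IsCompleted
            with :? System.InvalidOperationException ->
                // raised when writing after Complete(); surface as "not written"
                return false
        }

        /// Write strings as a JSON array, stopping early if the writer completes
        let writeStrings (writer: PipeWriter) (strings: string seq) = backgroundTask {
            let! started = writeString writer "["
            let mutable ok      = started
            let mutable isFirst = true
            use e = strings.GetEnumerator()
            while ok && e.MoveNext() do
                if isFirst then isFirst <- false
                else
                    let! sep = writeString writer ","
                    ok <- sep
                if ok then
                    let! item = writeString writer e.Current
                    ok <- item
            if ok then
                let! _ = writeString writer "]"
                ()
        }
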
"[le-test]", "A sequence of one string was not written correctly"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds with many strings", async () => + { + await using MemoryStream stream = new(); + var writer = PipeWriter.Create(stream, new StreamPipeWriterOptions(leaveOpen: true)); + + try + { + await PipeWriterModule.WriteStrings(writer, ["z", "y", "x", "c", "b", "a"]); + Expect.equal(StreamText(stream), "[z,y,x,c,b,a]", + "A sequence of many strings was not written correctly"); + } + finally + { + await writer.CompleteAsync(); + } + }), + TestCase("succeeds when the writer is completed early", async () => + { + await using MemoryStream stream = new(); + var writer = PipeWriter.Create(stream, new StreamPipeWriterOptions(leaveOpen: true)); + + await PipeWriterModule.WriteStrings(writer, Items()); + Expect.equal(StreamText(stream), "[a,b,c", "The writing should have stopped when the writer completed"); + return; + + IEnumerable Items() + { + yield return "a"; + yield return "b"; + yield return "c"; + writer.Complete(); + yield return "d"; + yield return "e"; + yield return "f"; + } + }) + ]) + ]); + + /// /// Unit tests /// @@ -660,6 +769,7 @@ public static class CommonCSharpTests ParameterNameTests, AutoIdTests, QueryTests, + PipeWriterTests, TestSequenced(ConfigurationTests) ]); } diff --git a/src/Tests.CSharp/PostgresCSharpExtensionTests.cs b/src/Tests.CSharp/PostgresCSharpExtensionTests.cs index c0398ed..d665106 100644 --- a/src/Tests.CSharp/PostgresCSharpExtensionTests.cs +++ b/src/Tests.CSharp/PostgresCSharpExtensionTests.cs @@ -1,3 +1,4 @@ +using System.IO.Pipelines; using Expecto.CSharp; using Expecto; using BitBadger.Documents.Postgres; @@ -31,12 +32,8 @@ public class PostgresCSharpExtensionTests } /// Set up a stream writer for a test - private static StreamWriter WriteStream(Stream stream) - { - StreamWriter writer = new(stream); - writer.AutoFlush = true; - return writer; - } + private static PipeWriter WriteStream(Stream stream) => + PipeWriter.Create(stream, new StreamPipeWriterOptions(leaveOpen: true)); /// Get the text of the given stream private static string StreamText(Stream stream) @@ -54,10 +51,8 @@ public class PostgresCSharpExtensionTests } /// Verify the presence of a document by its ID - private static void VerifyDocById(string json, string docId) - { + private static void VerifyDocById(string json, string docId) => Expect.stringContains(json, $"{{\"Id\": \"{docId}\",", $"Document `{docId}` not present"); - } /// Verify the presence of a document by its ID private static void VerifySingleById(string json, string docId) @@ -84,16 +79,12 @@ public class PostgresCSharpExtensionTests } /// Verify an empty JSON array - private static void VerifyEmpty(string json) - { + private static void VerifyEmpty(string json) => Expect.equal(json, "[]", "There should be no documents returned"); - } /// Verify an empty JSON document - private static void VerifyNoDoc(string json) - { + private static void VerifyNoDoc(string json) => Expect.equal(json, "{}", "There should be no document returned"); - } /// Verify the JSON for an ordered query private static void VerifyExpectedOrder(string json, string idFirst, string idSecond, string? 
    private static void VerifyExpectedOrder(string json, string idFirst, string idSecond, string? idThird = null,
@@ -158,9 +149,8 @@ public class PostgresCSharpExtensionTests
                 var docs = await conn.CustomJsonArray(Query.Find(PostgresDb.TableName), Parameters.None,
                     Results.JsonFromData);
-                Expect.stringStarts(docs, "[", "The JSON array should have started with `[`");
+                VerifyBeginEnd(docs);
                 Expect.hasLength(docs.Split("{\"Id\":"), 6, "There should have been 5 documents returned");
-                Expect.stringEnds(docs, "]", "The JSON array should have ended with `]`");
             }),
             TestCase("succeeds when data is not found", async () =>
             {
@@ -171,7 +161,7 @@ public class PostgresCSharpExtensionTests
                 var docs = await conn.CustomJsonArray(
                     $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath",
                     [Tuple.Create("@path", Sql.@string("$.NumValue ? (@ > 100)"))], Results.JsonFromData);
-                Expect.equal(docs, "[]", "There should have been no documents returned");
+                VerifyEmpty(docs);
             })
         ]),
         TestList("WriteJsonArray",
@@ -183,14 +173,19 @@ public class PostgresCSharpExtensionTests
                 await LoadDocs(conn);
 
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteCustomJsonArray(Query.Find(PostgresDb.TableName), Parameters.None, writer,
-                    Results.JsonFromData);
-
-                var docs = StreamText(stream);
-                Expect.stringStarts(docs, "[", "The JSON array should have started with `[`");
-                Expect.hasLength(docs.Split("{\"Id\":"), 6, "There should have been 5 documents returned");
-                Expect.stringEnds(docs, "]", "The JSON array should have ended with `]`");
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteCustomJsonArray(Query.Find(PostgresDb.TableName), Parameters.None, writer,
+                        Results.JsonFromData);
+                    var docs = StreamText(stream);
+                    VerifyBeginEnd(docs);
+                    Expect.hasLength(docs.Split("{\"Id\":"), 6, "There should have been 5 documents returned");
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             }),
             TestCase("succeeds when data is not found", async () =>
             {
@@ -199,12 +194,18 @@ public class PostgresCSharpExtensionTests
                 await LoadDocs(conn);
 
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteCustomJsonArray(
-                    $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath",
-                    [Tuple.Create("@path", Sql.@string("$.NumValue ? (@ > 100)"))], writer, Results.JsonFromData);
-
-                Expect.equal(StreamText(stream), "[]", "There should have been no documents returned");
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteCustomJsonArray(
+                        $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath",
+                        [Tuple.Create("@path", Sql.@string("$.NumValue ? (@ > 100)"))], writer, Results.JsonFromData);
+                    VerifyEmpty(StreamText(stream));
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             })
         ]),
         TestList("CustomSingle",
@@ -1331,18 +1332,32 @@ public class PostgresCSharpExtensionTests
                 await using var conn = MkConn(db);
                 await LoadDocs(conn);
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonAll(PostgresDb.TableName, writer);
-                VerifyAllData(StreamText(stream));
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonAll(PostgresDb.TableName, writer);
+                    VerifyAllData(StreamText(stream));
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             }),
             TestCase("succeeds when there is no data", async () =>
             {
                 await using var db = PostgresDb.BuildDb();
                 await using var conn = MkConn(db);
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonAll(PostgresDb.TableName, writer);
-                VerifyEmpty(StreamText(stream));
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonAll(PostgresDb.TableName, writer);
+                    VerifyEmpty(StreamText(stream));
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             })
         ]),
         TestList("WriteJsonAllOrdered",
@@ -1353,9 +1368,16 @@ public class PostgresCSharpExtensionTests
                 await using var conn = MkConn(db);
                 await LoadDocs(conn);
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonAllOrdered(PostgresDb.TableName, writer, [Field.Named("n:NumValue")]);
-                VerifyExpectedOrder(StreamText(stream), "one", "three", "two", "four", "five");
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonAllOrdered(PostgresDb.TableName, writer, [Field.Named("n:NumValue")]);
+                    VerifyExpectedOrder(StreamText(stream), "one", "three", "two", "four", "five");
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             }),
             TestCase("succeeds when ordering numerically descending", async () =>
             {
@@ -1363,9 +1385,16 @@ public class PostgresCSharpExtensionTests
                 await using var conn = MkConn(db);
                 await LoadDocs(conn);
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonAllOrdered(PostgresDb.TableName, writer, [Field.Named("n:NumValue DESC")]);
-                VerifyExpectedOrder(StreamText(stream), "five", "four", "two", "three", "one");
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonAllOrdered(PostgresDb.TableName, writer, [Field.Named("n:NumValue DESC")]);
+                    VerifyExpectedOrder(StreamText(stream), "five", "four", "two", "three", "one");
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             }),
             TestCase("succeeds when ordering alphabetically", async () =>
             {
@@ -1373,9 +1402,16 @@ public class PostgresCSharpExtensionTests
                 await using var conn = MkConn(db);
                 await LoadDocs(conn);
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonAllOrdered(PostgresDb.TableName, writer, [Field.Named("Id DESC")]);
-                VerifyExpectedOrder(StreamText(stream), "two", "three", "one", "four", "five");
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonAllOrdered(PostgresDb.TableName, writer, [Field.Named("Id DESC")]);
+                    VerifyExpectedOrder(StreamText(stream), "two", "three", "one", "four", "five");
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             })
         ]),
         TestList("WriteJsonById",
@@ -1387,11 +1423,18 @@ public class PostgresCSharpExtensionTests
                 await LoadDocs(conn);
 
                 await using MemoryStream stream = new();
-                await using var writer =
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonById(PostgresDb.TableName, writer, "two");
-                var json = StreamText(stream);
-                Expect.stringStarts(json, """{"Id": "two",""", "An incorrect document was returned");
-                Expect.stringEnds(json, "}", "JSON should have ended with this document");
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonById(PostgresDb.TableName, writer, "two");
+                    var json = StreamText(stream);
+                    Expect.stringStarts(json, """{"Id": "two",""", "An incorrect document was returned");
+                    Expect.stringEnds(json, "}", "JSON should have ended with this document");
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             }),
             TestCase("succeeds when a document is not found", async () =>
             {
@@ -1399,9 +1442,16 @@ public class PostgresCSharpExtensionTests
                 await using var conn = MkConn(db);
                 await LoadDocs(conn);
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonById(PostgresDb.TableName, writer, "three hundred eighty-seven");
-                VerifyNoDoc(StreamText(stream));
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonById(PostgresDb.TableName, writer, "three hundred eighty-seven");
+                    VerifyNoDoc(StreamText(stream));
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             })
         ]),
         TestList("WriteJsonByFields",
@@ -1412,10 +1462,17 @@ public class PostgresCSharpExtensionTests
                 await using var conn = MkConn(db);
                 await LoadDocs(conn);
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonByFields(PostgresDb.TableName, writer, FieldMatch.All,
-                    [Field.In("Value", ["purple", "blue"]), Field.Exists("Sub")]);
-                VerifySingleById(StreamText(stream), "four");
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonByFields(PostgresDb.TableName, writer, FieldMatch.All,
+                        [Field.In("Value", ["purple", "blue"]), Field.Exists("Sub")]);
+                    VerifySingleById(StreamText(stream), "four");
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             }),
             TestCase("succeeds when documents are found using IN with numeric field", async() =>
             {
@@ -1423,10 +1480,17 @@ public class PostgresCSharpExtensionTests
                 await using var conn = MkConn(db);
                 await LoadDocs(conn);
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonByFields(PostgresDb.TableName, writer, FieldMatch.All,
-                    [Field.In("NumValue", [2, 4, 6, 8])]);
-                VerifySingleById(StreamText(stream), "three");
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonByFields(PostgresDb.TableName, writer, FieldMatch.All,
+                        [Field.In("NumValue", [2, 4, 6, 8])]);
+                    VerifySingleById(StreamText(stream), "three");
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             }),
             TestCase("succeeds when documents are not found", async () =>
             {
@@ -1434,10 +1498,17 @@ public class PostgresCSharpExtensionTests
                 await using var conn = MkConn(db);
                 await LoadDocs(conn);
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonByFields(PostgresDb.TableName, writer, FieldMatch.All,
-                    [Field.Equal("Value", "mauve"), Field.NotEqual("NumValue", 40)]);
-                VerifyEmpty(StreamText(stream));
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonByFields(PostgresDb.TableName, writer, FieldMatch.All,
+                        [Field.Equal("Value", "mauve"), Field.NotEqual("NumValue", 40)]);
+                    VerifyEmpty(StreamText(stream));
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             }),
             TestCase("succeeds for InArray when matching documents exist", async ()
                =>
            {
@@ -1447,13 +1518,20 @@ public class PostgresCSharpExtensionTests
                 await using var db = PostgresDb.BuildDb();
                 await using var conn = MkConn(db);
                 foreach (var doc in ArrayDocument.TestDocuments) await conn.Insert(PostgresDb.TableName, doc);
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonByFields(PostgresDb.TableName, writer, FieldMatch.All,
-                    [Field.InArray("Values", PostgresDb.TableName, ["c"])]);
-                var json = StreamText(stream);
-                VerifyBeginEnd(json);
-                VerifyDocById(json, "first");
-                VerifyDocById(json, "second");
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonByFields(PostgresDb.TableName, writer, FieldMatch.All,
+                        [Field.InArray("Values", PostgresDb.TableName, ["c"])]);
+                    var json = StreamText(stream);
+                    VerifyBeginEnd(json);
+                    VerifyDocById(json, "first");
+                    VerifyDocById(json, "second");
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             }),
             TestCase("succeeds for InArray when no matching documents exist", async () =>
             {
@@ -1463,10 +1541,17 @@ public class PostgresCSharpExtensionTests
                 foreach (var doc in ArrayDocument.TestDocuments) await conn.Insert(PostgresDb.TableName, doc);
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonByFields(PostgresDb.TableName, writer, FieldMatch.All,
-                    [Field.InArray("Values", PostgresDb.TableName, ["j"])]);
-                VerifyEmpty(StreamText(stream));
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonByFields(PostgresDb.TableName, writer, FieldMatch.All,
+                        [Field.InArray("Values", PostgresDb.TableName, ["j"])]);
+                    VerifyEmpty(StreamText(stream));
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             })
         ]),
         TestList("WriteJsonByFieldsOrdered",
@@ -1477,10 +1562,17 @@ public class PostgresCSharpExtensionTests
                 await using var conn = MkConn(db);
                 await LoadDocs(conn);
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.All,
-                    [Field.Equal("Value", "purple")], [Field.Named("Id")]);
-                VerifyExpectedOrder(StreamText(stream), "five", "four");
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.All,
+                        [Field.Equal("Value", "purple")], [Field.Named("Id")]);
+                    VerifyExpectedOrder(StreamText(stream), "five", "four");
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             }),
             TestCase("succeeds when sorting descending", async () =>
             {
@@ -1488,10 +1580,17 @@ public class PostgresCSharpExtensionTests
                 await using var conn = MkConn(db);
                 await LoadDocs(conn);
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.All,
-                    [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]);
-                VerifyExpectedOrder(StreamText(stream), "four", "five");
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.All,
+                        [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]);
+                    VerifyExpectedOrder(StreamText(stream), "four", "five");
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             })
         ]),
         TestList("WriteJsonByContains",
@@ -1503,12 +1602,19 @@ public class PostgresCSharpExtensionTests
                 await LoadDocs(conn);
 
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonByContains(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } });
-                var json = StreamText(stream);
-
-                VerifyBeginEnd(json);
-                VerifyDocById(json, "two");
-                VerifyDocById(json, "four");
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonByContains(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } });
+                    var json = StreamText(stream);
+                    VerifyBeginEnd(json);
+                    VerifyDocById(json, "two");
+                    VerifyDocById(json, "four");
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             }),
             TestCase("succeeds when documents are not found", async () =>
             {
@@ -1516,9 +1622,16 @@ public class PostgresCSharpExtensionTests
                 await using var conn = MkConn(db);
                 await LoadDocs(conn);
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonByContains(PostgresDb.TableName, writer, new { Value = "mauve" });
-                VerifyEmpty(StreamText(stream));
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonByContains(PostgresDb.TableName, writer, new { Value = "mauve" });
+                    VerifyEmpty(StreamText(stream));
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             })
         ]),
         TestList("WriteJsonByContainsOrdered",
@@ -1530,10 +1643,18 @@ public class PostgresCSharpExtensionTests
                 await using var conn = MkConn(db);
                 await LoadDocs(conn);
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonByContainsOrdered(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } },
-                    [Field.Named("Sub.Bar")]);
-                VerifyExpectedOrder(StreamText(stream), "two", "four");
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonByContainsOrdered(PostgresDb.TableName, writer,
+                        new { Sub = new { Foo = "green" } },
+                        [Field.Named("Sub.Bar")]);
+                    VerifyExpectedOrder(StreamText(stream), "two", "four");
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             }),
             TestCase("succeeds when sorting descending", async () =>
             {
@@ -1541,10 +1662,18 @@ public class PostgresCSharpExtensionTests
                 await using var conn = MkConn(db);
                 await LoadDocs(conn);
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonByContainsOrdered(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } },
-                    [Field.Named("Sub.Bar DESC")]);
-                VerifyExpectedOrder(StreamText(stream), "four", "two");
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonByContainsOrdered(PostgresDb.TableName, writer,
+                        new { Sub = new { Foo = "green" } },
+                        [Field.Named("Sub.Bar DESC")]);
+                    VerifyExpectedOrder(StreamText(stream), "four", "two");
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             })
         ]),
         TestList("WriteJsonByJsonPath",
@@ -1556,13 +1685,20 @@ public class PostgresCSharpExtensionTests
                 await LoadDocs(conn);
 
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonByJsonPath(PostgresDb.TableName, writer, "$.NumValue ? (@ < 15)");
-                var json = StreamText(stream);
-                VerifyBeginEnd(json);
-                VerifyDocById(json, "one");
-                VerifyDocById(json, "two");
-                VerifyDocById(json, "three");
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonByJsonPath(PostgresDb.TableName, writer, "$.NumValue ? (@ < 15)");
+                    var json = StreamText(stream);
+                    VerifyBeginEnd(json);
+                    VerifyDocById(json, "one");
+                    VerifyDocById(json, "two");
+                    VerifyDocById(json, "three");
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             }),
             TestCase("succeeds when documents are not found", async () =>
             {
@@ -1570,9 +1706,16 @@ public class PostgresCSharpExtensionTests
                 await using var conn = MkConn(db);
                 await LoadDocs(conn);
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonByJsonPath(PostgresDb.TableName, writer, "$.NumValue ? (@ < 0)");
-                VerifyEmpty(StreamText(stream));
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonByJsonPath(PostgresDb.TableName, writer, "$.NumValue ? (@ < 0)");
+                    VerifyEmpty(StreamText(stream));
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             })
         ]),
         TestList("WriteJsonByJsonPathOrdered",
@@ -1584,10 +1727,17 @@ public class PostgresCSharpExtensionTests
                 await using var conn = MkConn(db);
                 await LoadDocs(conn);
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonByJsonPathOrdered(PostgresDb.TableName, writer, "$.NumValue ? (@ < 15)",
-                    [Field.Named("n:NumValue")]);
-                VerifyExpectedOrder(StreamText(stream), "one", "three", "two");
+                var writer = WriteStream(stream);
+                try
+                {
+                    await conn.WriteJsonByJsonPathOrdered(PostgresDb.TableName, writer, "$.NumValue ? (@ < 15)",
+                        [Field.Named("n:NumValue")]);
+                    VerifyExpectedOrder(StreamText(stream), "one", "three", "two");
+                }
+                finally
+                {
+                    await writer.CompleteAsync();
+                }
             }),
             TestCase("succeeds when sorting descending", async () =>
             {
@@ -1595,10 +1745,17 @@ public class PostgresCSharpExtensionTests
                 await using var conn = MkConn(db);
                 await LoadDocs(conn);
                 await using MemoryStream stream = new();
-                await using var writer = WriteStream(stream);
-                await conn.WriteJsonByJsonPathOrdered(PostgresDb.TableName, writer, "$.NumValue ? (@ < 15)",
-                    [Field.Named("n:NumValue DESC")]);
-                VerifyExpectedOrder(StreamText(stream), "two", "three", "one");
+                var writer = WriteStream(stream);
+                try
+                {
(@ < 15)", + [Field.Named("n:NumValue DESC")]); + VerifyExpectedOrder(StreamText(stream), "two", "three", "one"); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteJsonFirstByFields", @@ -1609,10 +1766,17 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); await LoadDocs(conn); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, - [Field.Equal("Value", "another")]); - VerifyDocById(StreamText(stream), "two"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "another")]); + VerifyDocById(StreamText(stream), "two"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when multiple documents are found", async () => { @@ -1620,10 +1784,17 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); await LoadDocs(conn); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, - [Field.Equal("Value", "purple")]); - VerifyAnyById(StreamText(stream), ["five", "four"]); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "purple")]); + VerifyAnyById(StreamText(stream), ["five", "four"]); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when a document is not found", async () => { @@ -1631,10 +1802,17 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); await LoadDocs(conn); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, - [Field.Equal("Value", "absent")]); - VerifyNoDoc(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "absent")]); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteJsonFirstByFieldsOrdered", @@ -1645,10 +1823,17 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); await LoadDocs(conn); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonFirstByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.Any, - [Field.Equal("Value", "purple")], [Field.Named("Id")]); - VerifyDocById(StreamText(stream), "five"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "purple")], [Field.Named("Id")]); + VerifyDocById(StreamText(stream), "five"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when sorting descending", async () => { @@ -1656,10 +1841,17 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); await LoadDocs(conn); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonFirstByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.Any, - [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]); - VerifyDocById(StreamText(stream), "four"); + var writer = WriteStream(stream); + try + { + await 
conn.WriteJsonFirstByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]); + VerifyDocById(StreamText(stream), "four"); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteJsonFirstByContains", @@ -1670,9 +1862,16 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); await LoadDocs(conn); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonFirstByContains(PostgresDb.TableName, writer, new { Value = "another" }); - VerifyDocById(StreamText(stream), "two"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByContains(PostgresDb.TableName, writer, new { Value = "another" }); + VerifyDocById(StreamText(stream), "two"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when multiple documents are found", async () => { @@ -1680,9 +1879,17 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); await LoadDocs(conn); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonFirstByContains(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }); - VerifyAnyById(StreamText(stream), ["two", "four"]); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByContains(PostgresDb.TableName, writer, + new { Sub = new { Foo = "green" } }); + VerifyAnyById(StreamText(stream), ["two", "four"]); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when a document is not found", async () => { @@ -1690,9 +1897,16 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); await LoadDocs(conn); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonFirstByContains(PostgresDb.TableName, writer, new { Value = "absent" }); - VerifyNoDoc(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByContains(PostgresDb.TableName, writer, new { Value = "absent" }); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteJsonFirstByContainsOrdered", @@ -1703,10 +1917,17 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); await LoadDocs(conn); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonFirstByContainsOrdered(PostgresDb.TableName, writer, - new { Sub = new { Foo = "green" } }, [Field.Named("Value")]); - VerifyDocById(StreamText(stream), "two"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByContainsOrdered(PostgresDb.TableName, writer, + new { Sub = new { Foo = "green" } }, [Field.Named("Value")]); + VerifyDocById(StreamText(stream), "two"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when sorting descending", async () => { @@ -1714,10 +1935,17 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); await LoadDocs(conn); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonFirstByContainsOrdered(PostgresDb.TableName, writer, - new { Sub = new { Foo = "green" } }, [Field.Named("Value DESC")]); - VerifyDocById(StreamText(stream), "four"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByContainsOrdered(PostgresDb.TableName, writer, + new { Sub = new 
{ Foo = "green" } }, [Field.Named("Value DESC")]); + VerifyDocById(StreamText(stream), "four"); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteJsonFirstByJsonPath", @@ -1728,9 +1956,16 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); await LoadDocs(conn); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonFirstByJsonPath(PostgresDb.TableName, writer, """$.Value ? (@ == "FIRST!")"""); - VerifyDocById(StreamText(stream), "one"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByJsonPath(PostgresDb.TableName, writer, """$.Value ? (@ == "FIRST!")"""); + VerifyDocById(StreamText(stream), "one"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when multiple documents are found", async () => { @@ -1738,9 +1973,16 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); await LoadDocs(conn); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonFirstByJsonPath(PostgresDb.TableName, writer, """$.Sub.Foo ? (@ == "green")"""); - VerifyAnyById(StreamText(stream), ["two", "four"]); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByJsonPath(PostgresDb.TableName, writer, """$.Sub.Foo ? (@ == "green")"""); + VerifyAnyById(StreamText(stream), ["two", "four"]); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when a document is not found", async () => { @@ -1748,9 +1990,16 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); await LoadDocs(conn); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonFirstByJsonPath(PostgresDb.TableName, writer, """$.Id ? (@ == "nope")"""); - VerifyNoDoc(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByJsonPath(PostgresDb.TableName, writer, """$.Id ? (@ == "nope")"""); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteJsonFirstByJsonPathOrdered", @@ -1761,10 +2010,17 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); await LoadDocs(conn); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonFirstByJsonPathOrdered(PostgresDb.TableName, writer, - """$.Sub.Foo ? (@ == "green")""", [Field.Named("Sub.Bar")]); - VerifyDocById(StreamText(stream), "two"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByJsonPathOrdered(PostgresDb.TableName, writer, + """$.Sub.Foo ? (@ == "green")""", [Field.Named("Sub.Bar")]); + VerifyDocById(StreamText(stream), "two"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when sorting descending", async () => { @@ -1772,10 +2028,17 @@ public class PostgresCSharpExtensionTests await using var conn = MkConn(db); await LoadDocs(conn); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonFirstByJsonPathOrdered(PostgresDb.TableName, writer, - """$.Sub.Foo ? (@ == "green")""", [Field.Named("Sub.Bar DESC")]); - VerifyDocById(StreamText(stream), "four"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByJsonPathOrdered(PostgresDb.TableName, writer, + """$.Sub.Foo ? 
(@ == "green")""", [Field.Named("Sub.Bar DESC")]); + VerifyDocById(StreamText(stream), "four"); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("UpdateById", diff --git a/src/Tests.CSharp/PostgresCSharpTests.cs b/src/Tests.CSharp/PostgresCSharpTests.cs index b0b3393..da3a991 100644 --- a/src/Tests.CSharp/PostgresCSharpTests.cs +++ b/src/Tests.CSharp/PostgresCSharpTests.cs @@ -1,3 +1,4 @@ +using System.IO.Pipelines; using Expecto.CSharp; using Expecto; using BitBadger.Documents.Postgres; @@ -329,12 +330,8 @@ public static class PostgresCSharpTests } /// Set up a stream writer for a test - private static StreamWriter WriteStream(Stream stream) - { - StreamWriter writer = new(stream); - writer.AutoFlush = true; - return writer; - } + private static PipeWriter WriteStream(Stream stream) => + PipeWriter.Create(stream, new StreamPipeWriterOptions(leaveOpen: true)); /// Get the text of the given stream private static string StreamText(Stream stream) @@ -352,10 +349,8 @@ public static class PostgresCSharpTests } /// Verify the presence of a document by its ID - private static void VerifyDocById(string json, string docId) - { + private static void VerifyDocById(string json, string docId) => Expect.stringContains(json, $"{{\"Id\": \"{docId}\",", $"Document `{docId}` not present"); - } /// Verify the presence of a document by its ID private static void VerifySingleById(string json, string docId) @@ -382,16 +377,12 @@ public static class PostgresCSharpTests } /// Verify an empty JSON array - private static void VerifyEmpty(string json) - { + private static void VerifyEmpty(string json) => Expect.equal(json, "[]", "There should be no documents returned"); - } /// Verify an empty JSON document - private static void VerifyNoDoc(string json) - { + private static void VerifyNoDoc(string json) => Expect.equal(json, "{}", "There should be no document returned"); - } /// /// Integration tests for the Configuration module of the PostgreSQL library @@ -486,14 +477,20 @@ public static class PostgresCSharpTests await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Custom.WriteJsonArray(Query.Find(PostgresDb.TableName), Parameters.None, writer, - Results.JsonFromData); - - var docs = StreamText(stream); - Expect.stringStarts(docs, "[", "The JSON array should have started with `[`"); - Expect.hasLength(docs.Split("{\"Id\":"), 6, "There should have been 5 documents returned"); - Expect.stringEnds(docs, "]", "The JSON array should have ended with `[`"); + var writer = WriteStream(stream); + try + { + await Custom.WriteJsonArray(Query.Find(PostgresDb.TableName), Parameters.None, writer, + Results.JsonFromData); + var docs = StreamText(stream); + Expect.stringStarts(docs, "[", "The JSON array should have started with `[`"); + Expect.hasLength(docs.Split("{\"Id\":"), 6, "There should have been 5 documents returned"); + Expect.stringEnds(docs, "]", "The JSON array should have ended with `[`"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when data is not found", async () => { @@ -501,11 +498,18 @@ public static class PostgresCSharpTests await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Custom.WriteJsonArray($"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath", - [Tuple.Create("@path", Sql.@string("$.NumValue ? 
(@ > 100)"))], writer, Results.JsonFromData); - - Expect.equal(StreamText(stream), "[]", "There should have been no documents returned"); + var writer = WriteStream(stream); + try + { + await Custom.WriteJsonArray( + $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath", + [Tuple.Create("@path", Sql.@string("$.NumValue ? (@ > 100)"))], writer, Results.JsonFromData); + Expect.equal(StreamText(stream), "[]", "There should have been no documents returned"); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("Single", @@ -1714,17 +1718,31 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteAll(PostgresDb.TableName, writer); - VerifyAllData(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await Json.WriteAll(PostgresDb.TableName, writer); + VerifyAllData(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when there is no data", async () => { await using var db = PostgresDb.BuildDb(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteAll(PostgresDb.TableName, writer); - VerifyEmpty(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await Json.WriteAll(PostgresDb.TableName, writer); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteAllOrdered", @@ -1734,27 +1752,48 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteAllOrdered(PostgresDb.TableName, writer, [Field.Named("n:NumValue")]); - VerifyExpectedOrder(StreamText(stream), "one", "three", "two", "four", "five"); + var writer = WriteStream(stream); + try + { + await Json.WriteAllOrdered(PostgresDb.TableName, writer, [Field.Named("n:NumValue")]); + VerifyExpectedOrder(StreamText(stream), "one", "three", "two", "four", "five"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when ordering numerically descending", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteAllOrdered(PostgresDb.TableName, writer, [Field.Named("n:NumValue DESC")]); - VerifyExpectedOrder(StreamText(stream), "five", "four", "two", "three", "one"); + var writer = WriteStream(stream); + try + { + await Json.WriteAllOrdered(PostgresDb.TableName, writer, [Field.Named("n:NumValue DESC")]); + VerifyExpectedOrder(StreamText(stream), "five", "four", "two", "three", "one"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when ordering alphabetically", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteAllOrdered(PostgresDb.TableName, writer, [Field.Named("Id DESC")]); - VerifyExpectedOrder(StreamText(stream), "two", "three", "one", "four", "five"); + var writer = WriteStream(stream); + try + { + await Json.WriteAllOrdered(PostgresDb.TableName, writer, [Field.Named("Id DESC")]); + VerifyExpectedOrder(StreamText(stream), "two", "three", "one", "four", "five"); + } + finally + { + await writer.CompleteAsync(); + } }) ]), 
TestList("WriteById", @@ -1765,20 +1804,34 @@ public static class PostgresCSharpTests await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteById(PostgresDb.TableName, writer, "two"); - var json = StreamText(stream); - Expect.stringStarts(json, """{"Id": "two",""", "An incorrect document was returned"); - Expect.stringEnds(json, "}", "JSON should have ended with this document"); + var writer = WriteStream(stream); + try + { + await Json.WriteById(PostgresDb.TableName, writer, "two"); + var json = StreamText(stream); + Expect.stringStarts(json, """{"Id": "two",""", "An incorrect document was returned"); + Expect.stringEnds(json, "}", "JSON should have ended with this document"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when a document is not found", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteById(PostgresDb.TableName, writer, "three hundred eighty-seven"); - VerifyNoDoc(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await Json.WriteById(PostgresDb.TableName, writer, "three hundred eighty-seven"); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteByFields", @@ -1788,30 +1841,51 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByFields(PostgresDb.TableName, writer, FieldMatch.All, - [Field.In("Value", ["purple", "blue"]), Field.Exists("Sub")]); - VerifySingleById(StreamText(stream), "four"); + var writer = WriteStream(stream); + try + { + await Json.WriteByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.In("Value", ["purple", "blue"]), Field.Exists("Sub")]); + VerifySingleById(StreamText(stream), "four"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when documents are found using IN with numeric field", async() => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByFields(PostgresDb.TableName, writer, FieldMatch.All, - [Field.In("NumValue", [2, 4, 6, 8])]); - VerifySingleById(StreamText(stream), "three"); + var writer = WriteStream(stream); + try + { + await Json.WriteByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.In("NumValue", [2, 4, 6, 8])]); + VerifySingleById(StreamText(stream), "three"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when documents are not found", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByFields(PostgresDb.TableName, writer, FieldMatch.All, - [Field.Equal("Value", "mauve"), Field.NotEqual("NumValue", 40)]); - VerifyEmpty(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await Json.WriteByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.Equal("Value", "mauve"), Field.NotEqual("NumValue", 40)]); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds for InArray when matching documents exist", async () => { @@ -1820,13 +1894,20 @@ public static class 
PostgresCSharpTests foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(PostgresDb.TableName, doc); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByFields(PostgresDb.TableName, writer, FieldMatch.All, - [Field.InArray("Values", PostgresDb.TableName, ["c"])]); - var json = StreamText(stream); - VerifyBeginEnd(json); - VerifyDocById(json, "first"); - VerifyDocById(json, "second"); + var writer = WriteStream(stream); + try + { + await Json.WriteByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", PostgresDb.TableName, ["c"])]); + var json = StreamText(stream); + VerifyBeginEnd(json); + VerifyDocById(json, "first"); + VerifyDocById(json, "second"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds for InArray when no matching documents exist", async () => { @@ -1834,10 +1915,17 @@ public static class PostgresCSharpTests await Definition.EnsureTable(PostgresDb.TableName); foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(PostgresDb.TableName, doc); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByFields(PostgresDb.TableName, writer, FieldMatch.All, - [Field.InArray("Values", PostgresDb.TableName, ["j"])]); - VerifyEmpty(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await Json.WriteByFields(PostgresDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", PostgresDb.TableName, ["j"])]); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteByFieldsOrdered", @@ -1847,20 +1935,34 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.All, - [Field.Equal("Value", "purple")], [Field.Named("Id")]); - VerifyExpectedOrder(StreamText(stream), "five", "four"); + var writer = WriteStream(stream); + try + { + await Json.WriteByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.All, + [Field.Equal("Value", "purple")], [Field.Named("Id")]); + VerifyExpectedOrder(StreamText(stream), "five", "four"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when sorting descending", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.All, - [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]); - VerifyExpectedOrder(StreamText(stream), "four", "five"); + var writer = WriteStream(stream); + try + { + await Json.WriteByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.All, + [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]); + VerifyExpectedOrder(StreamText(stream), "four", "five"); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteByContains", @@ -1871,21 +1973,35 @@ public static class PostgresCSharpTests await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByContains(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }); - var json = StreamText(stream); - VerifyBeginEnd(json); - VerifyDocById(json, "two"); - VerifyDocById(json, "four"); + var writer = 
WriteStream(stream); + try + { + await Json.WriteByContains(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }); + var json = StreamText(stream); + VerifyBeginEnd(json); + VerifyDocById(json, "two"); + VerifyDocById(json, "four"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when documents are not found", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByContains(PostgresDb.TableName, writer, new { Value = "mauve" }); - VerifyEmpty(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await Json.WriteByContains(PostgresDb.TableName, writer, new { Value = "mauve" }); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteByContainsOrdered", @@ -1896,20 +2012,34 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByContainsOrdered(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }, - [Field.Named("Sub.Bar")]); - VerifyExpectedOrder(StreamText(stream), "two", "four"); + var writer = WriteStream(stream); + try + { + await Json.WriteByContainsOrdered(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }, + [Field.Named("Sub.Bar")]); + VerifyExpectedOrder(StreamText(stream), "two", "four"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when sorting descending", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByContainsOrdered(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }, - [Field.Named("Sub.Bar DESC")]); - VerifyExpectedOrder(StreamText(stream), "four", "two"); + var writer = WriteStream(stream); + try + { + await Json.WriteByContainsOrdered(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }, + [Field.Named("Sub.Bar DESC")]); + VerifyExpectedOrder(StreamText(stream), "four", "two"); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteByJsonPath", @@ -1920,22 +2050,36 @@ public static class PostgresCSharpTests await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByJsonPath(PostgresDb.TableName, writer, "$.NumValue ? (@ < 15)"); - var json = StreamText(stream); - VerifyBeginEnd(json); - VerifyDocById(json, "one"); - VerifyDocById(json, "two"); - VerifyDocById(json, "three"); + var writer = WriteStream(stream); + try + { + await Json.WriteByJsonPath(PostgresDb.TableName, writer, "$.NumValue ? (@ < 15)"); + var json = StreamText(stream); + VerifyBeginEnd(json); + VerifyDocById(json, "one"); + VerifyDocById(json, "two"); + VerifyDocById(json, "three"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when documents are not found", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByJsonPath(PostgresDb.TableName, writer, "$.NumValue ? (@ < 0)"); - VerifyEmpty(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await Json.WriteByJsonPath(PostgresDb.TableName, writer, "$.NumValue ? 
(@ < 0)"); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteByJsonPathOrdered", @@ -1946,20 +2090,34 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByJsonPathOrdered(PostgresDb.TableName, writer, "$.NumValue ? (@ < 15)", - [Field.Named("n:NumValue")]); - VerifyExpectedOrder(StreamText(stream), "one", "three", "two"); + var writer = WriteStream(stream); + try + { + await Json.WriteByJsonPathOrdered(PostgresDb.TableName, writer, "$.NumValue ? (@ < 15)", + [Field.Named("n:NumValue")]); + VerifyExpectedOrder(StreamText(stream), "one", "three", "two"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when sorting descending", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByJsonPathOrdered(PostgresDb.TableName, writer, "$.NumValue ? (@ < 15)", - [Field.Named("n:NumValue DESC")]); - VerifyExpectedOrder(StreamText(stream), "two", "three", "one"); + var writer = WriteStream(stream); + try + { + await Json.WriteByJsonPathOrdered(PostgresDb.TableName, writer, "$.NumValue ? (@ < 15)", + [Field.Named("n:NumValue DESC")]); + VerifyExpectedOrder(StreamText(stream), "two", "three", "one"); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteFirstByFields", @@ -1969,30 +2127,51 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, - [Field.Equal("Value", "another")]); - VerifyDocById(StreamText(stream), "two"); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "another")]); + VerifyDocById(StreamText(stream), "two"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when multiple documents are found", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, - [Field.Equal("Value", "purple")]); - VerifyAnyById(StreamText(stream), ["five", "four"]); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "purple")]); + VerifyAnyById(StreamText(stream), ["five", "four"]); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when a document is not found", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, - [Field.Equal("Value", "absent")]); - VerifyNoDoc(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByFields(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "absent")]); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteFirstByFieldsOrdered", @@ -2002,20 +2181,34 
@@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteFirstByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.Any, - [Field.Equal("Value", "purple")], [Field.Named("Id")]); - VerifyDocById(StreamText(stream), "five"); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "purple")], [Field.Named("Id")]); + VerifyDocById(StreamText(stream), "five"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when sorting descending", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteFirstByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.Any, - [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]); - VerifyDocById(StreamText(stream), "four"); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByFieldsOrdered(PostgresDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "purple")], [Field.Named("Id DESC")]); + VerifyDocById(StreamText(stream), "four"); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteFirstByContains", @@ -2025,27 +2218,48 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteFirstByContains(PostgresDb.TableName, writer, new { Value = "another" }); - VerifyDocById(StreamText(stream), "two"); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByContains(PostgresDb.TableName, writer, new { Value = "another" }); + VerifyDocById(StreamText(stream), "two"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when multiple documents are found", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteFirstByContains(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }); - VerifyAnyById(StreamText(stream), ["two", "four"]); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByContains(PostgresDb.TableName, writer, new { Sub = new { Foo = "green" } }); + VerifyAnyById(StreamText(stream), ["two", "four"]); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when a document is not found", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteFirstByContains(PostgresDb.TableName, writer, new { Value = "absent" }); - VerifyNoDoc(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByContains(PostgresDb.TableName, writer, new { Value = "absent" }); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteFirstByContainsOrdered", @@ -2055,20 +2269,34 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteFirstByContainsOrdered(PostgresDb.TableName, writer, - new { Sub = new { Foo 
= "green" } }, [Field.Named("Value")]); - VerifyDocById(StreamText(stream), "two"); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByContainsOrdered(PostgresDb.TableName, writer, + new { Sub = new { Foo = "green" } }, [Field.Named("Value")]); + VerifyDocById(StreamText(stream), "two"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when sorting descending", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteFirstByContainsOrdered(PostgresDb.TableName, writer, - new { Sub = new { Foo = "green" } }, [Field.Named("Value DESC")]); - VerifyDocById(StreamText(stream), "four"); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByContainsOrdered(PostgresDb.TableName, writer, + new { Sub = new { Foo = "green" } }, [Field.Named("Value DESC")]); + VerifyDocById(StreamText(stream), "four"); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteFirstByJsonPath", @@ -2078,27 +2306,48 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteFirstByJsonPath(PostgresDb.TableName, writer, """$.Value ? (@ == "FIRST!")"""); - VerifyDocById(StreamText(stream), "one"); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByJsonPath(PostgresDb.TableName, writer, """$.Value ? (@ == "FIRST!")"""); + VerifyDocById(StreamText(stream), "one"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when multiple documents are found", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteFirstByJsonPath(PostgresDb.TableName, writer, """$.Sub.Foo ? (@ == "green")"""); - VerifyAnyById(StreamText(stream), ["two", "four"]); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByJsonPath(PostgresDb.TableName, writer, """$.Sub.Foo ? (@ == "green")"""); + VerifyAnyById(StreamText(stream), ["two", "four"]); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when a document is not found", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteFirstByJsonPath(PostgresDb.TableName, writer, """$.Id ? (@ == "nope")"""); - VerifyNoDoc(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByJsonPath(PostgresDb.TableName, writer, """$.Id ? (@ == "nope")"""); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteFirstByJsonPathOrdered", @@ -2108,20 +2357,36 @@ public static class PostgresCSharpTests await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteFirstByJsonPathOrdered(PostgresDb.TableName, writer, """$.Sub.Foo ? (@ == "green")""", - [Field.Named("Sub.Bar")]); - VerifyDocById(StreamText(stream), "two"); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByJsonPathOrdered(PostgresDb.TableName, writer, + """$.Sub.Foo ? 
(@ == "green")""", + [Field.Named("Sub.Bar")]); + VerifyDocById(StreamText(stream), "two"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when sorting descending", async () => { await using var db = PostgresDb.BuildDb(); await LoadDocs(); await using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteFirstByJsonPathOrdered(PostgresDb.TableName, writer, """$.Sub.Foo ? (@ == "green")""", - [Field.Named("Sub.Bar DESC")]); - VerifyDocById(StreamText(stream), "four"); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByJsonPathOrdered(PostgresDb.TableName, writer, + """$.Sub.Foo ? (@ == "green")""", + [Field.Named("Sub.Bar DESC")]); + VerifyDocById(StreamText(stream), "four"); + } + finally + { + await writer.CompleteAsync(); + } }) ]) ]); diff --git a/src/Tests.CSharp/SqliteCSharpExtensionTests.cs b/src/Tests.CSharp/SqliteCSharpExtensionTests.cs index 6842b2b..e47aedc 100644 --- a/src/Tests.CSharp/SqliteCSharpExtensionTests.cs +++ b/src/Tests.CSharp/SqliteCSharpExtensionTests.cs @@ -1,3 +1,4 @@ +using System.IO.Pipelines; using Expecto.CSharp; using Expecto; using BitBadger.Documents.Sqlite; @@ -25,24 +26,16 @@ public static class SqliteCSharpExtensionTests } /// Verify an empty JSON array - private static void VerifyEmpty(string json) - { + private static void VerifyEmpty(string json) => Expect.equal(json, "[]", "There should be no documents returned"); - } /// Verify an empty JSON document - private static void VerifyNoDoc(string json) - { + private static void VerifyNoDoc(string json) => Expect.equal(json, "{}", "There should be no document returned"); - } /// Set up a stream writer for a test - private static StreamWriter WriteStream(Stream stream) - { - StreamWriter writer = new(stream); - writer.AutoFlush = true; - return writer; - } + private static PipeWriter WriteStream(Stream stream) => + PipeWriter.Create(stream, new StreamPipeWriterOptions(leaveOpen: true)); /// Get the text of the given stream private static string StreamText(Stream stream) @@ -127,16 +120,22 @@ public static class SqliteCSharpExtensionTests await LoadDocs(conn); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteCustomJsonArray(Query.Find(SqliteDb.TableName), [], writer, Results.JsonFromData); - - var json = StreamText(stream); - VerifyBeginEnd(json); - Expect.stringContains(json, JsonDocument.One, "Document ID `one` should have been found"); - Expect.stringContains(json, JsonDocument.Two,"Document ID `two` should have been found"); - Expect.stringContains(json, JsonDocument.Three, "Document ID `three` should have been found"); - Expect.stringContains(json, JsonDocument.Four, "Document ID `four` should have been found"); - Expect.stringContains(json, JsonDocument.Five, "Document ID `five` should have been found"); + var writer = WriteStream(stream); + try + { + await conn.WriteCustomJsonArray(Query.Find(SqliteDb.TableName), [], writer, Results.JsonFromData); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, JsonDocument.One, "Document ID `one` should have been found"); + Expect.stringContains(json, JsonDocument.Two, "Document ID `two` should have been found"); + Expect.stringContains(json, JsonDocument.Three, "Document ID `three` should have been found"); + Expect.stringContains(json, JsonDocument.Four, "Document ID `four` should have been found"); + Expect.stringContains(json, JsonDocument.Five, "Document ID `five` should have been 
found"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when data is not found", async () => { @@ -145,12 +144,18 @@ public static class SqliteCSharpExtensionTests await LoadDocs(conn); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteCustomJsonArray( - $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", - [new SqliteParameter("@value", 100)], writer, Results.JsonFromData); - - VerifyEmpty(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await conn.WriteCustomJsonArray( + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", + [new SqliteParameter("@value", 100)], writer, Results.JsonFromData); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("CustomSingle", @@ -828,13 +833,21 @@ public static class SqliteCSharpExtensionTests await conn.Insert(SqliteDb.TableName, new SubDocument { Foo = "five", Bar = "six" }); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonAll(SqliteDb.TableName, writer); - var json = StreamText(stream); - VerifyBeginEnd(json); - Expect.stringContains(json, """{"Foo":"one","Bar":"two"}""", "The first document was not found"); - Expect.stringContains(json, """{"Foo":"three","Bar":"four"}""", "The second document was not found"); - Expect.stringContains(json, """{"Foo":"five","Bar":"six"}""", "The third document was not found"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonAll(SqliteDb.TableName, writer); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, """{"Foo":"one","Bar":"two"}""", "The first document was not found"); + Expect.stringContains(json, """{"Foo":"three","Bar":"four"}""", + "The second document was not found"); + Expect.stringContains(json, """{"Foo":"five","Bar":"six"}""", "The third document was not found"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when there is no data", async () => { @@ -842,9 +855,16 @@ public static class SqliteCSharpExtensionTests await using var conn = Sqlite.Configuration.DbConn(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonAll(SqliteDb.TableName, writer); - VerifyEmpty(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonAll(SqliteDb.TableName, writer); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteJsonAllOrdered", @@ -856,11 +876,18 @@ public static class SqliteCSharpExtensionTests await LoadDocs(conn); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonAllOrdered(SqliteDb.TableName, writer, [Field.Named("n:NumValue")]); - Expect.equal(StreamText(stream), - $"[{JsonDocument.One},{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.Four},{JsonDocument.Five}]", - "The documents were not ordered correctly"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonAllOrdered(SqliteDb.TableName, writer, [Field.Named("n:NumValue")]); + Expect.equal(StreamText(stream), + $"[{JsonDocument.One},{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.Four},{JsonDocument.Five}]", + "The documents were not ordered correctly"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when ordering numerically descending", async () => { @@ -869,11 
+896,18 @@ public static class SqliteCSharpExtensionTests await LoadDocs(conn); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonAllOrdered(SqliteDb.TableName, writer, [Field.Named("n:NumValue DESC")]); - Expect.equal(StreamText(stream), - $"[{JsonDocument.Five},{JsonDocument.Four},{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One}]", - "The documents were not ordered correctly"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonAllOrdered(SqliteDb.TableName, writer, [Field.Named("n:NumValue DESC")]); + Expect.equal(StreamText(stream), + $"[{JsonDocument.Five},{JsonDocument.Four},{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One}]", + "The documents were not ordered correctly"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when ordering alphabetically", async () => { @@ -882,11 +916,18 @@ public static class SqliteCSharpExtensionTests await LoadDocs(conn); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonAllOrdered(SqliteDb.TableName, writer, [Field.Named("Id DESC")]); - Expect.equal(StreamText(stream), - $"[{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Four},{JsonDocument.Five}]", - "The documents were not ordered correctly"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonAllOrdered(SqliteDb.TableName, writer, [Field.Named("Id DESC")]); + Expect.equal(StreamText(stream), + $"[{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Four},{JsonDocument.Five}]", + "The documents were not ordered correctly"); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteJsonById", @@ -898,9 +939,16 @@ public static class SqliteCSharpExtensionTests await LoadDocs(conn); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonById(SqliteDb.TableName, writer, "two"); - Expect.equal(StreamText(stream), JsonDocument.Two, "The incorrect document was returned"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonById(SqliteDb.TableName, writer, "two"); + Expect.equal(StreamText(stream), JsonDocument.Two, "The incorrect document was returned"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when a document is not found", async () => { @@ -909,9 +957,16 @@ public static class SqliteCSharpExtensionTests await LoadDocs(conn); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonById(SqliteDb.TableName, writer, "three hundred eighty-seven"); - VerifyNoDoc(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonById(SqliteDb.TableName, writer, "three hundred eighty-seven"); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteJsonByFields", @@ -923,13 +978,20 @@ public static class SqliteCSharpExtensionTests await LoadDocs(conn); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonByFields(SqliteDb.TableName, writer, FieldMatch.Any, - [Field.Greater("NumValue", 15)]); - var json = StreamText(stream); - VerifyBeginEnd(json); - Expect.stringContains(json, JsonDocument.Four, "Document `four` should have been returned"); - Expect.stringContains(json, JsonDocument.Five, "Document `five` should have been returned"); + var writer = WriteStream(stream); + try + { + await 
conn.WriteJsonByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 15)]); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, JsonDocument.Four, "Document `four` should have been returned"); + Expect.stringContains(json, JsonDocument.Five, "Document `five` should have been returned"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when documents are found using IN with numeric field", async () => { @@ -938,11 +1000,18 @@ public static class SqliteCSharpExtensionTests await LoadDocs(conn); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonByFields(SqliteDb.TableName, writer, FieldMatch.All, - [Field.In("NumValue", [2, 4, 6, 8])]); - Expect.equal(StreamText(stream), $"[{JsonDocument.Three}]", - "There should have been one document returned"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFields(SqliteDb.TableName, writer, FieldMatch.All, + [Field.In("NumValue", [2, 4, 6, 8])]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Three}]", + "There should have been one document returned"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when documents are not found", async () => { @@ -951,10 +1020,17 @@ public static class SqliteCSharpExtensionTests await LoadDocs(conn); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonByFields(SqliteDb.TableName, writer, FieldMatch.Any, - [Field.Greater("NumValue", 100)]); - VerifyEmpty(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 100)]); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds for InArray when matching documents exist", async () => { @@ -964,15 +1040,22 @@ public static class SqliteCSharpExtensionTests foreach (var doc in ArrayDocument.TestDocuments) await conn.Insert(SqliteDb.TableName, doc); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonByFields(SqliteDb.TableName, writer, FieldMatch.All, - [Field.InArray("Values", SqliteDb.TableName, ["c"])]); - var json = StreamText(stream); - VerifyBeginEnd(json); - Expect.stringContains(json, """{"Id":"first","Values":["a","b","c"]}""", - "Document `first` should have been returned"); - Expect.stringContains(json, """{"Id":"second","Values":["c","d","e"]}""", - "Document `second` should have been returned"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFields(SqliteDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", SqliteDb.TableName, ["c"])]); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, """{"Id":"first","Values":["a","b","c"]}""", + "Document `first` should have been returned"); + Expect.stringContains(json, """{"Id":"second","Values":["c","d","e"]}""", + "Document `second` should have been returned"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds for InArray when no matching documents exist", async () => { @@ -982,10 +1065,17 @@ public static class SqliteCSharpExtensionTests foreach (var doc in ArrayDocument.TestDocuments) await conn.Insert(SqliteDb.TableName, doc); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonByFields(SqliteDb.TableName, 
writer, FieldMatch.All, - [Field.InArray("Values", SqliteDb.TableName, ["j"])]); - VerifyEmpty(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFields(SqliteDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", SqliteDb.TableName, ["j"])]); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteJsonByFieldsOrdered", @@ -997,11 +1087,18 @@ public static class SqliteCSharpExtensionTests await LoadDocs(conn); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, - [Field.Greater("NumValue", 15)], [Field.Named("Id")]); - Expect.equal(StreamText(stream), $"[{JsonDocument.Five},{JsonDocument.Four}]", - "Incorrect documents were returned"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 15)], [Field.Named("Id")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Five},{JsonDocument.Four}]", + "Incorrect documents were returned"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when sorting descending", async () => { @@ -1010,11 +1107,18 @@ public static class SqliteCSharpExtensionTests await LoadDocs(conn); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, - [Field.Greater("NumValue", 15)], [Field.Named("Id DESC")]); - Expect.equal(StreamText(stream), $"[{JsonDocument.Four},{JsonDocument.Five}]", - "Incorrect documents were returned"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 15)], [Field.Named("Id DESC")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Four},{JsonDocument.Five}]", + "Incorrect documents were returned"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when sorting case-sensitively", async () => { @@ -1023,11 +1127,18 @@ public static class SqliteCSharpExtensionTests await LoadDocs(conn); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.All, - [Field.LessOrEqual("NumValue", 10)], [Field.Named("Value")]); - Expect.equal(StreamText(stream), $"[{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Two}]", - "Documents not ordered correctly"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.All, + [Field.LessOrEqual("NumValue", 10)], [Field.Named("Value")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Two}]", + "Documents not ordered correctly"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when sorting case-insensitively", async () => { @@ -1036,11 +1147,18 @@ public static class SqliteCSharpExtensionTests await LoadDocs(conn); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.All, - [Field.LessOrEqual("NumValue", 10)], [Field.Named("i:Value")]); - Expect.equal(StreamText(stream), $"[{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.One}]", - "Documents not ordered correctly"); + var writer = 
WriteStream(stream); + try + { + await conn.WriteJsonByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.All, + [Field.LessOrEqual("NumValue", 10)], [Field.Named("i:Value")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.One}]", + "Documents not ordered correctly"); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteJsonFirstByFields", @@ -1052,10 +1170,17 @@ public static class SqliteCSharpExtensionTests await LoadDocs(conn); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, - [Field.Equal("Value", "another")]); - Expect.equal(StreamText(stream), JsonDocument.Two, "The incorrect document was returned"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "another")]); + Expect.equal(StreamText(stream), JsonDocument.Two, "The incorrect document was returned"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when multiple documents are found", async () => { @@ -1064,12 +1189,19 @@ public static class SqliteCSharpExtensionTests await LoadDocs(conn); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, - [Field.Equal("Sub.Foo", "green")]); - var json = StreamText(stream); - Expect.notEqual(json, "{}", "There should have been a document returned"); - VerifyAny(json, [JsonDocument.Two, JsonDocument.Four]); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")]); + var json = StreamText(stream); + Expect.notEqual(json, "{}", "There should have been a document returned"); + VerifyAny(json, [JsonDocument.Two, JsonDocument.Four]); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when a document is not found", async () => { @@ -1078,10 +1210,17 @@ public static class SqliteCSharpExtensionTests await LoadDocs(conn); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, - [Field.Equal("Value", "absent")]); - VerifyNoDoc(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "absent")]); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteJsonFirstByFieldsOrdered", @@ -1093,10 +1232,17 @@ public static class SqliteCSharpExtensionTests await LoadDocs(conn); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonFirstByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, - [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar")]); - Expect.equal(StreamText(stream), JsonDocument.Two, "An incorrect document was returned"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar")]); + Expect.equal(StreamText(stream), JsonDocument.Two, "An incorrect document was returned"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when sorting descending", async () => { 
@@ -1105,10 +1251,17 @@ public static class SqliteCSharpExtensionTests await LoadDocs(conn); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await conn.WriteJsonFirstByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, - [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar DESC")]); - Expect.equal(StreamText(stream), JsonDocument.Four, "An incorrect document was returned"); + var writer = WriteStream(stream); + try + { + await conn.WriteJsonFirstByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar DESC")]); + Expect.equal(StreamText(stream), JsonDocument.Four, "An incorrect document was returned"); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("UpdateById", diff --git a/src/Tests.CSharp/SqliteCSharpTests.cs b/src/Tests.CSharp/SqliteCSharpTests.cs index 5c89aab..c1e10d8 100644 --- a/src/Tests.CSharp/SqliteCSharpTests.cs +++ b/src/Tests.CSharp/SqliteCSharpTests.cs @@ -1,4 +1,5 @@ -using Expecto.CSharp; +using System.IO.Pipelines; +using Expecto.CSharp; using Expecto; using Microsoft.FSharp.Core; using BitBadger.Documents.Sqlite; @@ -178,24 +179,16 @@ public static class SqliteCSharpTests } /// Verify an empty JSON array - private static void VerifyEmpty(string json) - { + private static void VerifyEmpty(string json) => Expect.equal(json, "[]", "There should be no documents returned"); - } /// Verify an empty JSON document - private static void VerifyNoDoc(string json) - { + private static void VerifyNoDoc(string json) => Expect.equal(json, "{}", "There should be no document returned"); - } /// Set up a stream writer for a test - private static StreamWriter WriteStream(Stream stream) - { - StreamWriter writer = new(stream); - writer.AutoFlush = true; - return writer; - } + private static PipeWriter WriteStream(Stream stream) => + PipeWriter.Create(stream, new StreamPipeWriterOptions(leaveOpen: true)); /// Get the text of the given stream private static string StreamText(Stream stream) @@ -274,16 +267,22 @@ public static class SqliteCSharpTests await LoadDocs(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Custom.WriteJsonArray(Query.Find(SqliteDb.TableName), [], writer, Results.JsonFromData); - - var json = StreamText(stream); - VerifyBeginEnd(json); - Expect.stringContains(json, JsonDocument.One, "Document ID `one` should have been found"); - Expect.stringContains(json, JsonDocument.Two,"Document ID `two` should have been found"); - Expect.stringContains(json, JsonDocument.Three, "Document ID `three` should have been found"); - Expect.stringContains(json, JsonDocument.Four, "Document ID `four` should have been found"); - Expect.stringContains(json, JsonDocument.Five, "Document ID `five` should have been found"); + var writer = WriteStream(stream); + try + { + await Custom.WriteJsonArray(Query.Find(SqliteDb.TableName), [], writer, Results.JsonFromData); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, JsonDocument.One, "Document ID `one` should have been found"); + Expect.stringContains(json, JsonDocument.Two, "Document ID `two` should have been found"); + Expect.stringContains(json, JsonDocument.Three, "Document ID `three` should have been found"); + Expect.stringContains(json, JsonDocument.Four, "Document ID `four` should have been found"); + Expect.stringContains(json, JsonDocument.Five, "Document ID `five` should have been found"); + } + finally + { + await 
writer.CompleteAsync(); + } }), TestCase("succeeds when data is not found", async () => { @@ -291,12 +290,18 @@ public static class SqliteCSharpTests await LoadDocs(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Custom.WriteJsonArray( - $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", - [new SqliteParameter("@value", 100)], writer, Results.JsonFromData); - - VerifyEmpty(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await Custom.WriteJsonArray( + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value", + [new SqliteParameter("@value", 100)], writer, Results.JsonFromData); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("Single", @@ -1096,22 +1101,37 @@ public static class SqliteCSharpTests await Document.Insert(SqliteDb.TableName, new SubDocument { Foo = "five", Bar = "six" }); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteAll(SqliteDb.TableName, writer); - var json = StreamText(stream); - VerifyBeginEnd(json); - Expect.stringContains(json, """{"Foo":"one","Bar":"two"}""", "The first document was not found"); - Expect.stringContains(json, """{"Foo":"three","Bar":"four"}""", "The second document was not found"); - Expect.stringContains(json, """{"Foo":"five","Bar":"six"}""", "The third document was not found"); + var writer = WriteStream(stream); + try + { + await Json.WriteAll(SqliteDb.TableName, writer); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, """{"Foo":"one","Bar":"two"}""", "The first document was not found"); + Expect.stringContains(json, """{"Foo":"three","Bar":"four"}""", + "The second document was not found"); + Expect.stringContains(json, """{"Foo":"five","Bar":"six"}""", "The third document was not found"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when there is no data", async () => { await using var db = await SqliteDb.BuildDb(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteAll(SqliteDb.TableName, writer); - VerifyEmpty(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await Json.WriteAll(SqliteDb.TableName, writer); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteAllOrdered", @@ -1122,11 +1142,18 @@ public static class SqliteCSharpTests await LoadDocs(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteAllOrdered(SqliteDb.TableName, writer, [Field.Named("n:NumValue")]); - Expect.equal(StreamText(stream), - $"[{JsonDocument.One},{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.Four},{JsonDocument.Five}]", - "The documents were not ordered correctly"); + var writer = WriteStream(stream); + try + { + await Json.WriteAllOrdered(SqliteDb.TableName, writer, [Field.Named("n:NumValue")]); + Expect.equal(StreamText(stream), + $"[{JsonDocument.One},{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.Four},{JsonDocument.Five}]", + "The documents were not ordered correctly"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when ordering numerically descending", async () => { @@ -1134,11 +1161,18 @@ public static class SqliteCSharpTests await LoadDocs(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await 
Json.WriteAllOrdered(SqliteDb.TableName, writer, [Field.Named("n:NumValue DESC")]); - Expect.equal(StreamText(stream), - $"[{JsonDocument.Five},{JsonDocument.Four},{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One}]", - "The documents were not ordered correctly"); + var writer = WriteStream(stream); + try + { + await Json.WriteAllOrdered(SqliteDb.TableName, writer, [Field.Named("n:NumValue DESC")]); + Expect.equal(StreamText(stream), + $"[{JsonDocument.Five},{JsonDocument.Four},{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One}]", + "The documents were not ordered correctly"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when ordering alphabetically", async () => { @@ -1146,11 +1180,18 @@ public static class SqliteCSharpTests await LoadDocs(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteAllOrdered(SqliteDb.TableName, writer, [Field.Named("Id DESC")]); - Expect.equal(StreamText(stream), - $"[{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Four},{JsonDocument.Five}]", - "The documents were not ordered correctly"); + var writer = WriteStream(stream); + try + { + await Json.WriteAllOrdered(SqliteDb.TableName, writer, [Field.Named("Id DESC")]); + Expect.equal(StreamText(stream), + $"[{JsonDocument.Two},{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Four},{JsonDocument.Five}]", + "The documents were not ordered correctly"); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteById", @@ -1161,9 +1202,16 @@ public static class SqliteCSharpTests await LoadDocs(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteById(SqliteDb.TableName, writer, "two"); - Expect.equal(StreamText(stream), JsonDocument.Two, "The incorrect document was returned"); + var writer = WriteStream(stream); + try + { + await Json.WriteById(SqliteDb.TableName, writer, "two"); + Expect.equal(StreamText(stream), JsonDocument.Two, "The incorrect document was returned"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when a document is not found", async () => { @@ -1171,9 +1219,16 @@ public static class SqliteCSharpTests await LoadDocs(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteById(SqliteDb.TableName, writer, "three hundred eighty-seven"); - VerifyNoDoc(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await Json.WriteById(SqliteDb.TableName, writer, "three hundred eighty-seven"); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteByFields", @@ -1184,12 +1239,20 @@ public static class SqliteCSharpTests await LoadDocs(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByFields(SqliteDb.TableName, writer, FieldMatch.Any, [Field.Greater("NumValue", 15)]); - var json = StreamText(stream); - VerifyBeginEnd(json); - Expect.stringContains(json, JsonDocument.Four, "Document `four` should have been returned"); - Expect.stringContains(json, JsonDocument.Five, "Document `five` should have been returned"); + var writer = WriteStream(stream); + try + { + await Json.WriteByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 15)]); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, JsonDocument.Four, "Document `four` should have been returned"); + 
Expect.stringContains(json, JsonDocument.Five, "Document `five` should have been returned"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when documents are found using IN with numeric field", async () => { @@ -1197,11 +1260,18 @@ public static class SqliteCSharpTests await LoadDocs(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByFields(SqliteDb.TableName, writer, FieldMatch.All, - [Field.In("NumValue", [2, 4, 6, 8])]); - Expect.equal(StreamText(stream), $"[{JsonDocument.Three}]", - "There should have been one document returned"); + var writer = WriteStream(stream); + try + { + await Json.WriteByFields(SqliteDb.TableName, writer, FieldMatch.All, + [Field.In("NumValue", [2, 4, 6, 8])]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Three}]", + "There should have been one document returned"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when documents are not found", async () => { @@ -1209,9 +1279,17 @@ public static class SqliteCSharpTests await LoadDocs(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByFields(SqliteDb.TableName, writer, FieldMatch.Any, [Field.Greater("NumValue", 100)]); - VerifyEmpty(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await Json.WriteByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 100)]); + VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds for InArray when matching documents exist", async () => { @@ -1220,15 +1298,22 @@ public static class SqliteCSharpTests foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(SqliteDb.TableName, doc); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByFields(SqliteDb.TableName, writer, FieldMatch.All, - [Field.InArray("Values", SqliteDb.TableName, ["c"])]); - var json = StreamText(stream); - VerifyBeginEnd(json); - Expect.stringContains(json, """{"Id":"first","Values":["a","b","c"]}""", - "Document `first` should have been returned"); - Expect.stringContains(json, """{"Id":"second","Values":["c","d","e"]}""", - "Document `second` should have been returned"); + var writer = WriteStream(stream); + try + { + await Json.WriteByFields(SqliteDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", SqliteDb.TableName, ["c"])]); + var json = StreamText(stream); + VerifyBeginEnd(json); + Expect.stringContains(json, """{"Id":"first","Values":["a","b","c"]}""", + "Document `first` should have been returned"); + Expect.stringContains(json, """{"Id":"second","Values":["c","d","e"]}""", + "Document `second` should have been returned"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds for InArray when no matching documents exist", async () => { @@ -1237,10 +1322,17 @@ public static class SqliteCSharpTests foreach (var doc in ArrayDocument.TestDocuments) await Document.Insert(SqliteDb.TableName, doc); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByFields(SqliteDb.TableName, writer, FieldMatch.All, - [Field.InArray("Values", SqliteDb.TableName, ["j"])]); - VerifyEmpty(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await Json.WriteByFields(SqliteDb.TableName, writer, FieldMatch.All, + [Field.InArray("Values", SqliteDb.TableName, ["j"])]); + 
VerifyEmpty(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteByFieldsOrdered", @@ -1251,11 +1343,18 @@ public static class SqliteCSharpTests await LoadDocs(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, - [Field.Greater("NumValue", 15)], [Field.Named("Id")]); - Expect.equal(StreamText(stream), $"[{JsonDocument.Five},{JsonDocument.Four}]", - "Incorrect documents were returned"); + var writer = WriteStream(stream); + try + { + await Json.WriteByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 15)], [Field.Named("Id")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Five},{JsonDocument.Four}]", + "Incorrect documents were returned"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when sorting descending", async () => { @@ -1263,11 +1362,18 @@ public static class SqliteCSharpTests await LoadDocs(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, - [Field.Greater("NumValue", 15)], [Field.Named("Id DESC")]); - Expect.equal(StreamText(stream), $"[{JsonDocument.Four},{JsonDocument.Five}]", - "Incorrect documents were returned"); + var writer = WriteStream(stream); + try + { + await Json.WriteByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Greater("NumValue", 15)], [Field.Named("Id DESC")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Four},{JsonDocument.Five}]", + "Incorrect documents were returned"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when sorting case-sensitively", async () => { @@ -1275,11 +1381,18 @@ public static class SqliteCSharpTests await LoadDocs(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.All, - [Field.LessOrEqual("NumValue", 10)], [Field.Named("Value")]); - Expect.equal(StreamText(stream), $"[{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Two}]", - "Documents not ordered correctly"); + var writer = WriteStream(stream); + try + { + await Json.WriteByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.All, + [Field.LessOrEqual("NumValue", 10)], [Field.Named("Value")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Three},{JsonDocument.One},{JsonDocument.Two}]", + "Documents not ordered correctly"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when sorting case-insensitively", async () => { @@ -1287,11 +1400,18 @@ public static class SqliteCSharpTests await LoadDocs(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.All, - [Field.LessOrEqual("NumValue", 10)], [Field.Named("i:Value")]); - Expect.equal(StreamText(stream), $"[{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.One}]", - "Documents not ordered correctly"); + var writer = WriteStream(stream); + try + { + await Json.WriteByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.All, + [Field.LessOrEqual("NumValue", 10)], [Field.Named("i:Value")]); + Expect.equal(StreamText(stream), $"[{JsonDocument.Three},{JsonDocument.Two},{JsonDocument.One}]", + "Documents not ordered correctly"); + } + finally + { + await writer.CompleteAsync(); + } }) ]), 
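// The tests above and below all share one completion pattern; a minimal, self-contained sketch of it
// (the byte payload here is illustrative, not taken from the suite): PipeWriter.Create with
// leaveOpen: true keeps the MemoryStream readable after completion, and the try/finally guarantees
// CompleteAsync flushes the pipe even when an assertion throws mid-test.
//
//     using MemoryStream stream = new();
//     var writer = PipeWriter.Create(stream, new StreamPipeWriterOptions(leaveOpen: true));
//     try
//     {
//         await writer.WriteAsync("[]"u8.ToArray()); // emit JSON through the pipe
//     }
//     finally
//     {
//         await writer.CompleteAsync(); // flush and mark the writer finished
//     }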
TestList("WriteFirstByFields", @@ -1302,10 +1422,17 @@ public static class SqliteCSharpTests await LoadDocs(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, - [Field.Equal("Value", "another")]); - Expect.equal(StreamText(stream), JsonDocument.Two, "The incorrect document was returned"); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "another")]); + Expect.equal(StreamText(stream), JsonDocument.Two, "The incorrect document was returned"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when multiple documents are found", async () => { @@ -1313,12 +1440,19 @@ public static class SqliteCSharpTests await LoadDocs(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, - [Field.Equal("Sub.Foo", "green")]); - var json = StreamText(stream); - Expect.notEqual(json, "{}", "There should have been a document returned"); - VerifyAny(json, [JsonDocument.Two, JsonDocument.Four]); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")]); + var json = StreamText(stream); + Expect.notEqual(json, "{}", "There should have been a document returned"); + VerifyAny(json, [JsonDocument.Two, JsonDocument.Four]); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when a document is not found", async () => { @@ -1326,10 +1460,17 @@ public static class SqliteCSharpTests await LoadDocs(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, - [Field.Equal("Value", "absent")]); - VerifyNoDoc(StreamText(stream)); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByFields(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Value", "absent")]); + VerifyNoDoc(StreamText(stream)); + } + finally + { + await writer.CompleteAsync(); + } }) ]), TestList("WriteFirstByFieldsOrdered", @@ -1340,10 +1481,17 @@ public static class SqliteCSharpTests await LoadDocs(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteFirstByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, - [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar")]); - Expect.equal(StreamText(stream), JsonDocument.Two, "An incorrect document was returned"); + var writer = WriteStream(stream); + try + { + await Json.WriteFirstByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar")]); + Expect.equal(StreamText(stream), JsonDocument.Two, "An incorrect document was returned"); + } + finally + { + await writer.CompleteAsync(); + } }), TestCase("succeeds when sorting descending", async () => { @@ -1351,10 +1499,17 @@ public static class SqliteCSharpTests await LoadDocs(); using MemoryStream stream = new(); - await using var writer = WriteStream(stream); - await Json.WriteFirstByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, - [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar DESC")]); - Expect.equal(StreamText(stream), JsonDocument.Four, "An incorrect document was returned"); + var writer = WriteStream(stream); + try + { + await 
Json.WriteFirstByFieldsOrdered(SqliteDb.TableName, writer, FieldMatch.Any, + [Field.Equal("Sub.Foo", "green")], [Field.Named("Sub.Bar DESC")]); + Expect.equal(StreamText(stream), JsonDocument.Four, "An incorrect document was returned"); + } + finally + { + await writer.CompleteAsync(); + } }) ]) ]); diff --git a/src/Tests/CommonTests.fs b/src/Tests/CommonTests.fs index 98f6fe8..40aeb8e 100644 --- a/src/Tests/CommonTests.fs +++ b/src/Tests/CommonTests.fs @@ -1,5 +1,7 @@ module CommonTests +open System.IO +open System.IO.Pipelines open BitBadger.Documents open Expecto @@ -484,6 +486,81 @@ let queryTests = testList "Query" [ ] ] +let private streamText (stream: Stream) = + stream.Position <- 0L + use reader = new StreamReader(stream) + reader.ReadToEnd() + +/// Unit tests for the PipeWriter module +let pipeWriterTests = testList "Extensions.PipeWriter" [ + testList "writeString" [ + testTask "succeeds when writer is open" { + use stream = new MemoryStream() + let writer = PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true)) + try + let! result = PipeWriter.writeString writer "abc" + Expect.isTrue result "The write operation should have been successful" + Expect.equal (streamText stream) "abc" "The string was not written correctly" + finally + writer.Complete() + } + testTask "succeeds when writer is completed" { + use stream = new MemoryStream() + let writer = PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true)) + do! writer.CompleteAsync() + + let! result = PipeWriter.writeString writer "abc" + Expect.isFalse result "The write operation should have returned false" + Expect.equal (streamText stream) "" "No text should have been written" + } + ] + testList "writeStrings" [ + testTask "succeeds with no strings" { + use stream = new MemoryStream() + let writer = PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true)) + try + do! PipeWriter.writeStrings writer [] + Expect.equal (streamText stream) "[]" "An empty sequence of strings was not written correctly" + finally + writer.Complete() + } + testTask "succeeds with one string" { + use stream = new MemoryStream() + let writer = PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true)) + try + do! PipeWriter.writeStrings writer [ "le-test" ] + Expect.equal (streamText stream) "[le-test]" "A sequence of one string was not written correctly" + finally + writer.Complete() + } + testTask "succeeds with many strings" { + use stream = new MemoryStream() + let writer = PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true)) + try + do! PipeWriter.writeStrings writer [ "z"; "y"; "x"; "c"; "b"; "a" ] + Expect.equal (streamText stream) "[z,y,x,c,b,a]" "A sequence of many strings was not written correctly" + finally + writer.Complete() + } + testTask "succeeds when the writer is completed early" { + use stream = new MemoryStream() + let writer = PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true)) + let items = seq { + "a" + "b" + "c" + writer.Complete() + "d" + "e" + "f" + } + + do! 
PipeWriter.writeStrings writer items + Expect.equal (streamText stream) "[a,b,c" "The writing should have stopped when the writer completed" + } + ] +] + /// Tests which do not hit the database let all = testList "Common" [ comparisonTests @@ -492,5 +569,6 @@ let all = testList "Common" [ parameterNameTests autoIdTests queryTests + pipeWriterTests testSequenced configurationTests ] diff --git a/src/Tests/PostgresExtensionTests.fs b/src/Tests/PostgresExtensionTests.fs index df675c1..d780072 100644 --- a/src/Tests/PostgresExtensionTests.fs +++ b/src/Tests/PostgresExtensionTests.fs @@ -1,6 +1,7 @@ module PostgresExtensionTests open System.IO +open System.IO.Pipelines open BitBadger.Documents open BitBadger.Documents.Postgres open BitBadger.Documents.Tests @@ -16,9 +17,7 @@ let private mkConn (db: ThrowawayPostgresDb) = /// Set up a stream writer for a test let private writeStream (stream: Stream) = - let writer = new StreamWriter(stream) - writer.AutoFlush <- true - writer + PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true)) /// Get the text of the given stream let private streamText (stream: Stream) = @@ -142,13 +141,15 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeCustomJsonArray (Query.find PostgresDb.TableName) [] writer jsonFromData - - let docs = streamText stream - Expect.stringStarts docs "[" "The JSON array should have started with `[`" - Expect.hasLength (docs.Split "{\"Id\":") 6 "There should have been 5 documents returned" - Expect.stringEnds docs "]" "The JSON array should have ended with `[`" + let writer = writeStream stream + try + do! conn.writeCustomJsonArray (Query.find PostgresDb.TableName) [] writer jsonFromData + let docs = streamText stream + Expect.stringStarts docs "[" "The JSON array should have started with `[`" + Expect.hasLength (docs.Split "{\"Id\":") 6 "There should have been 5 documents returned" + Expect.stringEnds docs "]" "The JSON array should have ended with `]`" + finally + writer.Complete() } testTask "succeeds when data is not found" { use db = PostgresDb.BuildDb() use conn = mkConn db do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeCustomJsonArray - $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath" - [ "@path", Sql.string "$.NumValue ? (@ > 100)" ] - writer - jsonFromData - - Expect.equal (streamText stream) "[]" "There should have been no documents returned" + let writer = writeStream stream + try + do! conn.writeCustomJsonArray + $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath" + [ "@path", Sql.string "$.NumValue ? (@ > 100)" ] + writer + jsonFromData + Expect.equal (streamText stream) "[]" "There should have been no documents returned" + finally + writer.Complete() } ] testList "customSingle" [ @@ -1170,17 +1173,23 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonAll PostgresDb.TableName writer - verifyAllData (streamText stream) + let writer = writeStream stream + try + do! conn.writeJsonAll PostgresDb.TableName writer + verifyAllData (streamText stream) + finally + writer.Complete() } testTask "succeeds when there is no data" { use db = PostgresDb.BuildDb() use conn = mkConn db use stream = new MemoryStream() - use writer = writeStream stream - do! 
conn.writeJsonAll PostgresDb.TableName writer - verifyEmpty (streamText stream) + let writer = writeStream stream + try + do! conn.writeJsonAll PostgresDb.TableName writer + verifyEmpty (streamText stream) + finally + writer.Complete() } ] testList "writeJsonAllOrdered" [ @@ -1190,9 +1199,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonAllOrdered PostgresDb.TableName writer [ Field.Named "n:NumValue" ] - verifyExpectedOrder (streamText stream) "one" "three" (Some "two") (Some "four") (Some "five") + let writer = writeStream stream + try + do! conn.writeJsonAllOrdered PostgresDb.TableName writer [ Field.Named "n:NumValue" ] + verifyExpectedOrder (streamText stream) "one" "three" (Some "two") (Some "four") (Some "five") + finally + writer.Complete() } testTask "succeeds when ordering numerically descending" { use db = PostgresDb.BuildDb() @@ -1200,9 +1212,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonAllOrdered PostgresDb.TableName writer [ Field.Named "n:NumValue DESC" ] - verifyExpectedOrder (streamText stream) "five" "four" (Some "two") (Some "three") (Some "one") + let writer = writeStream stream + try + do! conn.writeJsonAllOrdered PostgresDb.TableName writer [ Field.Named "n:NumValue DESC" ] + verifyExpectedOrder (streamText stream) "five" "four" (Some "two") (Some "three") (Some "one") + finally + writer.Complete() } testTask "succeeds when ordering alphabetically" { use db = PostgresDb.BuildDb() @@ -1210,9 +1225,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonAllOrdered PostgresDb.TableName writer [ Field.Named "Id DESC" ] - verifyExpectedOrder (streamText stream) "two" "three" (Some "one") (Some "four") (Some "five") + let writer = writeStream stream + try + do! conn.writeJsonAllOrdered PostgresDb.TableName writer [ Field.Named "Id DESC" ] + verifyExpectedOrder (streamText stream) "two" "three" (Some "one") (Some "four") (Some "five") + finally + writer.Complete() } ] testList "writeJsonById" [ @@ -1222,11 +1240,14 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonById PostgresDb.TableName writer "two" - let json = streamText stream - Expect.stringStarts json """{"Id": "two",""" "An incorrect document was returned" - Expect.stringEnds json "}" "JSON should have ended with this document" + let writer = writeStream stream + try + do! conn.writeJsonById PostgresDb.TableName writer "two" + let json = streamText stream + Expect.stringStarts json """{"Id": "two",""" "An incorrect document was returned" + Expect.stringEnds json "}" "JSON should have ended with this document" + finally + writer.Complete() } testTask "succeeds when a document is not found" { use db = PostgresDb.BuildDb() @@ -1234,9 +1255,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonById PostgresDb.TableName writer "three hundred eighty-seven" - verifyNoDoc (streamText stream) + let writer = writeStream stream + try + do! conn.writeJsonById PostgresDb.TableName writer "three hundred eighty-seven" + verifyNoDoc (streamText stream) + finally + writer.Complete() } ] testList "writeJsonByFields" [ @@ -1246,10 +1270,16 @@ let integrationTests = do! 
loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByFields - PostgresDb.TableName writer All [ Field.In "Value" [ "purple"; "blue" ]; Field.Exists "Sub" ] - verifySingleById (streamText stream) "four" + let writer = writeStream stream + try + do! conn.writeJsonByFields + PostgresDb.TableName + writer + All + [ Field.In "Value" [ "purple"; "blue" ]; Field.Exists "Sub" ] + verifySingleById (streamText stream) "four" + finally + writer.Complete() } testTask "succeeds when documents are found using IN with numeric field" { use db = PostgresDb.BuildDb() @@ -1257,9 +1287,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByFields PostgresDb.TableName writer All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] - verifySingleById (streamText stream) "three" + let writer = writeStream stream + try + do! conn.writeJsonByFields PostgresDb.TableName writer All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] + verifySingleById (streamText stream) "three" + finally + writer.Complete() } testTask "succeeds when documents are not found" { use db = PostgresDb.BuildDb() @@ -1267,10 +1300,16 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByFields - PostgresDb.TableName writer All [ Field.Equal "Value" "mauve"; Field.NotEqual "NumValue" 40 ] - verifyEmpty (streamText stream) + let writer = writeStream stream + try + do! conn.writeJsonByFields + PostgresDb.TableName + writer + All + [ Field.Equal "Value" "mauve"; Field.NotEqual "NumValue" 40 ] + verifyEmpty (streamText stream) + finally + writer.Complete() } testTask "succeeds for InArray when matching documents exist" { use db = PostgresDb.BuildDb() @@ -1279,13 +1318,16 @@ let integrationTests = for doc in ArrayDocument.TestDocuments do do! conn.insert PostgresDb.TableName doc use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByFields - PostgresDb.TableName writer All [ Field.InArray "Values" PostgresDb.TableName [ "c" ] ] - let json = streamText stream - verifyBeginEnd json - verifyDocById json "first" - verifyDocById json "second" + let writer = writeStream stream + try + do! conn.writeJsonByFields + PostgresDb.TableName writer All [ Field.InArray "Values" PostgresDb.TableName [ "c" ] ] + let json = streamText stream + verifyBeginEnd json + verifyDocById json "first" + verifyDocById json "second" + finally + writer.Complete() } testTask "succeeds for InArray when no matching documents exist" { use db = PostgresDb.BuildDb() @@ -1294,10 +1336,13 @@ let integrationTests = for doc in ArrayDocument.TestDocuments do do! conn.insert PostgresDb.TableName doc use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByFields - PostgresDb.TableName writer All [ Field.InArray "Values" PostgresDb.TableName [ "j" ] ] - verifyEmpty (streamText stream) + let writer = writeStream stream + try + do! conn.writeJsonByFields + PostgresDb.TableName writer All [ Field.InArray "Values" PostgresDb.TableName [ "j" ] ] + verifyEmpty (streamText stream) + finally + writer.Complete() } ] testList "writeJsonByFieldsOrdered" [ @@ -1307,10 +1352,13 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! 
conn.writeJsonByFieldsOrdered - PostgresDb.TableName writer All [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] - verifyExpectedOrder (streamText stream) "five" "four" None None None + let writer = writeStream stream + try + do! conn.writeJsonByFieldsOrdered + PostgresDb.TableName writer All [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + verifyExpectedOrder (streamText stream) "five" "four" None None None + finally + writer.Complete() } testTask "succeeds when sorting descending" { use db = PostgresDb.BuildDb() @@ -1318,10 +1366,13 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByFieldsOrdered - PostgresDb.TableName writer All [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] - verifyExpectedOrder (streamText stream) "four" "five" None None None + let writer = writeStream stream + try + do! conn.writeJsonByFieldsOrdered + PostgresDb.TableName writer All [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + verifyExpectedOrder (streamText stream) "four" "five" None None None + finally + writer.Complete() } ] testList "writeJsonByContains" [ @@ -1331,12 +1382,15 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByContains PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} - let json = streamText stream - verifyBeginEnd json - verifyDocById json "two" - verifyDocById json "four" + let writer = writeStream stream + try + do! conn.writeJsonByContains PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} + let json = streamText stream + verifyBeginEnd json + verifyDocById json "two" + verifyDocById json "four" + finally + writer.Complete() } testTask "succeeds when documents are not found" { use db = PostgresDb.BuildDb() @@ -1344,9 +1398,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByContains PostgresDb.TableName writer {| Value = "mauve" |} - verifyEmpty (streamText stream) + let writer = writeStream stream + try + do! conn.writeJsonByContains PostgresDb.TableName writer {| Value = "mauve" |} + verifyEmpty (streamText stream) + finally + writer.Complete() } ] testList "writeJsonByContainsOrdered" [ @@ -1357,10 +1414,13 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByContainsOrdered - PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar" ] - verifyExpectedOrder (streamText stream) "two" "four" None None None + let writer = writeStream stream + try + do! conn.writeJsonByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar" ] + verifyExpectedOrder (streamText stream) "two" "four" None None None + finally + writer.Complete() } testTask "succeeds when sorting descending" { use db = PostgresDb.BuildDb() @@ -1368,10 +1428,13 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByContainsOrdered - PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar DESC" ] - verifyExpectedOrder (streamText stream) "four" "two" None None None + let writer = writeStream stream + try + do! 
conn.writeJsonByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar DESC" ] + verifyExpectedOrder (streamText stream) "four" "two" None None None + finally + writer.Complete() } ] testList "writeJsonByJsonPath" [ @@ -1381,13 +1444,16 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByJsonPath PostgresDb.TableName writer "$.NumValue ? (@ < 15)" - let json = streamText stream - verifyBeginEnd json - verifyDocById json "one" - verifyDocById json "two" - verifyDocById json "three" + let writer = writeStream stream + try + do! conn.writeJsonByJsonPath PostgresDb.TableName writer "$.NumValue ? (@ < 15)" + let json = streamText stream + verifyBeginEnd json + verifyDocById json "one" + verifyDocById json "two" + verifyDocById json "three" + finally + writer.Complete() } testTask "succeeds when documents are not found" { use db = PostgresDb.BuildDb() @@ -1395,9 +1461,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByJsonPath PostgresDb.TableName writer "$.NumValue ? (@ < 0)" - verifyEmpty (streamText stream) + let writer = writeStream stream + try + do! conn.writeJsonByJsonPath PostgresDb.TableName writer "$.NumValue ? (@ < 0)" + verifyEmpty (streamText stream) + finally + writer.Complete() } ] testList "writeJsonByJsonPathOrdered" [ @@ -1408,10 +1477,13 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByJsonPathOrdered - PostgresDb.TableName writer "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue" ] - verifyExpectedOrder (streamText stream) "one" "three" (Some "two") None None + let writer = writeStream stream + try + do! conn.writeJsonByJsonPathOrdered + PostgresDb.TableName writer "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue" ] + verifyExpectedOrder (streamText stream) "one" "three" (Some "two") None None + finally + writer.Complete() } testTask "succeeds when sorting descending" { use db = PostgresDb.BuildDb() @@ -1419,10 +1491,13 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByJsonPathOrdered - PostgresDb.TableName writer "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue DESC" ] - verifyExpectedOrder (streamText stream) "two" "three" (Some "one") None None + let writer = writeStream stream + try + do! conn.writeJsonByJsonPathOrdered + PostgresDb.TableName writer "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue DESC" ] + verifyExpectedOrder (streamText stream) "two" "three" (Some "one") None None + finally + writer.Complete() } ] testList "writeJsonFirstByFields" [ @@ -1432,9 +1507,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "another" ] - verifyDocById (streamText stream) "two" + let writer = writeStream stream + try + do! conn.writeJsonFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "another" ] + verifyDocById (streamText stream) "two" + finally + writer.Complete() } testTask "succeeds when multiple documents are found" { use db = PostgresDb.BuildDb() @@ -1442,9 +1520,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! 
conn.writeJsonFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] - verifyAnyById (streamText stream) [ "five"; "four" ] + let writer = writeStream stream + try + do! conn.writeJsonFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] + verifyAnyById (streamText stream) [ "five"; "four" ] + finally + writer.Complete() } testTask "succeeds when a document is not found" { use db = PostgresDb.BuildDb() @@ -1452,9 +1533,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "absent" ] - verifyNoDoc (streamText stream) + let writer = writeStream stream + try + do! conn.writeJsonFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "absent" ] + verifyNoDoc (streamText stream) + finally + writer.Complete() } ] testList "writeJsonFirstByFieldsOrdered" [ @@ -1464,10 +1548,13 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonFirstByFieldsOrdered - PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] - verifyDocById (streamText stream) "five" + let writer = writeStream stream + try + do! conn.writeJsonFirstByFieldsOrdered + PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + verifyDocById (streamText stream) "five" + finally + writer.Complete() } testTask "succeeds when sorting descending" { use db = PostgresDb.BuildDb() @@ -1475,10 +1562,13 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonFirstByFieldsOrdered - PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] - verifyDocById (streamText stream) "four" + let writer = writeStream stream + try + do! conn.writeJsonFirstByFieldsOrdered + PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + verifyDocById (streamText stream) "four" + finally + writer.Complete() } ] testList "writeJsonFirstByContains" [ @@ -1488,9 +1578,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonFirstByContains PostgresDb.TableName writer {| Value = "another" |} - verifyDocById (streamText stream) "two" + let writer = writeStream stream + try + do! conn.writeJsonFirstByContains PostgresDb.TableName writer {| Value = "another" |} + verifyDocById (streamText stream) "two" + finally + writer.Complete() } testTask "succeeds when multiple documents are found" { use db = PostgresDb.BuildDb() @@ -1498,9 +1591,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonFirstByContains PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} - verifyAnyById (streamText stream) [ "two"; "four" ] + let writer = writeStream stream + try + do! conn.writeJsonFirstByContains PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} + verifyAnyById (streamText stream) [ "two"; "four" ] + finally + writer.Complete() } testTask "succeeds when a document is not found" { use db = PostgresDb.BuildDb() @@ -1508,9 +1604,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! 
conn.writeJsonFirstByContains PostgresDb.TableName writer {| Value = "absent" |} - verifyNoDoc (streamText stream) + let writer = writeStream stream + try + do! conn.writeJsonFirstByContains PostgresDb.TableName writer {| Value = "absent" |} + verifyNoDoc (streamText stream) + finally + writer.Complete() } ] testList "writeJsonFirstByContainsOrdered" [ @@ -1520,10 +1619,13 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonFirstByContainsOrdered - PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Value" ] - verifyDocById (streamText stream) "two" + let writer = writeStream stream + try + do! conn.writeJsonFirstByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Value" ] + verifyDocById (streamText stream) "two" + finally + writer.Complete() } testTask "succeeds when sorting descending" { use db = PostgresDb.BuildDb() @@ -1531,10 +1633,13 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonFirstByContainsOrdered - PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Value DESC" ] - verifyDocById (streamText stream) "four" + let writer = writeStream stream + try + do! conn.writeJsonFirstByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Value DESC" ] + verifyDocById (streamText stream) "four" + finally + writer.Complete() } ] testList "writeJsonFirstByJsonPath" [ @@ -1544,9 +1649,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonFirstByJsonPath PostgresDb.TableName writer """$.Value ? (@ == "FIRST!")""" - verifyDocById (streamText stream) "one" + let writer = writeStream stream + try + do! conn.writeJsonFirstByJsonPath PostgresDb.TableName writer """$.Value ? (@ == "FIRST!")""" + verifyDocById (streamText stream) "one" + finally + writer.Complete() } testTask "succeeds when multiple documents are found" { use db = PostgresDb.BuildDb() @@ -1554,9 +1662,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonFirstByJsonPath PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" - verifyAnyById (streamText stream) [ "two"; "four" ] + let writer = writeStream stream + try + do! conn.writeJsonFirstByJsonPath PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" + verifyAnyById (streamText stream) [ "two"; "four" ] + finally + writer.Complete() } testTask "succeeds when a document is not found" { use db = PostgresDb.BuildDb() @@ -1564,9 +1675,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonFirstByJsonPath PostgresDb.TableName writer """$.Id ? (@ == "nope")""" - verifyNoDoc (streamText stream) + let writer = writeStream stream + try + do! conn.writeJsonFirstByJsonPath PostgresDb.TableName writer """$.Id ? (@ == "nope")""" + verifyNoDoc (streamText stream) + finally + writer.Complete() } ] testList "writeJsonFirstByJsonPathOrdered" [ @@ -1576,10 +1690,13 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonFirstByJsonPathOrdered - PostgresDb.TableName writer """$.Sub.Foo ? 
(@ == "green")""" [ Field.Named "Sub.Bar" ] - verifyDocById (streamText stream) "two" + let writer = writeStream stream + try + do! conn.writeJsonFirstByJsonPathOrdered + PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar" ] + verifyDocById (streamText stream) "two" + finally + writer.Complete() } testTask "succeeds when sorting descending" { use db = PostgresDb.BuildDb() @@ -1587,10 +1704,13 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonFirstByJsonPathOrdered - PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar DESC" ] - verifyDocById (streamText stream) "four" + let writer = writeStream stream + try + do! conn.writeJsonFirstByJsonPathOrdered + PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar DESC" ] + verifyDocById (streamText stream) "four" + finally + writer.Complete() } ] testList "updateById" [ diff --git a/src/Tests/PostgresTests.fs b/src/Tests/PostgresTests.fs index d166cbf..8fd4e73 100644 --- a/src/Tests/PostgresTests.fs +++ b/src/Tests/PostgresTests.fs @@ -1,6 +1,7 @@ module PostgresTests open System.IO +open System.IO.Pipelines open Expecto open BitBadger.Documents open BitBadger.Documents.Postgres @@ -278,9 +279,7 @@ let loadDocs () = backgroundTask { /// Set up a stream writer for a test let writeStream (stream: Stream) = - let writer = new StreamWriter(stream) - writer.AutoFlush <- true - writer + PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true)) /// Get the text of the given stream let streamText (stream: Stream) = @@ -339,7 +338,7 @@ let customTests = testList "Custom" [ let! docs = Custom.jsonArray (Query.find PostgresDb.TableName) [] jsonFromData Expect.stringStarts docs "[" "The JSON array should have started with `[`" Expect.hasLength ((string docs).Split "{\"Id\":") 6 "There should have been 5 documents returned" - Expect.stringEnds docs "]" "The JSON array should have ended with `[`" + Expect.stringEnds docs "]" "The JSON array should have ended with `]`" } testTask "succeeds when data is not found" { use db = PostgresDb.BuildDb() @@ -359,27 +358,32 @@ let customTests = testList "Custom" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Custom.writeJsonArray (Query.find PostgresDb.TableName) [] writer jsonFromData - - let docs = streamText stream - Expect.stringStarts docs "[" "The JSON array should have started with `[`" - Expect.hasLength (docs.Split "{\"Id\":") 6 "There should have been 5 documents returned" - Expect.stringEnds docs "]" "The JSON array should have ended with `[`" + let writer = writeStream stream + + try + do! Custom.writeJsonArray (Query.find PostgresDb.TableName) [] writer jsonFromData + let docs = streamText stream + Expect.stringStarts docs "[" "The JSON array should have started with `[`" + Expect.hasLength (docs.Split "{\"Id\":") 6 "There should have been 5 documents returned" + Expect.stringEnds docs "]" "The JSON array should have ended with `]`" + finally + writer.Complete() } testTask "succeeds when data is not found" { use db = PostgresDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Custom.writeJsonArray - $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath" - [ "@path", Sql.string "$.NumValue ? 
(@ > 100)" ] - writer - jsonFromData - - Expect.equal (streamText stream) "[]" "There should have been no documents returned" + let writer = writeStream stream + try + do! Custom.writeJsonArray + $"SELECT data FROM {PostgresDb.TableName} WHERE data @? @path::jsonpath" + [ "@path", Sql.string "$.NumValue ? (@ > 100)" ] + writer + jsonFromData + Expect.equal (streamText stream) "[]" "There should have been no documents returned" + finally + writer.Complete() } ] testList "single" [ @@ -1457,16 +1461,22 @@ let jsonTests = testList "Json" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeAll PostgresDb.TableName writer - verifyAllData (streamText stream) + let writer = writeStream stream + try + do! Json.writeAll PostgresDb.TableName writer + verifyAllData (streamText stream) + finally + writer.Complete() } testTask "succeeds when there is no data" { use db = PostgresDb.BuildDb() use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeAll PostgresDb.TableName writer - verifyEmpty (streamText stream) + let writer = writeStream stream + try + do! Json.writeAll PostgresDb.TableName writer + verifyEmpty (streamText stream) + finally + writer.Complete() } ] testList "writeAllOrdered" [ @@ -1475,27 +1485,36 @@ let jsonTests = testList "Json" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeAllOrdered PostgresDb.TableName writer [ Field.Named "n:NumValue" ] - verifyExpectedOrder (streamText stream) "one" "three" (Some "two") (Some "four") (Some "five") + let writer = writeStream stream + try + do! Json.writeAllOrdered PostgresDb.TableName writer [ Field.Named "n:NumValue" ] + verifyExpectedOrder (streamText stream) "one" "three" (Some "two") (Some "four") (Some "five") + finally + writer.Complete() } testTask "succeeds when ordering numerically descending" { use db = PostgresDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeAllOrdered PostgresDb.TableName writer [ Field.Named "n:NumValue DESC" ] - verifyExpectedOrder (streamText stream) "five" "four" (Some "two") (Some "three") (Some "one") + let writer = writeStream stream + try + do! Json.writeAllOrdered PostgresDb.TableName writer [ Field.Named "n:NumValue DESC" ] + verifyExpectedOrder (streamText stream) "five" "four" (Some "two") (Some "three") (Some "one") + finally + writer.Complete() } testTask "succeeds when ordering alphabetically" { use db = PostgresDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeAllOrdered PostgresDb.TableName writer [ Field.Named "Id DESC" ] - verifyExpectedOrder (streamText stream) "two" "three" (Some "one") (Some "four") (Some "five") + let writer = writeStream stream + try + do! Json.writeAllOrdered PostgresDb.TableName writer [ Field.Named "Id DESC" ] + verifyExpectedOrder (streamText stream) "two" "three" (Some "one") (Some "four") (Some "five") + finally + writer.Complete() } ] testList "writeById" [ @@ -1504,20 +1523,26 @@ let jsonTests = testList "Json" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeById PostgresDb.TableName writer "two" - let json = streamText stream - Expect.stringStarts json """{"Id": "two",""" "An incorrect document was returned" - Expect.stringEnds json "}" "JSON should have ended with this document" + let writer = writeStream stream + try + do! 
Json.writeById PostgresDb.TableName writer "two" + let json = streamText stream + Expect.stringStarts json """{"Id": "two",""" "An incorrect document was returned" + Expect.stringEnds json "}" "JSON should have ended with this document" + finally + writer.Complete() } testTask "succeeds when a document is not found" { use db = PostgresDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeById PostgresDb.TableName writer "three hundred eighty-seven" - verifyNoDoc (streamText stream) + let writer = writeStream stream + try + do! Json.writeById PostgresDb.TableName writer "three hundred eighty-seven" + verifyNoDoc (streamText stream) + finally + writer.Complete() } ] testList "writeByFields" [ @@ -1526,29 +1551,38 @@ let jsonTests = testList "Json" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByFields - PostgresDb.TableName writer All [ Field.In "Value" [ "purple"; "blue" ]; Field.Exists "Sub" ] - verifySingleById (streamText stream) "four" + let writer = writeStream stream + try + do! Json.writeByFields + PostgresDb.TableName writer All [ Field.In "Value" [ "purple"; "blue" ]; Field.Exists "Sub" ] + verifySingleById (streamText stream) "four" + finally + writer.Complete() } testTask "succeeds when documents are found using IN with numeric field" { use db = PostgresDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByFields PostgresDb.TableName writer All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] - verifySingleById (streamText stream) "three" + let writer = writeStream stream + try + do! Json.writeByFields PostgresDb.TableName writer All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] + verifySingleById (streamText stream) "three" + finally + writer.Complete() } testTask "succeeds when documents are not found" { use db = PostgresDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByFields - PostgresDb.TableName writer All [ Field.Equal "Value" "mauve"; Field.NotEqual "NumValue" 40 ] - verifyEmpty (streamText stream) + let writer = writeStream stream + try + do! Json.writeByFields + PostgresDb.TableName writer All [ Field.Equal "Value" "mauve"; Field.NotEqual "NumValue" 40 ] + verifyEmpty (streamText stream) + finally + writer.Complete() } testTask "succeeds for InArray when matching documents exist" { use db = PostgresDb.BuildDb() @@ -1556,13 +1590,16 @@ let jsonTests = testList "Json" [ for doc in ArrayDocument.TestDocuments do do! insert PostgresDb.TableName doc use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByFields - PostgresDb.TableName writer All [ Field.InArray "Values" PostgresDb.TableName [ "c" ] ] - let json = streamText stream - verifyBeginEnd json - verifyDocById json "first" - verifyDocById json "second" + let writer = writeStream stream + try + do! Json.writeByFields + PostgresDb.TableName writer All [ Field.InArray "Values" PostgresDb.TableName [ "c" ] ] + let json = streamText stream + verifyBeginEnd json + verifyDocById json "first" + verifyDocById json "second" + finally + writer.Complete() } testTask "succeeds for InArray when no matching documents exist" { use db = PostgresDb.BuildDb() @@ -1570,10 +1607,13 @@ let jsonTests = testList "Json" [ for doc in ArrayDocument.TestDocuments do do! insert PostgresDb.TableName doc use stream = new MemoryStream() - use writer = writeStream stream - do! 
Json.writeByFields - PostgresDb.TableName writer All [ Field.InArray "Values" PostgresDb.TableName [ "j" ] ] - verifyEmpty (streamText stream) + let writer = writeStream stream + try + do! Json.writeByFields + PostgresDb.TableName writer All [ Field.InArray "Values" PostgresDb.TableName [ "j" ] ] + verifyEmpty (streamText stream) + finally + writer.Complete() } ] testList "writeByFieldsOrdered" [ @@ -1582,20 +1622,26 @@ let jsonTests = testList "Json" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByFieldsOrdered - PostgresDb.TableName writer All [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] - verifyExpectedOrder (streamText stream) "five" "four" None None None + let writer = writeStream stream + try + do! Json.writeByFieldsOrdered + PostgresDb.TableName writer All [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + verifyExpectedOrder (streamText stream) "five" "four" None None None + finally + writer.Complete() } testTask "succeeds when sorting descending" { use db = PostgresDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByFieldsOrdered - PostgresDb.TableName writer All [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] - verifyExpectedOrder (streamText stream) "four" "five" None None None + let writer = writeStream stream + try + do! Json.writeByFieldsOrdered + PostgresDb.TableName writer All [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + verifyExpectedOrder (streamText stream) "four" "five" None None None + finally + writer.Complete() } ] testList "writeByContains" [ @@ -1604,21 +1650,27 @@ let jsonTests = testList "Json" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByContains PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} - let json = streamText stream - verifyBeginEnd json - verifyDocById json "two" - verifyDocById json "four" + let writer = writeStream stream + try + do! Json.writeByContains PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} + let json = streamText stream + verifyBeginEnd json + verifyDocById json "two" + verifyDocById json "four" + finally + writer.Complete() } testTask "succeeds when documents are not found" { use db = PostgresDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByContains PostgresDb.TableName writer {| Value = "mauve" |} - verifyEmpty (streamText stream) + let writer = writeStream stream + try + do! Json.writeByContains PostgresDb.TableName writer {| Value = "mauve" |} + verifyEmpty (streamText stream) + finally + writer.Complete() } ] testList "writeByContainsOrdered" [ @@ -1628,20 +1680,26 @@ let jsonTests = testList "Json" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByContainsOrdered - PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar" ] - verifyExpectedOrder (streamText stream) "two" "four" None None None + let writer = writeStream stream + try + do! Json.writeByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar" ] + verifyExpectedOrder (streamText stream) "two" "four" None None None + finally + writer.Complete() } testTask "succeeds when sorting descending" { use db = PostgresDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! 
Json.writeByContainsOrdered - PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar DESC" ] - verifyExpectedOrder (streamText stream) "four" "two" None None None + let writer = writeStream stream + try + do! Json.writeByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Sub.Bar DESC" ] + verifyExpectedOrder (streamText stream) "four" "two" None None None + finally + writer.Complete() } ] testList "writeByJsonPath" [ @@ -1650,22 +1708,28 @@ let jsonTests = testList "Json" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByJsonPath PostgresDb.TableName writer "$.NumValue ? (@ < 15)" - let json = streamText stream - verifyBeginEnd json - verifyDocById json "one" - verifyDocById json "two" - verifyDocById json "three" + let writer = writeStream stream + try + do! Json.writeByJsonPath PostgresDb.TableName writer "$.NumValue ? (@ < 15)" + let json = streamText stream + verifyBeginEnd json + verifyDocById json "one" + verifyDocById json "two" + verifyDocById json "three" + finally + writer.Complete() } testTask "succeeds when documents are not found" { use db = PostgresDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByJsonPath PostgresDb.TableName writer "$.NumValue ? (@ < 0)" - verifyEmpty (streamText stream) + let writer = writeStream stream + try + do! Json.writeByJsonPath PostgresDb.TableName writer "$.NumValue ? (@ < 0)" + verifyEmpty (streamText stream) + finally + writer.Complete() } ] testList "writeByJsonPathOrdered" [ @@ -1675,20 +1739,26 @@ let jsonTests = testList "Json" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByJsonPathOrdered - PostgresDb.TableName writer "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue" ] - verifyExpectedOrder (streamText stream) "one" "three" (Some "two") None None + let writer = writeStream stream + try + do! Json.writeByJsonPathOrdered + PostgresDb.TableName writer "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue" ] + verifyExpectedOrder (streamText stream) "one" "three" (Some "two") None None + finally + writer.Complete() } testTask "succeeds when sorting descending" { use db = PostgresDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByJsonPathOrdered - PostgresDb.TableName writer "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue DESC" ] - verifyExpectedOrder (streamText stream) "two" "three" (Some "one") None None + let writer = writeStream stream + try + do! Json.writeByJsonPathOrdered + PostgresDb.TableName writer "$.NumValue ? (@ < 15)" [ Field.Named "n:NumValue DESC" ] + verifyExpectedOrder (streamText stream) "two" "three" (Some "one") None None + finally + writer.Complete() } ] testList "writeFirstByFields" [ @@ -1697,27 +1767,36 @@ let jsonTests = testList "Json" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "another" ] - verifyDocById (streamText stream) "two" + let writer = writeStream stream + try + do! Json.writeFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "another" ] + verifyDocById (streamText stream) "two" + finally + writer.Complete() } testTask "succeeds when multiple documents are found" { use db = PostgresDb.BuildDb() do! 
loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] - verifyAnyById (streamText stream) [ "five"; "four" ] + let writer = writeStream stream + try + do! Json.writeFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] + verifyAnyById (streamText stream) [ "five"; "four" ] + finally + writer.Complete() } testTask "succeeds when a document is not found" { use db = PostgresDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "absent" ] - verifyNoDoc (streamText stream) + let writer = writeStream stream + try + do! Json.writeFirstByFields PostgresDb.TableName writer Any [ Field.Equal "Value" "absent" ] + verifyNoDoc (streamText stream) + finally + writer.Complete() } ] testList "writeFirstByFieldsOrdered" [ @@ -1726,20 +1805,26 @@ let jsonTests = testList "Json" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeFirstByFieldsOrdered - PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] - verifyDocById (streamText stream) "five" + let writer = writeStream stream + try + do! Json.writeFirstByFieldsOrdered + PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id" ] + verifyDocById (streamText stream) "five" + finally + writer.Complete() } testTask "succeeds when sorting descending" { use db = PostgresDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeFirstByFieldsOrdered - PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] - verifyDocById (streamText stream) "four" + let writer = writeStream stream + try + do! Json.writeFirstByFieldsOrdered + PostgresDb.TableName writer Any [ Field.Equal "Value" "purple" ] [ Field.Named "Id DESC" ] + verifyDocById (streamText stream) "four" + finally + writer.Complete() } ] testList "writeFirstByContains" [ @@ -1748,27 +1833,36 @@ let jsonTests = testList "Json" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeFirstByContains PostgresDb.TableName writer {| Value = "another" |} - verifyDocById (streamText stream) "two" + let writer = writeStream stream + try + do! Json.writeFirstByContains PostgresDb.TableName writer {| Value = "another" |} + verifyDocById (streamText stream) "two" + finally + writer.Complete() } testTask "succeeds when multiple documents are found" { use db = PostgresDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeFirstByContains PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} - verifyAnyById (streamText stream) [ "two"; "four" ] + let writer = writeStream stream + try + do! Json.writeFirstByContains PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} + verifyAnyById (streamText stream) [ "two"; "four" ] + finally + writer.Complete() } testTask "succeeds when a document is not found" { use db = PostgresDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeFirstByContains PostgresDb.TableName writer {| Value = "absent" |} - verifyNoDoc (streamText stream) + let writer = writeStream stream + try + do! 
Json.writeFirstByContains PostgresDb.TableName writer {| Value = "absent" |} + verifyNoDoc (streamText stream) + finally + writer.Complete() } ] testList "writeFirstByContainsOrdered" [ @@ -1777,20 +1871,26 @@ let jsonTests = testList "Json" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeFirstByContainsOrdered - PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Value" ] - verifyDocById (streamText stream) "two" + let writer = writeStream stream + try + do! Json.writeFirstByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Value" ] + verifyDocById (streamText stream) "two" + finally + writer.Complete() } testTask "succeeds when sorting descending" { use db = PostgresDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeFirstByContainsOrdered - PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Value DESC" ] - verifyDocById (streamText stream) "four" + let writer = writeStream stream + try + do! Json.writeFirstByContainsOrdered + PostgresDb.TableName writer {| Sub = {| Foo = "green" |} |} [ Field.Named "Value DESC" ] + verifyDocById (streamText stream) "four" + finally + writer.Complete() } ] testList "writeFirstByJsonPath" [ @@ -1799,27 +1899,36 @@ let jsonTests = testList "Json" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeFirstByJsonPath PostgresDb.TableName writer """$.Value ? (@ == "FIRST!")""" - verifyDocById (streamText stream) "one" + let writer = writeStream stream + try + do! Json.writeFirstByJsonPath PostgresDb.TableName writer """$.Value ? (@ == "FIRST!")""" + verifyDocById (streamText stream) "one" + finally + writer.Complete() } testTask "succeeds when multiple documents are found" { use db = PostgresDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeFirstByJsonPath PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" - verifyAnyById (streamText stream) [ "two"; "four" ] + let writer = writeStream stream + try + do! Json.writeFirstByJsonPath PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" + verifyAnyById (streamText stream) [ "two"; "four" ] + finally + writer.Complete() } testTask "succeeds when a document is not found" { use db = PostgresDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeFirstByJsonPath PostgresDb.TableName writer """$.Id ? (@ == "nope")""" - verifyNoDoc (streamText stream) + let writer = writeStream stream + try + do! Json.writeFirstByJsonPath PostgresDb.TableName writer """$.Id ? (@ == "nope")""" + verifyNoDoc (streamText stream) + finally + writer.Complete() } ] testList "writeFirstByJsonPathOrdered" [ @@ -1828,20 +1937,26 @@ let jsonTests = testList "Json" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeFirstByJsonPathOrdered - PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar" ] - verifyDocById (streamText stream) "two" + let writer = writeStream stream + try + do! Json.writeFirstByJsonPathOrdered + PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar" ] + verifyDocById (streamText stream) "two" + finally + writer.Complete() } testTask "succeeds when sorting descending" { use db = PostgresDb.BuildDb() do! 
loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeFirstByJsonPathOrdered - PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar DESC" ] - verifyDocById (streamText stream) "four" + let writer = writeStream stream + try + do! Json.writeFirstByJsonPathOrdered + PostgresDb.TableName writer """$.Sub.Foo ? (@ == "green")""" [ Field.Named "Sub.Bar DESC" ] + verifyDocById (streamText stream) "four" + finally + writer.Complete() } ] ] diff --git a/src/Tests/SqliteExtensionTests.fs b/src/Tests/SqliteExtensionTests.fs index 44dca44..87f74d2 100644 --- a/src/Tests/SqliteExtensionTests.fs +++ b/src/Tests/SqliteExtensionTests.fs @@ -1,6 +1,7 @@ module SqliteExtensionTests open System.IO +open System.IO.Pipelines open System.Text.Json open BitBadger.Documents open BitBadger.Documents.Sqlite @@ -17,9 +18,7 @@ let integrationTests = /// Set up a stream writer for a test let writeStream (stream: Stream) = - let writer = new StreamWriter(stream) - writer.AutoFlush <- true - writer + PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true)) /// Get the text of the given stream let streamText (stream: Stream) = @@ -600,21 +599,27 @@ let integrationTests = do! conn.insert SqliteDb.TableName { Foo = "five"; Bar = "six" } use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonAll SqliteDb.TableName writer - let json = streamText stream - verifyBeginEnd json - Expect.stringContains json """{"Foo":"one","Bar":"two"}""" "The first document was not found" - Expect.stringContains json """{"Foo":"three","Bar":"four"}""" "The second document was not found" - Expect.stringContains json """{"Foo":"five","Bar":"six"}""" "The third document was not found" + let writer = writeStream stream + try + do! conn.writeJsonAll SqliteDb.TableName writer + let json = streamText stream + verifyBeginEnd json + Expect.stringContains json """{"Foo":"one","Bar":"two"}""" "The first document was not found" + Expect.stringContains json """{"Foo":"three","Bar":"four"}""" "The second document was not found" + Expect.stringContains json """{"Foo":"five","Bar":"six"}""" "The third document was not found" + finally + writer.Complete() } testTask "succeeds when there is no data" { use! db = SqliteDb.BuildDb() use conn = Configuration.dbConn () use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonAll SqliteDb.TableName writer - verifyEmpty (streamText stream) + let writer = writeStream stream + try + do! conn.writeJsonAll SqliteDb.TableName writer + verifyEmpty (streamText stream) + finally + writer.Complete() } ] testList "writeJsonAllOrdered" [ @@ -624,12 +629,15 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonAllOrdered SqliteDb.TableName writer [ Field.Named "n:NumValue" ] - Expect.equal - (streamText stream) - $"[{JsonDocument.one},{JsonDocument.three},{JsonDocument.two},{JsonDocument.four},{JsonDocument.five}]" - "The documents were not ordered correctly" + let writer = writeStream stream + try + do! conn.writeJsonAllOrdered SqliteDb.TableName writer [ Field.Named "n:NumValue" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.one},{JsonDocument.three},{JsonDocument.two},{JsonDocument.four},{JsonDocument.five}]" + "The documents were not ordered correctly" + finally + writer.Complete() } testTask "succeeds when ordering numerically descending" { use! 
db = SqliteDb.BuildDb() @@ -637,12 +645,15 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonAllOrdered SqliteDb.TableName writer [ Field.Named "n:NumValue DESC" ] - Expect.equal - (streamText stream) - $"[{JsonDocument.five},{JsonDocument.four},{JsonDocument.two},{JsonDocument.three},{JsonDocument.one}]" - "The documents were not ordered correctly" + let writer = writeStream stream + try + do! conn.writeJsonAllOrdered SqliteDb.TableName writer [ Field.Named "n:NumValue DESC" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.five},{JsonDocument.four},{JsonDocument.two},{JsonDocument.three},{JsonDocument.one}]" + "The documents were not ordered correctly" + finally + writer.Complete() } testTask "succeeds when ordering alphabetically" { use! db = SqliteDb.BuildDb() @@ -650,12 +661,15 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonAllOrdered SqliteDb.TableName writer [ Field.Named "Id DESC" ] - Expect.equal - (streamText stream) - $"[{JsonDocument.two},{JsonDocument.three},{JsonDocument.one},{JsonDocument.four},{JsonDocument.five}]" - "The documents were not ordered correctly" + let writer = writeStream stream + try + do! conn.writeJsonAllOrdered SqliteDb.TableName writer [ Field.Named "Id DESC" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.two},{JsonDocument.three},{JsonDocument.one},{JsonDocument.four},{JsonDocument.five}]" + "The documents were not ordered correctly" + finally + writer.Complete() } ] testList "writeJsonById" [ @@ -665,9 +679,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonById SqliteDb.TableName writer "two" - Expect.equal (streamText stream) JsonDocument.two "The incorrect document was returned" + let writer = writeStream stream + try + do! conn.writeJsonById SqliteDb.TableName writer "two" + Expect.equal (streamText stream) JsonDocument.two "The incorrect document was returned" + finally + writer.Complete() } testTask "succeeds when a document is not found" { use! db = SqliteDb.BuildDb() @@ -675,9 +692,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonById SqliteDb.TableName writer "three hundred eighty-seven" - verifyNoDoc (streamText stream) + let writer = writeStream stream + try + do! conn.writeJsonById SqliteDb.TableName writer "three hundred eighty-seven" + verifyNoDoc (streamText stream) + finally + writer.Complete() } ] testList "writeJsonByFields" [ @@ -687,12 +707,15 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByFields SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] - let json = streamText stream - verifyBeginEnd json - Expect.stringContains json JsonDocument.four "Document `four` should have been returned" - Expect.stringContains json JsonDocument.five "Document `five` should have been returned" + let writer = writeStream stream + try + do! 
conn.writeJsonByFields SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] + let json = streamText stream + verifyBeginEnd json + Expect.stringContains json JsonDocument.four "Document `four` should have been returned" + Expect.stringContains json JsonDocument.five "Document `five` should have been returned" + finally + writer.Complete() } testTask "succeeds when documents are found using IN with numeric field" { use! db = SqliteDb.BuildDb() @@ -700,9 +723,13 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByFields SqliteDb.TableName writer All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] - Expect.equal (streamText stream) $"[{JsonDocument.three}]" "There should have been one document returned" + let writer = writeStream stream + try + do! conn.writeJsonByFields SqliteDb.TableName writer All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] + Expect.equal + (streamText stream) $"[{JsonDocument.three}]" "There should have been one document returned" + finally + writer.Complete() } testTask "succeeds when documents are not found" { use! db = SqliteDb.BuildDb() @@ -710,9 +737,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByFields SqliteDb.TableName writer Any [ Field.Greater "NumValue" 100 ] - verifyEmpty (streamText stream) + let writer = writeStream stream + try + do! conn.writeJsonByFields SqliteDb.TableName writer Any [ Field.Greater "NumValue" 100 ] + verifyEmpty (streamText stream) + finally + writer.Complete() } testTask "succeeds for InArray when matching documents exist" { use! db = SqliteDb.BuildDb() @@ -721,15 +751,18 @@ let integrationTests = for doc in ArrayDocument.TestDocuments do do! conn.insert SqliteDb.TableName doc use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByFields - SqliteDb.TableName writer All [ Field.InArray "Values" SqliteDb.TableName [ "c" ] ] - let json = streamText stream - verifyBeginEnd json - Expect.stringContains - json """{"Id":"first","Values":["a","b","c"]}""" "Document `first` should have been returned" - Expect.stringContains - json """{"Id":"second","Values":["c","d","e"]}""" "Document `second` should have been returned" + let writer = writeStream stream + try + do! conn.writeJsonByFields + SqliteDb.TableName writer All [ Field.InArray "Values" SqliteDb.TableName [ "c" ] ] + let json = streamText stream + verifyBeginEnd json + Expect.stringContains + json """{"Id":"first","Values":["a","b","c"]}""" "Document `first` should have been returned" + Expect.stringContains + json """{"Id":"second","Values":["c","d","e"]}""" "Document `second` should have been returned" + finally + writer.Complete() } testTask "succeeds for InArray when no matching documents exist" { use! db = SqliteDb.BuildDb() @@ -738,10 +771,13 @@ let integrationTests = for doc in ArrayDocument.TestDocuments do do! conn.insert SqliteDb.TableName doc use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByFields - SqliteDb.TableName writer All [ Field.InArray "Values" SqliteDb.TableName [ "j" ] ] - verifyEmpty (streamText stream) + let writer = writeStream stream + try + do! conn.writeJsonByFields + SqliteDb.TableName writer All [ Field.InArray "Values" SqliteDb.TableName [ "j" ] ] + verifyEmpty (streamText stream) + finally + writer.Complete() } ] testList "writeJsonByFieldsOrdered" [ @@ -751,11 +787,16 @@ let integrationTests = do! 
loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByFieldsOrdered - SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id" ] - Expect.equal - (streamText stream) $"[{JsonDocument.five},{JsonDocument.four}]" "Incorrect documents were returned" + let writer = writeStream stream + try + do! conn.writeJsonByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.five},{JsonDocument.four}]" + "Incorrect documents were returned" + finally + writer.Complete() } testTask "succeeds when sorting descending" { use! db = SqliteDb.BuildDb() @@ -763,11 +804,16 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByFieldsOrdered - SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id DESC" ] - Expect.equal - (streamText stream) $"[{JsonDocument.four},{JsonDocument.five}]" "Incorrect documents were returned" + let writer = writeStream stream + try + do! conn.writeJsonByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id DESC" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.four},{JsonDocument.five}]" + "Incorrect documents were returned" + finally + writer.Complete() } testTask "succeeds when sorting case-sensitively" { use! db = SqliteDb.BuildDb() @@ -775,13 +821,16 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByFieldsOrdered - SqliteDb.TableName writer All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "Value" ] - Expect.equal - (streamText stream) - $"[{JsonDocument.three},{JsonDocument.one},{JsonDocument.two}]" - "Documents not ordered correctly" + let writer = writeStream stream + try + do! conn.writeJsonByFieldsOrdered + SqliteDb.TableName writer All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "Value" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.three},{JsonDocument.one},{JsonDocument.two}]" + "Documents not ordered correctly" + finally + writer.Complete() } testTask "succeeds when sorting case-insensitively" { use! db = SqliteDb.BuildDb() @@ -789,13 +838,16 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonByFieldsOrdered - SqliteDb.TableName writer All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "i:Value" ] - Expect.equal - (streamText stream) - $"[{JsonDocument.three},{JsonDocument.two},{JsonDocument.one}]" - "Documents not ordered correctly" + let writer = writeStream stream + try + do! conn.writeJsonByFieldsOrdered + SqliteDb.TableName writer All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "i:Value" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.three},{JsonDocument.two},{JsonDocument.one}]" + "Documents not ordered correctly" + finally + writer.Complete() } ] testList "writeJsonFirstByFields" [ @@ -805,9 +857,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Value" "another" ] - Expect.equal (streamText stream) JsonDocument.two "The incorrect document was returned" + let writer = writeStream stream + try + do! 
conn.writeJsonFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Value" "another" ] + Expect.equal (streamText stream) JsonDocument.two "The incorrect document was returned" + finally + writer.Complete() } testTask "succeeds when multiple documents are found" { use! db = SqliteDb.BuildDb() @@ -815,11 +870,14 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] - let json = streamText stream - Expect.notEqual json "{}" "There should have been a document returned" - verifyAny json [ JsonDocument.two; JsonDocument.four ] + let writer = writeStream stream + try + do! conn.writeJsonFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] + let json = streamText stream + Expect.notEqual json "{}" "There should have been a document returned" + verifyAny json [ JsonDocument.two; JsonDocument.four ] + finally + writer.Complete() } testTask "succeeds when a document is not found" { use! db = SqliteDb.BuildDb() @@ -827,9 +885,12 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Value" "absent" ] - verifyNoDoc (streamText stream) + let writer = writeStream stream + try + do! conn.writeJsonFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Value" "absent" ] + verifyNoDoc (streamText stream) + finally + writer.Complete() } ] testList "writeJsonFirstByFieldsOrdered" [ @@ -839,10 +900,13 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonFirstByFieldsOrdered - SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar" ] - Expect.equal (streamText stream) JsonDocument.two "An incorrect document was returned" + let writer = writeStream stream + try + do! conn.writeJsonFirstByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar" ] + Expect.equal (streamText stream) JsonDocument.two "An incorrect document was returned" + finally + writer.Complete() } testTask "succeeds when sorting descending" { use! db = SqliteDb.BuildDb() @@ -850,10 +914,17 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeJsonFirstByFieldsOrdered - SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar DESC" ] - Expect.equal (streamText stream) JsonDocument.four "An incorrect document was returned" + let writer = writeStream stream + try + do! conn.writeJsonFirstByFieldsOrdered + SqliteDb.TableName + writer + Any + [ Field.Equal "Sub.Foo" "green" ] + [ Field.Named "Sub.Bar DESC" ] + Expect.equal (streamText stream) JsonDocument.four "An incorrect document was returned" + finally + writer.Complete() } ] testList "updateById" [ @@ -1113,17 +1184,19 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! 
conn.writeCustomJsonArray (Query.find SqliteDb.TableName) [] writer jsonFromData - - let json = streamText stream - Expect.stringStarts json "[" "The JSON array should have started with `[`" - Expect.stringContains json JsonDocument.one "Document ID `one` should have been found" - Expect.stringContains json JsonDocument.two "Document ID `two` should have been found" - Expect.stringContains json JsonDocument.three "Document ID `three` should have been found" - Expect.stringContains json JsonDocument.four "Document ID `four` should have been found" - Expect.stringContains json JsonDocument.five "Document ID `five` should have been found" - Expect.stringEnds json "]" "The JSON array should have ended with `[`" + let writer = writeStream stream + try + do! conn.writeCustomJsonArray (Query.find SqliteDb.TableName) [] writer jsonFromData + let json = streamText stream + Expect.stringStarts json "[" "The JSON array should have started with `[`" + Expect.stringContains json JsonDocument.one "Document ID `one` should have been found" + Expect.stringContains json JsonDocument.two "Document ID `two` should have been found" + Expect.stringContains json JsonDocument.three "Document ID `three` should have been found" + Expect.stringContains json JsonDocument.four "Document ID `four` should have been found" + Expect.stringContains json JsonDocument.five "Document ID `five` should have been found" + Expect.stringEnds json "]" "The JSON array should have ended with `]`" + finally + writer.Complete() } testTask "succeeds when data is not found" { use! db = SqliteDb.BuildDb() @@ -1131,14 +1204,16 @@ let integrationTests = do! loadDocs conn use stream = new MemoryStream() - use writer = writeStream stream - do! conn.writeCustomJsonArray - $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value" - [ SqliteParameter("@value", 100) ] - writer - jsonFromData - - Expect.equal (streamText stream) "[]" "There should have been no documents returned" + let writer = writeStream stream + try + do! conn.writeCustomJsonArray + $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value" + [ SqliteParameter("@value", 100) ] + writer + jsonFromData + Expect.equal (streamText stream) "[]" "There should have been no documents returned" + finally + writer.Complete() } ] testList "customSingle" [ diff --git a/src/Tests/SqliteTests.fs b/src/Tests/SqliteTests.fs index 15e5d1d..cd81442 100644 --- a/src/Tests/SqliteTests.fs +++ b/src/Tests/SqliteTests.fs @@ -1,6 +1,7 @@ module SqliteTests open System.IO +open System.IO.Pipelines open System.Text.Json open BitBadger.Documents open BitBadger.Documents.Sqlite @@ -138,9 +139,7 @@ let loadDocs () = backgroundTask { /// Set up a stream writer for a test let writeStream (stream: Stream) = - let writer = new StreamWriter(stream) - writer.AutoFlush <- true - writer + PipeWriter.Create(stream, StreamPipeWriterOptions(leaveOpen = true)) /// Get the text of the given stream let streamText (stream: Stream) = @@ -217,31 +216,35 @@ let customTests = testList "Custom" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! 
Custom.writeJsonArray (Query.find SqliteDb.TableName) [] writer jsonFromData
-
-            let json = streamText stream
-            Expect.stringStarts json "[" "The JSON array should have started with `[`"
-            Expect.stringContains json JsonDocument.one "Document ID `one` should have been found"
-            Expect.stringContains json JsonDocument.two "Document ID `two` should have been found"
-            Expect.stringContains json JsonDocument.three "Document ID `three` should have been found"
-            Expect.stringContains json JsonDocument.four "Document ID `four` should have been found"
-            Expect.stringContains json JsonDocument.five "Document ID `five` should have been found"
-            Expect.stringEnds json "]" "The JSON array should have ended with `[`"
+            let writer = writeStream stream
+            try
+                do! Custom.writeJsonArray (Query.find SqliteDb.TableName) [] writer jsonFromData
+                let json = streamText stream
+                Expect.stringStarts json "[" "The JSON array should have started with `[`"
+                Expect.stringContains json JsonDocument.one "Document ID `one` should have been found"
+                Expect.stringContains json JsonDocument.two "Document ID `two` should have been found"
+                Expect.stringContains json JsonDocument.three "Document ID `three` should have been found"
+                Expect.stringContains json JsonDocument.four "Document ID `four` should have been found"
+                Expect.stringContains json JsonDocument.five "Document ID `five` should have been found"
+                Expect.stringEnds json "]" "The JSON array should have ended with `]`"
+            finally
+                writer.Complete()
        }
        testTask "succeeds when data is not found" {
            use! db = SqliteDb.BuildDb()
            do! loadDocs ()

            use stream = new MemoryStream()
-            use writer = writeStream stream
-            do! Custom.writeJsonArray
-                    $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value"
-                    [ SqliteParameter("@value", 100) ]
-                    writer
-                    jsonFromData
-
-            Expect.equal (streamText stream) "[]" "There should have been no documents returned"
+            let writer = writeStream stream
+            try
+                do! Custom.writeJsonArray
+                        $"SELECT data FROM {SqliteDb.TableName} WHERE data ->> 'NumValue' > @value"
+                        [ SqliteParameter("@value", 100) ]
+                        writer
+                        jsonFromData
+                Expect.equal (streamText stream) "[]" "There should have been no documents returned"
+            finally
+                writer.Complete()
        }
    ]
    testList "single" [
@@ -975,20 +978,26 @@ let jsonTests = testList "Json" [
            do! insert SqliteDb.TableName { Foo = "five"; Bar = "six" }

            use stream = new MemoryStream()
-            use writer = writeStream stream
-            do! Json.writeAll SqliteDb.TableName writer
-            let json = streamText stream
-            verifyBeginEnd json
-            Expect.stringContains json """{"Foo":"one","Bar":"two"}""" "The first document was not found"
-            Expect.stringContains json """{"Foo":"three","Bar":"four"}""" "The second document was not found"
-            Expect.stringContains json """{"Foo":"five","Bar":"six"}""" "The third document was not found"
+            let writer = writeStream stream
+            try
+                do! Json.writeAll SqliteDb.TableName writer
+                let json = streamText stream
+                verifyBeginEnd json
+                Expect.stringContains json """{"Foo":"one","Bar":"two"}""" "The first document was not found"
+                Expect.stringContains json """{"Foo":"three","Bar":"four"}""" "The second document was not found"
+                Expect.stringContains json """{"Foo":"five","Bar":"six"}""" "The third document was not found"
+            finally
+                writer.Complete()
        }
        testTask "succeeds when there is no data" {
            use! db = SqliteDb.BuildDb()

            use stream = new MemoryStream()
-            use writer = writeStream stream
-            do! 
Json.writeAll SqliteDb.TableName writer - verifyEmpty (streamText stream) + let writer = writeStream stream + try + do! Json.writeAll SqliteDb.TableName writer + verifyEmpty (streamText stream) + finally + writer.Complete() } ] testList "writeAllOrdered" [ @@ -997,36 +1006,45 @@ let jsonTests = testList "Json" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeAllOrdered SqliteDb.TableName writer [ Field.Named "n:NumValue" ] - Expect.equal - (streamText stream) - $"[{JsonDocument.one},{JsonDocument.three},{JsonDocument.two},{JsonDocument.four},{JsonDocument.five}]" - "The documents were not ordered correctly" + let writer = writeStream stream + try + do! Json.writeAllOrdered SqliteDb.TableName writer [ Field.Named "n:NumValue" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.one},{JsonDocument.three},{JsonDocument.two},{JsonDocument.four},{JsonDocument.five}]" + "The documents were not ordered correctly" + finally + writer.Complete() } testTask "succeeds when ordering numerically descending" { use! db = SqliteDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeAllOrdered SqliteDb.TableName writer [ Field.Named "n:NumValue DESC" ] - Expect.equal - (streamText stream) - $"[{JsonDocument.five},{JsonDocument.four},{JsonDocument.two},{JsonDocument.three},{JsonDocument.one}]" - "The documents were not ordered correctly" + let writer = writeStream stream + try + do! Json.writeAllOrdered SqliteDb.TableName writer [ Field.Named "n:NumValue DESC" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.five},{JsonDocument.four},{JsonDocument.two},{JsonDocument.three},{JsonDocument.one}]" + "The documents were not ordered correctly" + finally + writer.Complete() } testTask "succeeds when ordering alphabetically" { use! db = SqliteDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeAllOrdered SqliteDb.TableName writer [ Field.Named "Id DESC" ] - Expect.equal - (streamText stream) - $"[{JsonDocument.two},{JsonDocument.three},{JsonDocument.one},{JsonDocument.four},{JsonDocument.five}]" - "The documents were not ordered correctly" + let writer = writeStream stream + try + do! Json.writeAllOrdered SqliteDb.TableName writer [ Field.Named "Id DESC" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.two},{JsonDocument.three},{JsonDocument.one},{JsonDocument.four},{JsonDocument.five}]" + "The documents were not ordered correctly" + finally + writer.Complete() } ] testList "writeById" [ @@ -1035,18 +1053,24 @@ let jsonTests = testList "Json" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeById SqliteDb.TableName writer "two" - Expect.equal (streamText stream) JsonDocument.two "The incorrect document was returned" + let writer = writeStream stream + try + do! Json.writeById SqliteDb.TableName writer "two" + Expect.equal (streamText stream) JsonDocument.two "The incorrect document was returned" + finally + writer.Complete() } testTask "succeeds when a document is not found" { use! db = SqliteDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeById SqliteDb.TableName writer "three hundred eighty-seven" - verifyNoDoc (streamText stream) + let writer = writeStream stream + try + do! 
Json.writeById SqliteDb.TableName writer "three hundred eighty-seven" + verifyNoDoc (streamText stream) + finally + writer.Complete() } ] testList "writeByFields" [ @@ -1055,30 +1079,40 @@ let jsonTests = testList "Json" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByFields SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] - let json = streamText stream - verifyBeginEnd json - Expect.stringContains json JsonDocument.four "Document `four` should have been returned" - Expect.stringContains json JsonDocument.five "Document `five` should have been returned" + let writer = writeStream stream + try + do! Json.writeByFields SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] + let json = streamText stream + verifyBeginEnd json + Expect.stringContains json JsonDocument.four "Document `four` should have been returned" + Expect.stringContains json JsonDocument.five "Document `five` should have been returned" + finally + writer.Complete() } testTask "succeeds when documents are found using IN with numeric field" { use! db = SqliteDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByFields SqliteDb.TableName writer All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] - Expect.equal (streamText stream) $"[{JsonDocument.three}]" "There should have been one document returned" + let writer = writeStream stream + try + do! Json.writeByFields SqliteDb.TableName writer All [ Field.In "NumValue" [ 2; 4; 6; 8 ] ] + Expect.equal + (streamText stream) $"[{JsonDocument.three}]" "There should have been one document returned" + finally + writer.Complete() } testTask "succeeds when documents are not found" { use! db = SqliteDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByFields SqliteDb.TableName writer Any [ Field.Greater "NumValue" 100 ] - verifyEmpty (streamText stream) + let writer = writeStream stream + try + do! Json.writeByFields SqliteDb.TableName writer Any [ Field.Greater "NumValue" 100 ] + verifyEmpty (streamText stream) + finally + writer.Complete() } testTask "succeeds for InArray when matching documents exist" { use! db = SqliteDb.BuildDb() @@ -1086,14 +1120,18 @@ let jsonTests = testList "Json" [ for doc in ArrayDocument.TestDocuments do do! insert SqliteDb.TableName doc use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByFields SqliteDb.TableName writer All [ Field.InArray "Values" SqliteDb.TableName [ "c" ] ] - let json = streamText stream - verifyBeginEnd json - Expect.stringContains - json """{"Id":"first","Values":["a","b","c"]}""" "Document `first` should have been returned" - Expect.stringContains - json """{"Id":"second","Values":["c","d","e"]}""" "Document `second` should have been returned" + let writer = writeStream stream + try + do! Json.writeByFields + SqliteDb.TableName writer All [ Field.InArray "Values" SqliteDb.TableName [ "c" ] ] + let json = streamText stream + verifyBeginEnd json + Expect.stringContains + json """{"Id":"first","Values":["a","b","c"]}""" "Document `first` should have been returned" + Expect.stringContains + json """{"Id":"second","Values":["c","d","e"]}""" "Document `second` should have been returned" + finally + writer.Complete() } testTask "succeeds for InArray when no matching documents exist" { use! db = SqliteDb.BuildDb() @@ -1101,9 +1139,13 @@ let jsonTests = testList "Json" [ for doc in ArrayDocument.TestDocuments do do! 
insert SqliteDb.TableName doc use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByFields SqliteDb.TableName writer All [ Field.InArray "Values" SqliteDb.TableName [ "j" ] ] - verifyEmpty (streamText stream) + let writer = writeStream stream + try + do! Json.writeByFields + SqliteDb.TableName writer All [ Field.InArray "Values" SqliteDb.TableName [ "j" ] ] + verifyEmpty (streamText stream) + finally + writer.Complete() } ] testList "writeByFieldsOrdered" [ @@ -1112,48 +1154,60 @@ let jsonTests = testList "Json" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByFieldsOrdered - SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id" ] - Expect.equal - (streamText stream) $"[{JsonDocument.five},{JsonDocument.four}]" "Incorrect documents were returned" + let writer = writeStream stream + try + do! Json.writeByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id" ] + Expect.equal + (streamText stream) $"[{JsonDocument.five},{JsonDocument.four}]" "Incorrect documents were returned" + finally + writer.Complete() } testTask "succeeds when sorting descending" { use! db = SqliteDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByFieldsOrdered - SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id DESC" ] - Expect.equal - (streamText stream) $"[{JsonDocument.four},{JsonDocument.five}]" "Incorrect documents were returned" + let writer = writeStream stream + try + do! Json.writeByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Greater "NumValue" 15 ] [ Field.Named "Id DESC" ] + Expect.equal + (streamText stream) $"[{JsonDocument.four},{JsonDocument.five}]" "Incorrect documents were returned" + finally + writer.Complete() } testTask "succeeds when sorting case-sensitively" { use! db = SqliteDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByFieldsOrdered - SqliteDb.TableName writer All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "Value" ] - Expect.equal - (streamText stream) - $"[{JsonDocument.three},{JsonDocument.one},{JsonDocument.two}]" - "Documents not ordered correctly" + let writer = writeStream stream + try + do! Json.writeByFieldsOrdered + SqliteDb.TableName writer All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "Value" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.three},{JsonDocument.one},{JsonDocument.two}]" + "Documents not ordered correctly" + finally + writer.Complete() } testTask "succeeds when sorting case-insensitively" { use! db = SqliteDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeByFieldsOrdered - SqliteDb.TableName writer All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "i:Value" ] - Expect.equal - (streamText stream) - $"[{JsonDocument.three},{JsonDocument.two},{JsonDocument.one}]" - "Documents not ordered correctly" + let writer = writeStream stream + try + do! Json.writeByFieldsOrdered + SqliteDb.TableName writer All [ Field.LessOrEqual "NumValue" 10 ] [ Field.Named "i:Value" ] + Expect.equal + (streamText stream) + $"[{JsonDocument.three},{JsonDocument.two},{JsonDocument.one}]" + "Documents not ordered correctly" + finally + writer.Complete() } ] testList "writeFirstByFields" [ @@ -1162,29 +1216,38 @@ let jsonTests = testList "Json" [ do! 
loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Value" "another" ] - Expect.equal (streamText stream) JsonDocument.two "The incorrect document was returned" + let writer = writeStream stream + try + do! Json.writeFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Value" "another" ] + Expect.equal (streamText stream) JsonDocument.two "The incorrect document was returned" + finally + writer.Complete() } testTask "succeeds when multiple documents are found" { use! db = SqliteDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] - let json = streamText stream - Expect.notEqual json "{}" "There should have been a document returned" - verifyAny json [ JsonDocument.two; JsonDocument.four ] + let writer = writeStream stream + try + do! Json.writeFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] + let json = streamText stream + Expect.notEqual json "{}" "There should have been a document returned" + verifyAny json [ JsonDocument.two; JsonDocument.four ] + finally + writer.Complete() } testTask "succeeds when a document is not found" { use! db = SqliteDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Value" "absent" ] - verifyNoDoc (streamText stream) + let writer = writeStream stream + try + do! Json.writeFirstByFields SqliteDb.TableName writer Any [ Field.Equal "Value" "absent" ] + verifyNoDoc (streamText stream) + finally + writer.Complete() } ] testList "writeFirstByFieldsOrdered" [ @@ -1193,20 +1256,26 @@ let jsonTests = testList "Json" [ do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeFirstByFieldsOrdered - SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar" ] - Expect.equal (streamText stream) JsonDocument.two "An incorrect document was returned" + let writer = writeStream stream + try + do! Json.writeFirstByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar" ] + Expect.equal (streamText stream) JsonDocument.two "An incorrect document was returned" + finally + writer.Complete() } testTask "succeeds when sorting descending" { use! db = SqliteDb.BuildDb() do! loadDocs () use stream = new MemoryStream() - use writer = writeStream stream - do! Json.writeFirstByFieldsOrdered - SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar DESC" ] - Expect.equal (streamText stream) JsonDocument.four "An incorrect document was returned" + let writer = writeStream stream + try + do! Json.writeFirstByFieldsOrdered + SqliteDb.TableName writer Any [ Field.Equal "Sub.Foo" "green" ] [ Field.Named "Sub.Bar DESC" ] + Expect.equal (streamText stream) JsonDocument.four "An incorrect document was returned" + finally + writer.Complete() } ] ] -- 2.47.2 From 79ace3ea63883406dd4851774db91ca003fe0653 Mon Sep 17 00:00:00 2001 From: "Daniel J. 
Summers" Date: Fri, 11 Apr 2025 11:41:10 -0400 Subject: [PATCH 18/22] Finish PipeWriter implementation --- src/Common/Library.fs | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/Common/Library.fs b/src/Common/Library.fs index fe55d12..78c517a 100644 --- a/src/Common/Library.fs +++ b/src/Common/Library.fs @@ -634,6 +634,8 @@ module Query = |> function it -> $" ORDER BY {it}" +#nowarn "FS3511" // "let rec" is not statically compilable + open System.IO.Pipelines /// Functions that manipulate PipeWriters @@ -648,8 +650,7 @@ module PipeWriter = let writeString (writer: PipeWriter) (text: string) = backgroundTask { try let! writeResult = writer.WriteAsync(Encoding.UTF8.GetBytes text) - let! flushResult = writer.FlushAsync() - return not (writeResult.IsCompleted || flushResult.IsCompleted) + return not writeResult.IsCompleted with :? System.ObjectDisposedException -> return false } @@ -659,19 +660,20 @@ module PipeWriter = /// true if the pipe is still open, false if not [] let writeStrings writer items = backgroundTask { - let rec writeNext docs idx = backgroundTask { - match items |> Seq.tryItem idx with + let theItems = Seq.cache items + let rec writeNext idx = backgroundTask { + match theItems |> Seq.tryItem idx with | Some item -> if idx > 0 then let! _ = writeString writer "," () match! writeString writer item with - | true -> return! writeNext docs (idx + 1) + | true -> return! writeNext (idx + 1) | false -> return false | None -> return true } let! _ = writeString writer "[" - let! isCleanFinish = writeNext items 0 + let! isCleanFinish = writeNext 0 if isCleanFinish then let! _ = writeString writer "]" () -- 2.47.2 From 9560e27913c1ac0f4188a71cfa236ad96b4d9eed Mon Sep 17 00:00:00 2001 From: "Daniel J. Summers" Date: Fri, 11 Apr 2025 17:22:00 -0400 Subject: [PATCH 19/22] More WIP on docfx --- docs/advanced/custom-serialization.md | 38 +++ docs/advanced/index.md | 16 ++ docs/advanced/related.md | 376 ++++++++++++++++++++++++++ docs/advanced/transactions.md | 96 +++++++ docs/basic-usage.md | 21 +- docs/getting-started.md | 6 +- docs/toc.yml | 12 +- index.md | 6 +- 8 files changed, 554 insertions(+), 17 deletions(-) create mode 100644 docs/advanced/custom-serialization.md create mode 100644 docs/advanced/index.md create mode 100644 docs/advanced/related.md create mode 100644 docs/advanced/transactions.md diff --git a/docs/advanced/custom-serialization.md b/docs/advanced/custom-serialization.md new file mode 100644 index 0000000..2800a7e --- /dev/null +++ b/docs/advanced/custom-serialization.md @@ -0,0 +1,38 @@ +# Custom Serialization + +_Documentation pages for `BitBadger.Npgsql.Documents` redirect here. This library replaced it as of v3; see project home if this applies to you._ + +JSON documents are sent to and received from both PostgreSQL and SQLite as `string`s; the translation to and from your domain objects (commonly called POCOs) is handled via .NET. By default, the serializer used by the library is based on `System.Text.Json` with [converters for common F# types][fs]. + +## Implementing a Custom Serializer + +`IDocumentSerializer` (found in the `BitBadger.Documents` namespace) specifies two methods. `Serialize` takes a `T` and returns a `string`; `Deserialize` takes a `string` and returns an instance of `T`. (These show as `'T` in F#.) While implementing those two methods is required, the custom implementation can use whatever library you desire, and contain converters for custom types. 
+ +Once this serializer is implemented and constructed, provide it to the library: + +```csharp +// C# + var serializer = /* constructed serializer */; + Configuration.UseSerializer(serializer); +``` + +```fsharp +// F# + let serializer = (* constructed serializer *) + Configuration.useSerializer serializer +``` + +The biggest benefit to registering a serializer (apart from control) is that all JSON operations will use the same serializer. This is most important for PostgreSQL's JSON containment queries; the object you pass as the criteria will be translated properly before it is compared. However, "unstructured" data does not mean "inconsistently structured" data; if your application uses custom serialization, extending this to your documents ensures that the structure is internally consistent. + +## Uses for Custom Serialization + +- If you use a custom serializer (or serializer options) in your application, a custom serializer implementation can utilize these existing configuration options. +- If you prefer [`Newtonsoft.Json`][nj], you can wrap `JsonConvert` or `JsonSerializer` calls in a custom converter. F# users may consider incorporating Microsoft's [`FSharpLu.Json`][fj] converter. +- If your project uses [`NodaTime`][], your custom serializer could include its converters for `System.Text.Json` or `Newtonsoft.Json`. +- If you use DDD to define custom types, you can implement converters to translate them to/from your preferred JSON representation. + + +[fs]: https://github.com/Tarmil/FSharp.SystemTextJson "FSharp.SystemTextJson • GitHub" +[nj]: https://www.newtonsoft.com/json "Json.NET" +[fj]: https://github.com/microsoft/fsharplu/blob/main/FSharpLu.Json.md "FSharpLu.Json • GitHub" +[`NodaTime`]: https://nodatime.org/ "NodaTime" diff --git a/docs/advanced/index.md b/docs/advanced/index.md new file mode 100644 index 0000000..1e9d905 --- /dev/null +++ b/docs/advanced/index.md @@ -0,0 +1,16 @@ +# Advanced Usage + +_Documentation pages for `BitBadger.Npgsql.Documents` redirect here. This library replaced it as of v3; see project home if this applies to you._ + +While the functions provided by the library cover lots of use cases, there are other times when applications need something else. Below are some of those. + +- [Customizing Serialization][ser] +- [Related Documents and Custom Queries][rel] +- [Transactions][txn] +- [Referential Integrity][ref] (PostgreSQL only) + + +[ser]: ./custom-serialization.md "Advanced Usage: Custom Serialization • BitBadger.Documents" +[rel]: ./related.md "Advanced Usage: Related Documents • BitBadger.Documents" +[txn]: ./transactions.md "Advanced Usage: Transactions • BitBadger.Documents" +[ref]: ./integrity.html "Advanced Usage: Referential Integrity • BitBadger.Documents" diff --git a/docs/advanced/related.md b/docs/advanced/related.md new file mode 100644 index 0000000..539f6eb --- /dev/null +++ b/docs/advanced/related.md @@ -0,0 +1,376 @@ +# Related Documents and Custom Queries + +_Documentation pages for `BitBadger.Npgsql.Documents` redirect here. This library replaced it as of v3; see project home if this applies to you._ + +_NOTE: This page is longer than the ideal documentation page. Understanding how to assemble custom queries requires understanding how data is stored, and the list of ways to retrieve information can be... a lot. 
The hope is that one reading will serve as education, and the lists of options will serve as reference lists that will assist you in crafting your queries._ + +## Overview + +Document stores generally have fewer relationships than traditional relational databases, particularly those that arise when data is structured in [Third Normal Form][tnf]{target=_blank rel=noopener}; related collections are stored in the document, and ever-increasing surrogate keys (_a la_ sequences and such) do not play well with distributed data. Unless all data is stored in a single document, though, there will still be a natural relation between documents. + +Thinking back to our earlier examples, we did not store the collection of rooms in each hotel's document; each room is its own document and contains the ID of the hotel as one of its properties. + +```csharp +// C# +public class Hotel +{ + public string Id { get; set; } = ""; + // ... more properties +} + +public class Room +{ + public string Id { get; set; } = ""; + public string HotelId { get; set; } = ""; + // ... more properties +} +``` + +```fsharp +// F# +[] +type Hotel = + { Id: string + // ... more fields + } + +[] +type Room = + { Id: string + HotelId: string + // ... more fields + } +``` + +> The `CLIMutable` attribute is required on record types that are instantiated by the CLR; this attribute generates a zero-parameter constructor. + +## Document Table SQL in Depth + +The library creates tables with a `data` column of type `JSONB` (PostgreSQL) or `TEXT` (SQLite), with a unique index on the configured ID name that serves as the primary key (for these examples, we'll assume it's the default `Id`). The indexes created by the library all apply to the `data` column. The by-ID query for a hotel would be... + +```sql +SELECT data FROM hotel WHERE data->>'Id' = @id +``` + +...with the ID passed as the `@id` parameter. + +> _Using a "building block" method/function `Query.WhereById` will create the `data->>'Id' = @id` criteria using [the configured ID name][id]._ + +Finding all the rooms for a hotel, using our indexes we created earlier, could use a field comparison query... + +```sql +SELECT data FROM room WHERE data->>'HotelId' = @field +``` + +...with `@field` being "abc123"; PostgreSQL could also use a JSON containment query... + +```sql +SELECT data FROM room WHERE data @> @criteria +``` + +...with something like `new { HotelId = "abc123" }` passed as the matching document in the `@criteria` parameter. + +So far, so good; but, if we're looking up a room, we do not want to have to make 2 queries just to also be able to display the hotel's name. The `WHERE` clause on the first query above uses the expression `data->>'Id'`; this extracts a field from a JSON column as `TEXT` in PostgreSQL (or "best guess" in SQLite, but usually text). Since this is the value our unique index indexes, and we are using a relational database, we can write an efficient JOIN between these two tables. + +```sql +SELECT r.data, h.data AS hotel_data + FROM room r + INNER JOIN hotel h ON h.data->>'Id' = r.data->>'HotelId' + WHERE r.data->>'Id' = @id +``` + +_(This syntax would work without the unique index; for PostgreSQL, it would default to using the GIN index (`Full` or `Optimized`), if it exists, but it wouldn't be quite as efficient as a zero-or-one unique index lookup. For SQLite, this would result in a full table scan. 
Both PostgreSQL and SQLite also support a `->` operator, which extracts the field as a JSON value instead of its text.)_ + +## Using Building Blocks + +Most of the data access methods in both libraries are built up from query fragments and reusable functions; these are exposed for use in building custom queries. + +### Queries + +For every method or function described in [Basic Usage][], the `Query` static class/module contains the building blocks needed to construct query for that operation. Both the parent and implementation namespaces have a `Query` module; in C#, you'll need to qualify the implementation module namespace. + +In `BitBadger.Documents.Query`, you'll find: +- **StatementWhere** takes a SQL statement and a `WHERE` clause and puts them together on either side of the text ` WHERE ` +- **Definition** contains methods/functions to ensure tables, their keys, and field indexes exist. +- **Insert**, **Save**, **Count**, **Find**, **Update**, and **Delete** are the prefixes of the queries for those actions; they all take a table name and return this query (with no `WHERE` clause) +- **Exists** also requires a `WHERE` clause, due to how the query is constructed + because it is inserted as a subquery + +Within each implementation's `Query` module: +- **WhereByFields** takes a `FieldMatch` case and a set of fields. `Field` has constructor functions for each comparison it supports; these functions generally take a field name and a value, though the latter two do not require a value. + - **Equal** uses `=` to create an equality comparison + - **Greater** uses `>` to create a greater-than comparison + - **GreaterOrEqual** uses `>=` to create a greater-than-or-equal-to comparison + - **Less** uses `<` to create a less-than comparison + - **LessOrEqual** uses `<=` to create a less-than-or-equal-to comparison + - **NotEqual** uses `<>` to create a not-equal comparison + - **Between** uses `BETWEEN` to create a range comparison + - **In** uses `IN` to create an equality comparison within a set of given values + - **InArray** uses `?|` in PostgreSQL, and a combination of `EXISTS` / `json_each` / `IN` in SQLite, to create an equality comparison within a given set of values against an array in a JSON document + - **Exists** uses `IS NOT NULL` to create an existence comparison + - **NotExists** uses `IS NULL` to create a non-existence comparison; fields are considered null if they are either not part of the document, or if they are part of the document but explicitly set to `null` +- **WhereById** takes a parameter name and generates a field `Equal` comparison against the configured ID field. +- **Patch** and **RemoveFields** use each implementation's unique syntax for partial updates and field removals. +- **ByFields**, **ByContains** (PostgreSQL), and **ByJsonPath** (PostgreSQL) are functions that take a statement and the criteria, and construct a query to fit that criteria. For `ByFields`, each field parameter will use its specified name if provided (an incrementing `field[n]` if not). `ByContains` uses `@criteria` as its parameter name, which can be any object. `ByJsonPath` uses `@path`, which should be a `string`. + +That's a lot of reading! Some examples a bit below will help this make sense. + +### Parameters + +Traditional ADO.NET data access involves creating a connection object, then adding parameters to that object. This library follows a more declarative style, where parameters are passed via `IEnumerable` collections. 
To assist with creating these collections, each implementation has some helper functions. For C#, these calls will need to be prefixed with `Parameters`; for F#, this module is auto-opened. This is one area where names differ in other than just casing, so both will be listed. + +- **Parameters.Id** / **idParam** generate an `@id` parameter with the numeric, `string`, or `ToString()`ed value of the ID passed. +- **Parameters.Json** / **jsonParam** generate a user-provided-named JSON-formatted parameter for the value passed (this can be used for PostgreSQL's JSON containment queries as well) +- **Parameters.AddFields** / **addFieldParams** append field parameters to the given parameter list +- **Parameters.FieldNames** / **fieldNameParams** create parameters for the list of field names to be removed; for PostgreSQL, this returns a single parameter, while SQLite returns a list of parameters +- **Parameters.None** / **noParams** is an empty set of parameters, and can be cleaner and convey intent better than something like `new[] { }` _(For C# 12 or later, the collection expression `[]` is much terser.)_ + +If you need a parameter beyond these, both `NpgsqlParameter` and `SqliteParameter` have a name-and-value constructor; that isn't many more keystrokes. + +### Results + +The `Results` module is implementation specific. Both libraries provide `Results.FromData`, which deserializes a `data` column into the requested type; and `FromDocument`, which does the same thing, but allows the column to be named as well. We'll see how we can use these in further examples. As with parameters, C# users need to qualify the class name, but the module is auto-opened for F#. + +## Putting It All Together + +The **Custom** static class/module has four methods/functions: + +- **List** requires a query, parameters, and a mapping function, and returns a list of documents. +- **Single** requires a query, parameters, and a mapping function, and returns one or no documents (C# `TDoc?`, F# `'TDoc option`) +- **Scalar** requires a query, parameters, and a mapping function, and returns a scalar value (non-nullable; used for counts, existence, etc.) +- **NonQuery** requires a query and parameters and has no return value + +> _Within each library, every other call is written in terms of `Custom.List`, `Custom.Scalar`, or `Custom.NonQuery`; your custom queries will use the same path the provided ones do!_ + +Let's jump in with an example. When we query for a room, let's say that we also want to retrieve its hotel information as well. We saw the query above, but here is how we can implement it using a custom query. + +```csharp +// C#, All + // return type is Tuple? + var data = await Custom.Single( + $"SELECT r.data, h.data AS hotel_data + FROM room r + INNER JOIN hotel h ON h.data->>'{Configuration.IdField()}' = r.data->>'HotelId' + WHERE r.{Query.WhereById("@id")}", + new[] { Parameters.Id("my-room-key") }, + // rdr's type will be RowReader for PostgreSQL, SqliteDataReader for SQLite + rdr => Tuple.Create(Results.FromData(rdr), Results.FromDocument("hotel_data", rdr)); + if (data is not null) + { + var (room, hotel) = data; + // do stuff with the room and hotel data + } +``` + +```fsharp +// F#, All + // return type is (Room * Hotel) option + let! 
data = + Custom.single + $"""SELECT r.data, h.data AS hotel_data + FROM room r + INNER JOIN hotel h ON h.data->>'{Configuration.idField ()}' = r.data->>'HotelId' + WHERE r.{Query.whereById "@id"}""" + [ idParam "my-room-key" ] + // rdr's type will be RowReader for PostgreSQL, SqliteDataReader for SQLite + fun rdr -> (fromData rdr), (fromDocument "hotel_data" rdr) + match data with + | Some (Room room, Hotel hotel) -> + // do stuff with room and hotel + | None -> () +``` + +These queries use `Configuration.IdField` and `WhereById` to use the configured ID field. Creating custom queries using these building blocks allows us to utilize the configured value without hard-coding it throughout our custom queries. If the configuration changes, these queries will pick up the new field name seamlessly. + +While this example retrieves the entire document, this is not required. If we only care about the name of the associated hotel, we could amend the query to retrieve only that information. + +```csharp +// C#, All + // return type is Tuple? + var data = await Custom.Single( + $"SELECT r.data, h.data ->> 'Name' AS hotel_name + FROM room r + INNER JOIN hotel h ON h.data->>'{Configuration.IdField()}' = r.data->>'HotelId' + WHERE r.{Query.WhereById("@id")}", + new[] { Parameters.Id("my-room-key") }, + // PostgreSQL + row => Tuple.Create(Results.FromData(row), row.string("hotel_name"))); + // SQLite; could use rdr.GetString(rdr.GetOrdinal("hotel_name")) below as well + // rdr => Tuple.Create(Results.FromData(rdr), rdr.GetString(1))); + + if (data is not null) + { + var (room, hotelName) = data; + // do stuff with the room and hotel name + } +``` + +```fsharp +// F#, All + // return type is (Room * string) option + let! data = + Custom.single + $"""SELECT r.data, h.data->>'Name' AS hotel_name + FROM room r + INNER JOIN hotel h ON h.data->>'{Configuration.idField ()}' = r.data->>'HotelId' + WHERE r.{Query.whereById "@id"}""" + [ idParam "my-room-key" ] + // PostgreSQL + fun row -> (fromData row), row.string "hotel_name" + // SQLite; could use rdr.GetString(rdr.GetOrdinal("hotel_name")) below as well + // fun rdr -> (fromData rdr), rdr.GetString(1) + match data with + | Some (Room room, string hotelName) -> + // do stuff with room and hotel name + | None -> () +``` + +These queries are amazingly efficient, using 2 unique index lookups to return this data. Even though we do not have a foreign key between these two tables, simply being in a relational database allows us to retrieve this related data. + +Revisiting our "take these rooms out of service" SQLite query from the Basic Usage page, here's how that could look using building blocks available since version 4 (PostgreSQL will accept this query syntax as well, though the parameter types would be different): + +```csharp +// C#, SQLite + var fields = [Field.GreaterOrEqual("RoomNumber", 221), Field.LessOrEqual("RoomNumber", 240)]; + await Custom.NonQuery( + Sqlite.Query.ByFields(Sqlite.Query.Patch("room"), FieldMatch.All, fields, + new { InService = false }), + Parameters.AddFields(fields, [])); +``` + +```fsharp +// F#, SQLite + let fields = [ Field.GreaterOrEqual "RoomNumber" 221; Field.LessOrEqual "RoomNumber" 240 ] + do! 
Custom.nonQuery
        (Query.byFields (Query.patch "room") All fields {| InService = false |})
        (addFieldParams fields []))
```

This uses two field comparisons to incorporate the room number range instead of a `BETWEEN` clause; we would definitely want to have that field indexed if this was going to be a regular query or our data was going to grow beyond a trivial size.

_You may be thinking "wait - what's the difference between that and the regular `Patch` call?" And you'd be right; that is exactly what `Patch.ByFields` does. `Between` is also a better comparison for this, and either `FieldMatch` type will work, as we're only passing one field. No building blocks required!_

```csharp
// C#, All
    await Patch.ByFields("room", FieldMatch.Any, [Field.Between("RoomNumber", 221, 240)],
        new { InService = false });
```

```fsharp
// F#, All
    do! Patch.byFields "room" Any [ Field.Between "RoomNumber 221 240 ] {| InService = false |}
```

## Going Even Further

### Updating Data in Place

One drawback to document databases is the inability to update values in place; however, with a bit of creativity, we can do a lot more than we initially think. For a single field, SQLite has a `json_set` function that takes an existing JSON value, a path, and a value to which that path should be set. This allows us to do single-field updates in the database. If we wanted to raise our rates 10% for every room, we could use this query:

```sql
-- SQLite
UPDATE room SET data = json_set(data, 'Rate', data ->> 'Rate' * 1.1)
```

If we get any more complex, though, Common Table Expressions (CTEs) can help us. Perhaps we decided that we only wanted to raise the rates for hotels in New York, Chicago, and Los Angeles, and we wanted to exclude any brand with the word "Value" in its name. A CTE lets us select the source data we need to craft the update, then use that in the `UPDATE`'s clauses.

```sql
-- SQLite
WITH to_update AS
    (SELECT r.data->>'Id' AS room_id, r.data->>'Rate' AS current_rate, r.data AS room_data
       FROM room r
            INNER JOIN hotel h ON h.data->>'Id' = r.data->>'HotelId'
      WHERE h.data->>'City' IN ('New York', 'Chicago', 'Los Angeles')
        AND LOWER(h.data->>'Name') NOT LIKE '%value%')
UPDATE room
   SET data = json_set(to_update.room_data, '$.Rate', to_update.current_rate * 1.1)
  FROM to_update
 WHERE room.data->>'Id' = to_update.room_id
```

Both PostgreSQL and SQLite provide JSON patching, where multiple fields (or entire structures) can be changed at once. Let's revisit our rate increase; if we are making the rate more than $500, we'll apply a status of "Premium" to the room. If it is less than that, it should keep its same value.
+
First up, PostgreSQL:
```sql
-- PostgreSQL
WITH to_update AS
    (SELECT r.data->>'Id' AS room_id, (r.data->>'Rate')::decimal AS rate, r.data->>'Status' AS status
       FROM room r
            INNER JOIN hotel h ON h.data->>'Id' = r.data->>'HotelId'
      WHERE h.data->>'City' IN ('New York', 'Chicago', 'Los Angeles')
        AND LOWER(h.data ->> 'Name') NOT LIKE '%value%')
UPDATE room
   SET data = data ||
         ('{"Rate":' || to_update.rate * 1.1 || ',"Status":"'
           || CASE WHEN to_update.rate * 1.1 > 500 THEN 'Premium' ELSE to_update.status END
           || '"}')::jsonb
  FROM to_update
 WHERE room.data->>'Id' = to_update.room_id
```

In SQLite:
```sql
-- SQLite
WITH to_update AS
    (SELECT r.data->>'Id' AS room_id, r.data->>'Rate' AS rate, r.data->>'Status' AS status
       FROM room r
            INNER JOIN hotel h ON h.data->>'Id' = r.data->>'HotelId'
      WHERE h.data->>'City' IN ('New York', 'Chicago', 'Los Angeles')
        AND LOWER(h.data->>'Name') NOT LIKE '%value%')
UPDATE room
   SET data = json_patch(data, json(
         '{"Rate":' || to_update.rate * 1.1 || ',"Status":"'
           || CASE WHEN to_update.rate * 1.1 > 500 THEN 'Premium' ELSE to_update.status END
           || '"}'))
  FROM to_update
 WHERE room.data->>'Id' = to_update.room_id
```

For PostgreSQL, `->>` always returns text, so we need to cast the rate to a number. In either case, we do not want to use this technique for user-provided data; however, updating in place allowed us to complete all of our scenarios without having to load the documents into our application and manipulate them there.

Updates in place may not need parameters (though it would be easy to foresee a "rate adjustment" feature where the 1.1 adjustment was not hard-coded); in fact, none of the samples in this section used the document libraries at all. These queries can be executed by `Custom.NonQuery`, though, providing parameters as required.

### Using This Library for Non-Document Queries

The `Custom` methods/functions can be used with non-document tables as well. This may be a convenient and consistent way to access your data, while delegating connection management to the library and its configured data source.

Let's walk through a short example using C# and PostgreSQL:

```csharp
// C#, PostgreSQL
    using Npgsql.FSharp; // Needed for RowReader and Sql types
    using static CommonExtensionsAndTypesForNpgsqlFSharp; // Needed for Sql functions

    // Stores metadata for a given user
    public class MetaData
    {
        public string Id { get; set; } = "";
        public string UserId { get; set; } = "";
        public string Key { get; set; } = "";
        public string Value { get; set; } = "";
    }

    // Static class to hold mapping functions
    public static class Map
    {
        // These parameters are the column names from the underlying table
        public MetaData ToMetaData(RowReader row) =>
            new MetaData
            {
                Id = row.string("id"),
                UserId = row.string("user_id"),
                Key = row.string("key"),
                Value = row.string("value")
            };
    }

    // somewhere in a class, retrieving data
    public Task<List<MetaData>> MetaDataForUser(string userId) =>
        Document.Custom.List("SELECT * FROM user_metadata WHERE user_id = @userId",
            new { Tuple.Create("@userId", Sql.string(userId)) },
            Map.ToMetaData);
```

For F#, the `using static` above is not needed; that module is auto-opened when `Npgsql.FSharp` is opened. For SQLite in either language, the mapping function uses a `SqliteDataReader` object, which implements the standard ADO.NET `DataReader` functions of `Get[Type](idx)` (and `GetOrdinal(name)` for the column index).
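A hedged F# sketch of the same lookup may help connect the dots; the table and column names carry over from the C# example above, and the tupled `Sql.string` parameter follows `Npgsql.FSharp` conventions.

```fsharp
// F#, PostgreSQL - a sketch of the metadata lookup above
open Npgsql.FSharp

type MetaData =
    { Id: string
      UserId: string
      Key: string
      Value: string }

let metaDataForUser userId =
    Custom.list
        "SELECT * FROM user_metadata WHERE user_id = @userId"
        [ "@userId", Sql.string userId ]
        (fun row ->
            { Id = row.string "id"
              UserId = row.string "user_id"
              Key = row.string "key"
              Value = row.string "value" })
```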
+ + +[tnf]: https://en.wikipedia.org/wiki/Third_normal_form "Third Normal Form • Wikipedia" +[id]: ../getting-started.md#field-name "Getting Started (ID Fields) • BitBadger.Documents" +[Basic Usage]: ../basic-usage.md "Basic Usage • BitBadger.Documents" diff --git a/docs/advanced/transactions.md b/docs/advanced/transactions.md new file mode 100644 index 0000000..039ebab --- /dev/null +++ b/docs/advanced/transactions.md @@ -0,0 +1,96 @@ +# Transactions + +_Documentation pages for `BitBadger.Npgsql.Documents` redirect here. This library replaced it as of v3; see project home if this applies to you._ + +On occasion, there may be a need to perform multiple updates in a single database transaction, where either all updates succeed, or none do. + +## Controlling Database Transactions + +The `Configuration` static class/module of each library [provides a way to obtain a connection][conn]. Whatever strategy your application uses to obtain the connection, the connection object is how ADO.NET implements transactions. + +```csharp +// C#, All + // "conn" is assumed to be either NpgsqlConnection or SqliteConnection + await using var txn = await conn.BeginTransactionAsync(); + try + { + // do stuff + await txn.CommitAsync(); + } + catch (Exception ex) + { + await txn.RollbackAsync(); + // more error handling + } +``` + +```fsharp +// F#, All + // "conn" is assumed to be either NpgsqlConnection or SqliteConnection + use! txn = conn.BeginTransactionAsync () + try + // do stuff + do! txn.CommitAsync () + with ex -> + do! txt.RollbackAsync () + // more error handling +``` + +## Executing Queries on the Connection + +This precise scenario was the reason that all methods and functions are implemented on the connection object; all extensions execute the commands in the context of the connection. Imagine an application where a user signs in. We may want to set an attribute on the user record that says that now is the last time they signed in; and we may also want to reset a failed logon counter, as they have successfully signed in. This would look like: + +```csharp +// C#, All ("conn" is our connection object) + await using var txn = await conn.BeginTransactionAsync(); + try + { + await conn.PatchById("user_table", userId, new { LastSeen = DateTime.Now }); + await conn.PatchById("security", userId, new { FailedLogOnCount = 0 }); + await txn.CommitAsync(); + } + catch (Exception ex) + { + await txn.RollbackAsync(); + // more error handling + } +``` + +```fsharp +// F#, All ("conn" is our connection object) + use! txn = conn.BeginTransactionAsync() + try + do! conn.patchById "user_table" userId {| LastSeen = DateTime.Now |} + do! conn.patchById "security" userId {| FailedLogOnCount = 0 |} + do! txn.CommitAsync() + with ex -> + do! txn.RollbackAsync() + // more error handling +``` + +### A Functional Alternative + +The PostgreSQL library has a static class/module called `WithProps`; the SQLite library has a static class/module called `WithConn`. Each of these accept the `SqlProps` or `SqliteConnection` parameter as the last parameter of the query. For SQLite, we need nothing else to pass the connection to these methods/functions; for PostgreSQL, though, we'll need to create a `SqlProps` object based off the connection. + +```csharp +// C#, PostgreSQL + using Npgsql.FSharp; + // ... + var props = Sql.existingConnection(conn); + // ... + await WithProps.Patch.ById("user_table", userId, new { LastSeen = DateTime.Now }, props); +``` + +```fsharp +// F#, PostgreSQL + open Npgsql.FSharp + // ... 
+ let props = Sql.existingConnection conn + // ... + do! WithProps.Patch.ById "user_table" userId {| LastSeen = DateTime.Now |} props +``` + +If we do not want to qualify with `WithProps` or `WithConn`, C# users can add `using static [WithProps|WithConn];` to bring these functions into scope; F# users can add `open BitBadger.Documents.[Postgres|Sqlite].[WithProps|WithConn]` to bring them into scope. However, in C#, this will affect the entire file, and in F#, it will affect the file from that point through the end of the file. Unless you want to go all-in with the connection-last functions, it is probably better to qualify the occasional call. + + +[conn]: ../getting-started.md#the-connection "Getting Started (The Connection) • BitBadger.Documents" diff --git a/docs/basic-usage.md b/docs/basic-usage.md index 55bc9ca..1ca46a6 100644 --- a/docs/basic-usage.md +++ b/docs/basic-usage.md @@ -114,7 +114,7 @@ Functions to find documents start with `Find.`. There are variants to find all d All `Find` methods and functions have two corresponding `Json` functions. * The first set return the expected document(s) as a `string`, and will always return valid JSON. Single-document queries with nothing found will return `{}`, while zero-to-many queries will return `[]` if no documents match the given criteria. -* The second set are prefixed with `Write`, and take a `StreamWriter` immediately after the table name parameter. These functions write results to the given stream instead of returning them, which can be useful for JSON API scenarios. +* The second set are prefixed with `Write`, and take a `PipeWriter` immediately after the table name parameter. These functions write results to the given pipeline as they are retrieved from the database, instead of accumulating them all and returning a `string`. This can be useful for JSON API scenarios; ASP.NET Core's `HttpResponse.BodyWriter` property is a `PipeWriter` (and pipelines are [preferred over streams][pipes]). ## Deleting Documents @@ -132,17 +132,18 @@ Functions to check for existence start with `Exists.`. Documents may be checked The table below shows which commands are available for each access method. (X = supported for both, P = PostgreSQL only) -Operation | `All` | `ById` | `ByFields` | `ByContains` | `ByJsonPath` | `FirstByFields` | `FirstByContains` | `FirstByJsonPath` -----------|:-----:|:------:|:---------:|:------------:|:------------:|:--------------:|:-----------------:|:----------------:| -`Count` | X | | X | P | P | -`Exists` | | X | X | P | P | -`Find` | X | X | X | P | P | X | P | P | -`Patch` | | X | X | P | P | -`RemoveFields` | | X | X | P | P | -`Delete` | | X | X | P | P | +| Operation | `All` | `ById` | `ByFields` | `ByContains` | `ByJsonPath` | `FirstByFields` | `FirstByContains` | `FirstByJsonPath` | +|-----------------|:-----:|:------:|:----------:|:------------:|:------------:|:---------------:|:-----------------:|:-----------------:| +| `Count` | X | | X | P | P | | | | +| `Exists` | | X | X | P | P | | | | +| `Find` / `Json` | X | X | X | P | P | X | P | P | +| `Patch` | | X | X | P | P | | | | +| `RemoveFields` | | X | X | P | P | | | | +| `Delete` | | X | X | P | P | | | | `Insert`, `Save`, and `Update.*` operate on single documents. 
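To make the `Write`-prefixed functions described above concrete, here is a hedged sketch of serving a JSON array straight from an ASP.NET Core handler; the exact function name (`Json.writeByFields` here) is an assumption based on the naming convention described above.

```fsharp
// F# - a sketch only; Json.writeByFields is an assumed name
open Microsoft.AspNetCore.Http

let roomsForHotel (ctx: HttpContext) hotelId = task {
    ctx.Response.ContentType <- "application/json"
    // HttpResponse.BodyWriter is the PipeWriter the Write functions expect
    do! Json.writeByFields "room" ctx.Response.BodyWriter Any [ Field.Equal "HotelId" hotelId ]
}
```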
[best-guess on types]: https://sqlite.org/datatype3.html "Datatypes in SQLite • SQLite" [JSON Path]: https://www.postgresql.org/docs/15/functions-json.html#FUNCTIONS-SQLJSON-PATH "JSON Functions and Operators • PostgreSQL Documentation" -[Advanced Usage]: /open-source/relational-documents/dotnet/advanced-usage.html "Advanced Usage • BitBadger.Documents • Bit Badger Solutions" +[Advanced Usage]: ./advanced/index.md "Advanced Usage • BitBadger.Documents • Bit Badger Solutions" +[pipes]: https://learn.microsoft.com/en-us/aspnet/core/fundamentals/middleware/request-response?view=aspnetcore-9.0 "Request and Response Operations • Microsoft Learn" diff --git a/docs/getting-started.md b/docs/getting-started.md index b8acfa9..900cca0 100644 --- a/docs/getting-started.md +++ b/docs/getting-started.md @@ -182,6 +182,6 @@ Now that we have tables, let's [use them][]! [`Npgsql` docs]: https://www.npgsql.org/doc/connection-string-parameters "Connection String Parameter • Npgsql" [`Microsoft.Data.Sqlite` docs]: https://learn.microsoft.com/en-us/dotnet/standard/data/sqlite/connection-strings "Connection Strings • Microsoft.Data.Sqlite • Microsoft Learn" -[ser]: ./advanced/custom-serialization.html "Advanced Usage: Custom Serialization • BitBadger.Documents" -[json-index]: https://www.postgresql.org/docs/current/datatype-json.html#JSON-INDEXING -[use them]: ./basic-usage.html "Basic Usage • BitBadger.Documents" +[ser]: ./advanced/custom-serialization.md "Advanced Usage: Custom Serialization • BitBadger.Documents" +[json-index]: https://www.postgresql.org/docs/current/datatype-json.html#JSON-INDEXING "Indexing JSON Fields • PostgreSQL" +[use them]: ./basic-usage.md "Basic Usage • BitBadger.Documents" diff --git a/docs/toc.yml b/docs/toc.yml index 016a104..f6ca704 100644 --- a/docs/toc.yml +++ b/docs/toc.yml @@ -1,4 +1,14 @@ - name: Getting Started href: getting-started.md - name: Basic Usage - href: basic-usage.md \ No newline at end of file + href: basic-usage.md +- name: Advanced Usage + href: advanced/index.md + items: + - name: Custom Serialization + href: advanced/custom-serialization.md + - name: Related Documents and Custom Queries + href: advanced/related.md + - name: Transactions + href: advanced/transactions.md + \ No newline at end of file diff --git a/index.md b/index.md index fbe5dcd..af1d45f 100644 --- a/index.md +++ b/index.md @@ -80,9 +80,9 @@ Issues can be filed on the project's GitHub repository. 
[pkg-link-pgsql]: https://www.nuget.org/packages/BitBadger.Documents.Postgres/ "BitBadger.Documents.Postgres • NuGet" [pkg-shield-sqlite]: https://img.shields.io/nuget/vpre/BitBadger.Documents.Sqlite [pkg-link-sqlite]: https://www.nuget.org/packages/BitBadger.Documents.Sqlite/ "BitBadger.Documents.Sqlite • NuGet" -[Getting Started]: docs/getting-started.html "Getting Started • BitBadger.Documents" -[Basic Usage]: /open-source/relational-documents/dotnet/basic-usage.html "Basic Usage • BitBadger.Documents • Bit Badger Solutions" -[Advanced Usage]: /open-source/relational-documents/dotnet/advanced-usage.html "Advanced Usage • BitBadger.Documents • Bit Badger Solutions" +[Getting Started]: ./docs/getting-started.md "Getting Started • BitBadger.Documents" +[Basic Usage]: ./docs/basic-usage.md "Basic Usage • BitBadger.Documents" +[Advanced Usage]: ./docs/advanced/index.md "Advanced Usage • BitBadger.Documents" [v3v4]: /open-source/relational-documents/dotnet/upgrade-v3-to-v4.html "Upgrade from v3 to v4 • BitBadger.Documents • Bit Badger Solutions" [v4rel]: https://git.bitbadger.solutions/bit-badger/BitBadger.Documents/releases/tag/v4 "Version 4 • Releases • BitBadger.Documents • Bit Badger Solutions Git" [v2v3]: /open-source/relational-documents/dotnet/upgrade-v2-to-v3.html "Upgrade from v2 to v3 • BitBadger.Documents • Bit Badger Solutions" -- 2.47.2 From 037c668ae316852319fc2c6f17c9f7102b0cc242 Mon Sep 17 00:00:00 2001 From: "Daniel J. Summers" Date: Fri, 11 Apr 2025 20:28:00 -0400 Subject: [PATCH 20/22] All doc text in docfx --- docs/advanced/custom-serialization.md | 8 +- docs/advanced/index.md | 2 +- docs/advanced/integrity.md | 222 ++++++++++++++++++++++++++ docs/advanced/related.md | 211 ++++++++++++------------ docs/advanced/transactions.md | 100 ++++++------ docs/basic-usage.md | 50 +++--- docs/getting-started.md | 34 ++-- docs/toc.yml | 11 +- docs/upgrade/v2.md | 37 +++++ docs/upgrade/v3.md | 11 ++ docs/upgrade/v4.md | 35 ++++ index.md | 6 +- 12 files changed, 522 insertions(+), 205 deletions(-) create mode 100644 docs/advanced/integrity.md create mode 100644 docs/upgrade/v2.md create mode 100644 docs/upgrade/v3.md create mode 100644 docs/upgrade/v4.md diff --git a/docs/advanced/custom-serialization.md b/docs/advanced/custom-serialization.md index 2800a7e..b9e108c 100644 --- a/docs/advanced/custom-serialization.md +++ b/docs/advanced/custom-serialization.md @@ -12,14 +12,14 @@ Once this serializer is implemented and constructed, provide it to the library: ```csharp // C# - var serializer = /* constructed serializer */; - Configuration.UseSerializer(serializer); +var serializer = /* constructed serializer */; +Configuration.UseSerializer(serializer); ``` ```fsharp // F# - let serializer = (* constructed serializer *) - Configuration.useSerializer serializer +let serializer = (* constructed serializer *) +Configuration.useSerializer serializer ``` The biggest benefit to registering a serializer (apart from control) is that all JSON operations will use the same serializer. This is most important for PostgreSQL's JSON containment queries; the object you pass as the criteria will be translated properly before it is compared. However, "unstructured" data does not mean "inconsistently structured" data; if your application uses custom serialization, extending this to your documents ensures that the structure is internally consistent. 
diff --git a/docs/advanced/index.md b/docs/advanced/index.md
index 1e9d905..06aa368 100644
--- a/docs/advanced/index.md
+++ b/docs/advanced/index.md
@@ -13,4 +13,4 @@ While the functions provided by the library cover lots of use cases, there are o
 [ser]: ./custom-serialization.md "Advanced Usage: Custom Serialization • BitBadger.Documents"
 [rel]: ./related.md "Advanced Usage: Related Documents • BitBadger.Documents"
 [txn]: ./transactions.md "Advanced Usage: Transactions • BitBadger.Documents"
-[ref]: ./integrity.html "Advanced Usage: Referential Integrity • BitBadger.Documents"
+[ref]: ./integrity.md "Advanced Usage: Referential Integrity • BitBadger.Documents"
diff --git a/docs/advanced/integrity.md b/docs/advanced/integrity.md
new file mode 100644
index 0000000..5f52d79
--- /dev/null
+++ b/docs/advanced/integrity.md
@@ -0,0 +1,222 @@
+# Referential Integrity

_Documentation pages for `BitBadger.Npgsql.Documents` redirect here. This library replaced it as of v3; see project home if this applies to you._

One of the hallmarks of document databases is loose association between documents. In our running hotel and room example, there is no technical reason we could not delete every hotel in the database, leaving all the rooms with hotel IDs that no longer exist. This is a feature-not-a-bug, but it shows the tradeoffs inherent to selecting a data storage mechanism. In our case, this is less than ideal - but, since we are using PostgreSQL, a relational database, we can implement referential integrity if, when, and where we need it.

> _NOTE: This page has very little to do with the document library itself; these are all modifications that can be made via PostgreSQL. SQLite may have similar capabilities, but this author has yet to explore that._

## Enforcing Referential Integrity on the Child Document

While we've been able to use `data->>'Id'` in place of column names for most things up to this point, here is where we hit a roadblock; we cannot define a foreign key constraint against an arbitrary expression. Through database triggers, though, we can accomplish the same thing.

Triggers are implemented in PostgreSQL through a function/trigger definition pair. A function defined as a trigger has `NEW` and `OLD` defined as the data that is being manipulated (different ones, depending on the operation; no `OLD` for `INSERT`s, no `NEW` for `DELETE`s, etc.). For our purposes here, we'll use `NEW`, as we're trying to verify the data as it's being inserted or updated.

```sql
CREATE OR REPLACE FUNCTION room_hotel_id_fk() RETURNS TRIGGER AS $$
    DECLARE
        hotel_id TEXT;
    BEGIN
        SELECT data->>'Id' INTO hotel_id FROM hotel WHERE data->>'Id' = NEW.data->>'HotelId';
        IF hotel_id IS NULL THEN
            RAISE EXCEPTION 'Hotel ID % does not exist', NEW.data->>'HotelId';
        END IF;
        RETURN NEW;
    END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE TRIGGER hotel_enforce_fk BEFORE INSERT OR UPDATE ON room
    FOR EACH ROW EXECUTE FUNCTION room_hotel_id_fk();
```

This is as straightforward as we can make it; if the query fails to retrieve data (returning `NULL` here, not raising `NO_DATA_FOUND` like Oracle would), we raise an exception. Here's what that looks like in practice.
+
```
hotel=# insert into room values ('{"Id": "one", "HotelId": "fifteen"}');
ERROR: Hotel ID fifteen does not exist
CONTEXT: PL/pgSQL function room_hotel_id_fk() line 7 at RAISE
hotel=# insert into hotel values ('{"Id": "fifteen", "Name": "Demo Hotel"}');
INSERT 0 1
hotel=# insert into room values ('{"Id": "one", "HotelId": "fifteen"}');
INSERT 0 1
```

(This assumes we'll always have a `HotelId` field; [see below][] on how to create this trigger if the foreign key is optional.)

## Enforcing Referential Integrity on the Parent Document

We've only addressed half of the parent/child relationship so far; now, we need to make sure parents don't disappear.

### Referencing the Child Key

The trigger on `room` referenced the unique index in its lookup. When we try to go from `hotel` to `room`, though, we'll need to address the `HotelId` field of the `room` document. For the best efficiency, we can index that field.

```sql
CREATE INDEX IF NOT EXISTS idx_room_hotel_id ON room ((data->>'HotelId'));
```

### `ON DELETE DO NOTHING`

When defining a foreign key constraint, the final part of that clause is an `ON DELETE` action; if it's excluded, it defaults to `DO NOTHING`. The effect of this is that rows cannot be deleted if they are referenced in a child table. This can be implemented by looking for any rows that reference the hotel being deleted, and raising an exception if any are found.

```sql
CREATE OR REPLACE FUNCTION hotel_room_delete_prevent() RETURNS TRIGGER AS $$
    DECLARE
        has_rows BOOL;
    BEGIN
        SELECT EXISTS(SELECT 1 FROM room WHERE OLD.data->>'Id' = data->>'HotelId') INTO has_rows;
        IF has_rows THEN
            RAISE EXCEPTION 'Hotel ID % has dependent rooms; cannot delete', OLD.data->>'Id';
        END IF;
        RETURN OLD;
    END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE TRIGGER hotel_room_delete BEFORE DELETE ON hotel
    FOR EACH ROW EXECUTE FUNCTION hotel_room_delete_prevent();
```

This trigger in action...

```
hotel=# delete from hotel where data->>'Id' = 'fifteen';
ERROR: Hotel ID fifteen has dependent rooms; cannot delete
CONTEXT: PL/pgSQL function hotel_room_delete_prevent() line 7 at RAISE
hotel=# select * from room;
                data
-------------------------------------
 {"Id": "one", "HotelId": "fifteen"}
(1 row)
```

There's that child record! We've successfully prevented an orphaned room.

### `ON DELETE CASCADE`

Rather than prevent deletion, another foreign key constraint option is to delete the dependent records as well; the delete "cascades" (like a waterfall) to the child tables. Implementing this is even less code!

```sql
CREATE OR REPLACE FUNCTION hotel_room_delete_cascade() RETURNS TRIGGER AS $$
    BEGIN
        DELETE FROM room WHERE data->>'HotelId' = OLD.data->>'Id';
        RETURN OLD;
    END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE TRIGGER hotel_room_delete BEFORE DELETE ON hotel
    FOR EACH ROW EXECUTE FUNCTION hotel_room_delete_cascade();
```

Here's what happens when we try the same `DELETE` statement that was prevented above...

```
hotel=# select * from room;
                data
-------------------------------------
 {"Id": "one", "HotelId": "fifteen"}
(1 row)

hotel=# delete from hotel where data->>'Id' = 'fifteen';
DELETE 1
hotel=# select * from room;
 data
------
(0 rows)
```

We deleted a hotel, not rooms, but the rooms are now gone as well.

### `ON DELETE SET NULL`

The final option for a foreign key constraint is to set the column in the dependent table to `NULL`.
There are two options to set a field to `NULL` in a `JSONB` document; we can either explicitly give the field a value of `null`, or we can remove the field from the document. As there is no schema, the latter is cleaner; PostgreSQL will return `NULL` for any non-existent field.

```sql
CREATE OR REPLACE FUNCTION hotel_room_delete_set_null() RETURNS TRIGGER AS $$
    BEGIN
        UPDATE room SET data = data - 'HotelId' WHERE data->>'HotelId' = OLD.data ->> 'Id';
        RETURN OLD;
    END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE TRIGGER hotel_room_delete BEFORE DELETE ON hotel
    FOR EACH ROW EXECUTE FUNCTION hotel_room_delete_set_null();
```

That `-` operator is new for us. When used on a `JSON` or `JSONB` field, it removes the named field from the document.

Let's watch it work...

```
hotel=# delete from hotel where data->>'Id' = 'fifteen';
ERROR: Hotel ID does not exist
CONTEXT: PL/pgSQL function room_hotel_id_fk() line 7 at RAISE
SQL statement "UPDATE room SET data = data - 'HotelId' WHERE data->>'HotelId' = OLD.data->>'Id'"
PL/pgSQL function hotel_room_delete_set_null() line 3 at SQL statement
```

Oops! This trigger execution fired the `BEFORE UPDATE` trigger on `room`, and it took exception to us setting that value to `NULL`. The child table trigger assumes we'll always have a value. We'll need to tweak that trigger to allow this.

```sql
CREATE OR REPLACE FUNCTION room_hotel_id_nullable_fk() RETURNS TRIGGER AS $$
    DECLARE
        hotel_id TEXT;
    BEGIN
        IF NEW.data->>'HotelId' IS NOT NULL THEN
            SELECT data->>'Id' INTO hotel_id FROM hotel WHERE data->>'Id' = NEW.data->>'HotelId';
            IF hotel_id IS NULL THEN
                RAISE EXCEPTION 'Hotel ID % does not exist', NEW.data->>'HotelId';
            END IF;
        END IF;
        RETURN NEW;
    END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE TRIGGER hotel_enforce_fk BEFORE INSERT OR UPDATE ON room
    FOR EACH ROW EXECUTE FUNCTION room_hotel_id_nullable_fk();
```

Now, when we try to run the deletion, it works.

```
hotel=# select * from room;
                data
-------------------------------------
 {"Id": "one", "HotelId": "fifteen"}
(1 row)

hotel=# delete from hotel where data->>'Id' = 'fifteen';
DELETE 1
hotel=# select * from room;
     data
---------------
 {"Id": "one"}
(1 row)
```

## Should We Do This?

You may be thinking "Hey, this is pretty cool; why not do this everywhere?" Well, the answer is - as it is with _everything_ software-development-related - "it depends."

### No...?

The flexible, schemaless data storage paradigm that we call "document databases" allows changes to happen quickly. While "schemaless" can mean "ad hoc," in practice most documents have a well-defined structure. Not having to define columns for each item, then re-define or migrate them when things change, brings a lot of benefits.

What we've implemented above, in this example, complicates some processes. Sure, triggers can be disabled then re-enabled, but unlike true constraints, they do not validate existing data. If we were to disable triggers, run some updates, and re-enable them, we could end up with records that can't be saved in their current state.

### Yes...?

The lack of referential integrity in document databases can be an impediment to adoption in areas where that paradigm may be more suitable than a relational one. To be sure, there are fewer relationships in a document database whose documents have complex structures, arrays, etc.
This doesn't mean that there won't be relationships, though; in our hotel example, we could easily see a "reservation" document that has the IDs of a customer and a room. Just as it didn't make much sense to embed the rooms in a hotel document, it doesn't make sense to embed customers in a room document. + +What PostgreSQL brings to all of this is that it does not have to be an all-or-nothing decision re: referential integrity. We can implement a document store with no constraints, then apply the ones we absolutely must have. We realize we're complicating maintenance a bit (though `pgdump` will create a backup with the proper order for restoration), but we like that PostgreSQL will protect us from broken code or mistyped `UPDATE` statements. + +## Going Further + +As the trigger functions are executing SQL, it would be possible to create a set of reusable trigger functions that take table/column as parameters. Dynamic SQL in PL/pgSQL was additional complexity that would have distracted from the concepts. Feel free to take the examples above and make them reusable. + +Finally, one piece we will not cover is `CHECK` constraints. These can be applied to tables using the `data->>'Key'` syntax, and can be used to apply more of a schema feel to the unstructured `JSONB` document. PostgreSQL's handling of JSON data really is first-class and unopinionated; you can use as much or as little as you like! + +[« Return to "Advanced Usage" for `PDODocument`][adv-pdo] + + +[see below]: #on-delete-set-null +[adv-pdo]: https://bitbadger.solutions/open-source/relational-documents/php/advanced-usage.html "Advanced Usage • PDODocument • Bit Badger Solutions" diff --git a/docs/advanced/related.md b/docs/advanced/related.md index 539f6eb..b75b50c 100644 --- a/docs/advanced/related.md +++ b/docs/advanced/related.md @@ -6,7 +6,7 @@ _NOTE: This page is longer than the ideal documentation page. Understanding how ## Overview -Document stores generally have fewer relationships than traditional relational databases, particularly those that arise when data is structured in [Third Normal Form][tnf]{target=_blank rel=noopener}; related collections are stored in the document, and ever-increasing surrogate keys (_a la_ sequences and such) do not play well with distributed data. Unless all data is stored in a single document, though, there will still be a natural relation between documents. +Document stores generally have fewer relationships than traditional relational databases, particularly those that arise when data is structured in [Third Normal Form][tnf]; related collections are stored in the document, and ever-increasing surrogate keys (_a la_ sequences and such) do not play well with distributed data. Unless all data is stored in a single document, though, there will still be a natural relation between documents. Thinking back to our earlier examples, we did not store the collection of rooms in each hotel's document; each room is its own document and contains the ID of the hotel as one of its properties. @@ -133,51 +133,54 @@ The `Results` module is implementation specific. Both libraries provide `Results ## Putting It All Together -The **Custom** static class/module has four methods/functions: +The **Custom** static class/module has seven methods/functions: - **List** requires a query, parameters, and a mapping function, and returns a list of documents. +- **JsonArray** is the same as `List`, but returns the documents as `string` in a JSON array. 
+- **WriteJsonArray** writes documents to a `PipeWriter` as they are read from the database; the result is the same as `JsonArray`, but no unified string is constructed.
 - **Single** requires a query, parameters, and a mapping function, and returns one or no documents (C# `TDoc?`, F# `'TDoc option`)
+- **JsonSingle** is the same as `Single`, but returns a JSON `string` instead (returning `{}` if no document is found).
 - **Scalar** requires a query, parameters, and a mapping function, and returns a scalar value (non-nullable; used for counts, existence, etc.)
 - **NonQuery** requires a query and parameters and has no return value

-> _Within each library, every other call is written in terms of `Custom.List`, `Custom.Scalar`, or `Custom.NonQuery`; your custom queries will use the same path the provided ones do!_
+> _Within each library, every other call is written in terms of these functions; your custom queries will use the same code the provided ones do!_

Let's jump in with an example. When we query for a room, let's say that we also want to retrieve its hotel information as well. We saw the query above, but here is how we can implement it using a custom query.

```csharp
// C#, All
-    // return type is Tuple?
-    var data = await Custom.Single(
-        $"SELECT r.data, h.data AS hotel_data
-             FROM room r
-                  INNER JOIN hotel h ON h.data->>'{Configuration.IdField()}' = r.data->>'HotelId'
-            WHERE r.{Query.WhereById("@id")}",
-        new[] { Parameters.Id("my-room-key") },
-        // rdr's type will be RowReader for PostgreSQL, SqliteDataReader for SQLite
-        rdr => Tuple.Create(Results.FromData(rdr), Results.FromDocument("hotel_data", rdr));
-    if (data is not null)
-    {
-        var (room, hotel) = data;
-        // do stuff with the room and hotel data
-    }
+// return type is Tuple<Room, Hotel>?
+var data = await Custom.Single(
+    $"SELECT r.data, h.data AS hotel_data
+         FROM room r
+              INNER JOIN hotel h ON h.data->>'{Configuration.IdField()}' = r.data->>'HotelId'
+        WHERE r.{Query.WhereById("@id")}",
+    new[] { Parameters.Id("my-room-key") },
+    // rdr's type will be RowReader for PostgreSQL, SqliteDataReader for SQLite
+    rdr => Tuple.Create(Results.FromData<Room>(rdr), Results.FromDocument<Hotel>("hotel_data", rdr)));
+if (data is not null)
+{
+    var (room, hotel) = data;
+    // do stuff with the room and hotel data
+}
```

```fsharp
// F#, All
-    // return type is (Room * Hotel) option
-    let! data =
-        Custom.single
-            $"""SELECT r.data, h.data AS hotel_data
-                  FROM room r
-                       INNER JOIN hotel h ON h.data->>'{Configuration.idField ()}' = r.data->>'HotelId'
-                 WHERE r.{Query.whereById "@id"}"""
-            [ idParam "my-room-key" ]
-            // rdr's type will be RowReader for PostgreSQL, SqliteDataReader for SQLite
-            fun rdr -> (fromData rdr), (fromDocument "hotel_data" rdr)
-    match data with
-    | Some (Room room, Hotel hotel) ->
-        // do stuff with room and hotel
-    | None -> ()
+// return type is (Room * Hotel) option
+let! data =
+    Custom.single
+        $"""SELECT r.data, h.data AS hotel_data
+              FROM room r
+                   INNER JOIN hotel h ON h.data->>'{Configuration.idField ()}' = r.data->>'HotelId'
+             WHERE r.{Query.whereById "@id"}"""
+        [ idParam "my-room-key" ]
+        // rdr's type will be RowReader for PostgreSQL, SqliteDataReader for SQLite
+        fun rdr -> (fromData rdr), (fromDocument "hotel_data" rdr)
+match data with
+| Some (room, hotel) ->
+    // do stuff with room and hotel
+| None -> ()
```

These queries use `Configuration.IdField` and `WhereById` to use the configured ID field.
Creating custom queries using these building blocks allows us to utilize the configured value without hard-coding it throughout our custom queries. If the configuration changes, these queries will pick up the new field name seamlessly.

@@ -186,43 +189,43 @@ While this example retrieves the entire document, this is not required. If we on

```csharp
// C#, All
-    // return type is Tuple?
-    var data = await Custom.Single(
-        $"SELECT r.data, h.data ->> 'Name' AS hotel_name
-             FROM room r
-                  INNER JOIN hotel h ON h.data->>'{Configuration.IdField()}' = r.data->>'HotelId'
-            WHERE r.{Query.WhereById("@id")}",
-        new[] { Parameters.Id("my-room-key") },
-        // PostgreSQL
-        row => Tuple.Create(Results.FromData(row), row.string("hotel_name")));
-        // SQLite; could use rdr.GetString(rdr.GetOrdinal("hotel_name")) below as well
-        // rdr => Tuple.Create(Results.FromData(rdr), rdr.GetString(1)));
+// return type is Tuple<Room, string>?
+var data = await Custom.Single(
+    $"SELECT r.data, h.data ->> 'Name' AS hotel_name
+         FROM room r
+              INNER JOIN hotel h ON h.data->>'{Configuration.IdField()}' = r.data->>'HotelId'
+        WHERE r.{Query.WhereById("@id")}",
+    new[] { Parameters.Id("my-room-key") },
+    // PostgreSQL
+    row => Tuple.Create(Results.FromData<Room>(row), row.@string("hotel_name")));
+    // SQLite; could use rdr.GetString(rdr.GetOrdinal("hotel_name")) below as well
+    // rdr => Tuple.Create(Results.FromData<Room>(rdr), rdr.GetString(1)));

-    if (data is not null)
-    {
-        var (room, hotelName) = data;
-        // do stuff with the room and hotel name
-    }
+if (data is not null)
+{
+    var (room, hotelName) = data;
+    // do stuff with the room and hotel name
+}
```

```fsharp
// F#, All
-    // return type is (Room * string) option
-    let! data =
-        Custom.single
-            $"""SELECT r.data, h.data->>'Name' AS hotel_name
-                  FROM room r
-                       INNER JOIN hotel h ON h.data->>'{Configuration.idField ()}' = r.data->>'HotelId'
-                 WHERE r.{Query.whereById "@id"}"""
-            [ idParam "my-room-key" ]
-            // PostgreSQL
-            fun row -> (fromData row), row.string "hotel_name"
-            // SQLite; could use rdr.GetString(rdr.GetOrdinal("hotel_name")) below as well
-            // fun rdr -> (fromData rdr), rdr.GetString(1)
-    match data with
-    | Some (Room room, string hotelName) ->
-        // do stuff with room and hotel name
-    | None -> ()
+// return type is (Room * string) option
+let! data =
+    Custom.single
+        $"""SELECT r.data, h.data->>'Name' AS hotel_name
+              FROM room r
+                   INNER JOIN hotel h ON h.data->>'{Configuration.idField ()}' = r.data->>'HotelId'
+             WHERE r.{Query.whereById "@id"}"""
+        [ idParam "my-room-key" ]
+        // PostgreSQL
+        fun row -> (fromData row), row.string "hotel_name"
+        // SQLite; could use rdr.GetString(rdr.GetOrdinal("hotel_name")) below as well
+        // fun rdr -> (fromData rdr), rdr.GetString(1)
+match data with
+| Some (room, hotelName) ->
+    // do stuff with room and hotel name
+| None -> ()
```

These queries are amazingly efficient, using 2 unique index lookups to return this data. Even though we do not have a foreign key between these two tables, simply being in a relational database allows us to retrieve this related data.
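Where a JSON API only needs to relay documents, the `JsonArray` variant from the list above avoids deserializing at all. A sketch, assuming the PostgreSQL `jsonFromData` result helper:

```fsharp
// F#, PostgreSQL - a sketch; jsonFromData maps the data column straight to JSON text
let! roomJson =
    Custom.jsonArray
        "SELECT data FROM room WHERE data->>'HotelId' = @id"
        [ idParam "my-hotel-key" ]
        jsonFromData
// roomJson is a string such as [{...},{...}] (or [] when nothing matches)
```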
@@ -231,19 +234,19 @@ Revisiting our "take these rooms out of service" SQLite query from the Basic Usa

```csharp
// C#, SQLite
-    var fields = [Field.GreaterOrEqual("RoomNumber", 221), Field.LessOrEqual("RoomNumber", 240)];
-    await Custom.NonQuery(
-        Sqlite.Query.ByFields(Sqlite.Query.Patch("room"), FieldMatch.All, fields,
-            new { InService = false }),
-        Parameters.AddFields(fields, []));
+Field[] fields = [Field.GreaterOrEqual("RoomNumber", 221), Field.LessOrEqual("RoomNumber", 240)];
+await Custom.NonQuery(
+    Sqlite.Query.ByFields(Sqlite.Query.Patch("room"), FieldMatch.All, fields,
+        new { InService = false }),
+    Parameters.AddFields(fields, []));
```

```fsharp
// F#, SQLite
-    let fields = [ Field.GreaterOrEqual "RoomNumber" 221; Field.LessOrEqual "RoomNumber" 240 ]
-    do! Custom.nonQuery
-        (Query.byFields (Query.patch "room") All fields {| InService = false |})
-        (addFieldParams fields []))
+let fields = [ Field.GreaterOrEqual "RoomNumber" 221; Field.LessOrEqual "RoomNumber" 240 ]
+do! Custom.nonQuery
+    (Query.byFields (Query.patch "room") All fields {| InService = false |})
+    (addFieldParams fields [])
```

This uses two field comparisons to incorporate the room number range instead of a `BETWEEN` clause; we would definitely want to have that field indexed if this was going to be a regular query or our data was going to grow beyond a trivial size.

@@ -252,13 +255,13 @@ _You may be thinking "wait - what's the difference between that and the regular `

```csharp
// C#, All
-    await Patch.ByFields("room", FieldMatch.Any, [Field.Between("RoomNumber", 221, 240)],
-        new { InService = false });
+await Patch.ByFields("room", FieldMatch.Any, [Field.Between("RoomNumber", 221, 240)],
+    new { InService = false });
```

```fsharp
// F#, All
-    do! Patch.byFields "room" Any [ Field.Between "RoomNumber 221 240 ] {| InService = false |}
+do! Patch.byFields "room" Any [ Field.Between "RoomNumber" 221 240 ] {| InService = false |}
```

## Going Even Further

### Updating Data in Place

@@ -269,7 +272,7 @@ One drawback to document databases is the inability to update values in place; h

```sql
-- SQLite
-UPDATE room SET data = json_set(data, 'Rate', data ->> 'Rate' * 1.1)
+UPDATE room SET data = json_set(data, '$.Rate', data->>'Rate' * 1.1)
```

If we get any more complex, though, Common Table Expressions (CTEs) can help us. Perhaps we decided that we only wanted to raise the rates for hotels in New York, Chicago, and Los Angeles, and we wanted to exclude any brand with the word "Value" in its name. A CTE lets us select the source data we need to craft the update, then use that in the `UPDATE`'s clauses.
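As noted earlier, these in-place updates can also run through `Custom.NonQuery` with parameters as required; here is a sketch of a "rate adjustment" version of the single-field update, with the multiplier passed as an assumed `@rate` parameter.

```fsharp
// F#, SQLite - a sketch; "@rate" and the function name are assumptions
open Microsoft.Data.Sqlite

let adjustRates (multiplier: float) =
    Custom.nonQuery
        "UPDATE room SET data = json_set(data, '$.Rate', data->>'Rate' * @rate)"
        [ SqliteParameter("@rate", multiplier) ]
```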
@@ -335,37 +338,37 @@ Let's walk through a short example using C# and PostgreSQL:

```csharp
// C#, PostgreSQL
-    using Npgsql.FSharp; // Needed for RowReader and Sql types
-    using static CommonExtensionsAndTypesForNpgsqlFSharp; // Needed for Sql functions
+using Npgsql.FSharp; // Needed for RowReader and Sql types
+using static CommonExtensionsAndTypesForNpgsqlFSharp; // Needed for Sql functions

-    // Stores metadata for a given user
-    public class MetaData
-    {
-        public string Id { get; set; } = "";
-        public string UserId { get; set; } = "";
-        public string Key { get; set; } = "";
-        public string Value { get; set; } = "";
-    }
+// Stores metadata for a given user
+public class MetaData
+{
+    public string Id { get; set; } = "";
+    public string UserId { get; set; } = "";
+    public string Key { get; set; } = "";
+    public string Value { get; set; } = "";
+}

-    // Static class to hold mapping functions
-    public static class Map
-    {
-        // These parameters are the column names from the underlying table
-        public MetaData ToMetaData(RowReader row) =>
-            new MetaData
-            {
-                Id = row.string("id"),
-                UserId = row.string("user_id"),
-                Key = row.string("key"),
-                Value = row.string("value")
-            };
-    }
+// Static class to hold mapping functions
+public static class Map
+{
+    // These parameters are the column names from the underlying table
+    public static MetaData ToMetaData(RowReader row) =>
+        new MetaData
+        {
+            Id = row.@string("id"),
+            UserId = row.@string("user_id"),
+            Key = row.@string("key"),
+            Value = row.@string("value")
+        };
+}

-    // somewhere in a class, retrieving data
-    public Task<List<MetaData>> MetaDataForUser(string userId) =>
-        Document.Custom.List("SELECT * FROM user_metadata WHERE user_id = @userId",
-            new { Tuple.Create("@userId", Sql.string(userId)) },
-            Map.ToMetaData);
+// somewhere in a class, retrieving data
+public Task<List<MetaData>> MetaDataForUser(string userId) =>
+    Document.Custom.List("SELECT * FROM user_metadata WHERE user_id = @userId",
+        new[] { Tuple.Create("@userId", Sql.@string(userId)) },
+        Map.ToMetaData);
```

For F#, the `using static` above is not needed; that module is auto-opened when `Npgsql.FSharp` is opened. For SQLite in either language, the mapping function uses a `SqliteDataReader` object, which implements the standard ADO.NET `DataReader` functions of `Get[Type](idx)` (and `GetOrdinal(name)` for the column index).
diff --git a/docs/advanced/transactions.md b/docs/advanced/transactions.md
index 039ebab..703b075 100644
--- a/docs/advanced/transactions.md
+++ b/docs/advanced/transactions.md
@@ -10,30 +10,30 @@ The `Configuration` static class/module of each library [provides a way to obtai

```csharp
// C#, All
-    // "conn" is assumed to be either NpgsqlConnection or SqliteConnection
-    await using var txn = await conn.BeginTransactionAsync();
-    try
-    {
-        // do stuff
-        await txn.CommitAsync();
-    }
-    catch (Exception ex)
-    {
-        await txn.RollbackAsync();
-        // more error handling
-    }
+// "conn" is assumed to be either NpgsqlConnection or SqliteConnection
+await using var txn = await conn.BeginTransactionAsync();
+try
+{
+    // do stuff
+    await txn.CommitAsync();
+}
+catch (Exception ex)
+{
+    await txn.RollbackAsync();
+    // more error handling
+}
```

```fsharp
// F#, All
-    // "conn" is assumed to be either NpgsqlConnection or SqliteConnection
-    use! txn = conn.BeginTransactionAsync ()
-    try
-        // do stuff
-        do! txn.CommitAsync ()
-    with ex ->
-        do! txt.RollbackAsync ()
-        // more error handling
+// "conn" is assumed to be either NpgsqlConnection or SqliteConnection
+use!
+try
+    // do stuff
+    do! txn.CommitAsync ()
+with ex ->
+    do! txn.RollbackAsync ()
+    // more error handling
 ```
 
 ## Executing Queries on the Connection
 
@@ -42,30 +42,30 @@ This precise scenario was the reason that all methods and functions are implemen
 
 ```csharp
 // C#, All ("conn" is our connection object)
-    await using var txn = await conn.BeginTransactionAsync();
-    try
-    {
-        await conn.PatchById("user_table", userId, new { LastSeen = DateTime.Now });
-        await conn.PatchById("security", userId, new { FailedLogOnCount = 0 });
-        await txn.CommitAsync();
-    }
-    catch (Exception ex)
-    {
-        await txn.RollbackAsync();
-        // more error handling
-    }
+await using var txn = await conn.BeginTransactionAsync();
+try
+{
+    await conn.PatchById("user_table", userId, new { LastSeen = DateTime.Now });
+    await conn.PatchById("security", userId, new { FailedLogOnCount = 0 });
+    await txn.CommitAsync();
+}
+catch (Exception ex)
+{
+    await txn.RollbackAsync();
+    // more error handling
+}
 ```
 
 ```fsharp
 // F#, All ("conn" is our connection object)
-    use! txn = conn.BeginTransactionAsync()
-    try
-        do! conn.patchById "user_table" userId {| LastSeen = DateTime.Now |}
-        do! conn.patchById "security" userId {| FailedLogOnCount = 0 |}
-        do! txn.CommitAsync()
-    with ex ->
-        do! txn.RollbackAsync()
-        // more error handling
+use! txn = conn.BeginTransactionAsync()
+try
+    do! conn.patchById "user_table" userId {| LastSeen = DateTime.Now |}
+    do! conn.patchById "security" userId {| FailedLogOnCount = 0 |}
+    do! txn.CommitAsync()
+with ex ->
+    do! txn.RollbackAsync()
+    // more error handling
 ```
 
 ### A Functional Alternative
 
@@ -74,20 +74,20 @@ The PostgreSQL library has a static class/module called `WithProps`; the SQLite
 
 ```csharp
 // C#, PostgreSQL
-    using Npgsql.FSharp;
-    // ...
-    var props = Sql.existingConnection(conn);
-    // ...
-    await WithProps.Patch.ById("user_table", userId, new { LastSeen = DateTime.Now }, props);
+using Npgsql.FSharp;
+// ...
+var props = Sql.existingConnection(conn);
+// ...
+await WithProps.Patch.ById("user_table", userId, new { LastSeen = DateTime.Now }, props);
 ```
 
 ```fsharp
 // F#, PostgreSQL
-    open Npgsql.FSharp
-    // ...
-    let props = Sql.existingConnection conn
-    // ...
-    do! WithProps.Patch.ById "user_table" userId {| LastSeen = DateTime.Now |} props
+open Npgsql.FSharp
+// ...
+let props = Sql.existingConnection conn
+// ...
+do! WithProps.Patch.byId "user_table" userId {| LastSeen = DateTime.Now |} props
 ```
 
 If we do not want to qualify with `WithProps` or `WithConn`, C# users can add `using static [WithProps|WithConn];` to bring these functions into scope; F# users can add `open BitBadger.Documents.[Postgres|Sqlite].[WithProps|WithConn]` to do the same. However, in C#, this will affect the entire file, and in F#, it will affect the file from that point through the end of the file. Unless you want to go all-in with the connection-last functions, it is probably better to qualify the occasional call.
diff --git a/docs/basic-usage.md b/docs/basic-usage.md
index 1ca46a6..6cb6caa 100644
--- a/docs/basic-usage.md
+++ b/docs/basic-usage.md
@@ -32,15 +32,15 @@ The library provides three different ways to save data. The first equates to a S
 
 ```csharp
 // C#, All
-    var room = new Room(/* ... */);
-    // Parameters are table name and document
-    await Document.Insert("room", room);
+var room = new Room(/* ... */);
+// Parameters are table name and document
+await Document.Insert("room", room);
 ```
 
 ```fsharp
 // F#, All
-    let room = { Room.empty with (* ... *) }
-    do! insert "room" room
+let room = { Room.empty with (* ... *) }
+do! insert "room" room
 ```
 
 The second is `Save`; it inserts the data if it does not exist and replaces the document if it does exist (what some call an "upsert"). It utilizes the `ON CONFLICT` syntax to ensure an atomic statement. Its parameters are the same as those for `Insert`.
 
@@ -49,37 +49,37 @@ The third equates to a SQL `UPDATE` statement. `Update` applies to a full docume
 
 ```csharp
 // C#, All
-    var hotel = await Document.Find.ById<string, Hotel>("hotel", hotelId);
-    if (!(hotel is null))
-    {
-        // update hotel properties from the posted form
-        await Update.ById("hotel", hotel.Id, hotel);
-    }
+var hotel = await Document.Find.ById<string, Hotel>("hotel", hotelId);
+if (!(hotel is null))
+{
+    // update hotel properties from the posted form
+    await Update.ById("hotel", hotel.Id, hotel);
+}
 ```
 
 ```fsharp
 // F#, All
-    match! Find.byId<string, Hotel> "hotel" hotelId with
-    | Some hotel ->
-        do! Update.byId "hotel" hotel.Id updated
-            { hotel with (* properties from posted form *) }
-    | None -> ()
+match! Find.byId<string, Hotel> "hotel" hotelId with
+| Some hotel ->
+    let updated = { hotel with (* properties from posted form *) }
+    do! Update.byId "hotel" hotel.Id updated
+| None -> ()
 ```
 
 For the next example, suppose we are upgrading our hotel, and need to take rooms 221-240 out of service*. We can utilize a patch via JSON Path** to accomplish this.
 
 ```csharp
 // C#, PostgreSQL
-    await Patch.ByJsonPath("room",
-        "$ ? (@.HotelId == \"abc\" && (@.RoomNumber >= 221 && @.RoomNumber <= 240)",
-        new { InService = false });
+await Patch.ByJsonPath("room",
+    "$ ? (@.HotelId == \"abc\" && (@.RoomNumber >= 221 && @.RoomNumber <= 240))",
+    new { InService = false });
 ```
 
 ```fsharp
 // F#, PostgreSQL
-    do! Patch.byJsonPath "room"
-        "$ ? (@.HotelId == \"abc\" && (@.RoomNumber >= 221 && @.RoomNumber <= 240)"
-        {| InService = false |};
+do! Patch.byJsonPath "room"
+    "$ ? (@.HotelId == \"abc\" && (@.RoomNumber >= 221 && @.RoomNumber <= 240))"
+    {| InService = false |}
 ```
 
 _* - we are ignoring the current reservations, end date, etc. This is a very naïve example!_
 
@@ -88,13 +88,13 @@ _* - we are ignoring the current reservations, end date, etc. This is a very naïv
 
 ```csharp
 // C#, Both
-    await Patch.ByFields("room", FieldMatch.Any, [Field.Between("RoomNumber", 221, 240)],
-        new { InService = false });
+await Patch.ByFields("room", FieldMatch.Any, [Field.Between("RoomNumber", 221, 240)],
+    new { InService = false });
 ```
 
 ```fsharp
 // F#, Both
-    do! Patch.byFields "room" Any [ Field.Between "RoomNumber" 221 240 ] {| InService = false |}
+do! Patch.byFields "room" Any [ Field.Between "RoomNumber" 221 240 ] {| InService = false |}
 ```
 
 This could also be done with `All`/`FieldMatch.All` and `GreaterOrEqual` and `LessOrEqual` field comparisons, or even a custom query; these are fully explained in the [Advanced Usage][] section. A quick sketch of that first alternative follows.
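Here is that `All` variant as a minimal F# sketch, mirroring the values above (the same shape works in C# with `FieldMatch.All`):

```fsharp
// F#, Both - the same range, expressed as two bounded comparisons instead of Between
do! Patch.byFields
        "room" All
        [ Field.GreaterOrEqual "RoomNumber" 221; Field.LessOrEqual "RoomNumber" 240 ]
        {| InService = false |}
```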
diff --git a/docs/getting-started.md b/docs/getting-started.md
index 900cca0..496f920 100644
--- a/docs/getting-started.md
+++ b/docs/getting-started.md
@@ -141,21 +141,21 @@ Let's create a general-purpose index on hotels, a "HotelId" index on rooms, and
 
 ```csharp
 // C#, PostgreSQL
-    await Definition.EnsureTable("hotel");
-    await Definition.EnsureDocumentIndex("hotel", DocumentIndex.Full);
-    await Definition.EnsureTable("room");
-    // parameters are table name, index name, and fields to be indexed
-    await Definition.EnsureFieldIndex("room", "hotel_id", new[] { "HotelId" });
-    await Definition.EnsureDocumentIndex("room", DocumentIndex.Optimized);
+await Definition.EnsureTable("hotel");
+await Definition.EnsureDocumentIndex("hotel", DocumentIndex.Full);
+await Definition.EnsureTable("room");
+// parameters are table name, index name, and fields to be indexed
+await Definition.EnsureFieldIndex("room", "hotel_id", new[] { "HotelId" });
+await Definition.EnsureDocumentIndex("room", DocumentIndex.Optimized);
 ```
 
 ```fsharp
 // F#, PostgreSQL
-    do! Definition.ensureTable "hotel"
-    do! Definition.ensureDocumentIndex "hotel" Full
-    do! Definition.ensureTable "room"
-    do! Definition.ensureFieldIndex "room" "hotel_id" [ "HotelId" ]
-    do! Definition.ensureDocumentIndex "room" Optimized
+do! Definition.ensureTable "hotel"
+do! Definition.ensureDocumentIndex "hotel" Full
+do! Definition.ensureTable "room"
+do! Definition.ensureFieldIndex "room" "hotel_id" [ "HotelId" ]
+do! Definition.ensureDocumentIndex "room" Optimized
 ```
 
 ### SQLite
 
@@ -166,16 +166,16 @@ Let's create hotel and room tables, then index rooms by hotel ID and room number
 
 ```csharp
 // C#, SQLite
-    await Definition.EnsureTable("hotel");
-    await Definition.EnsureTable("room");
-    await Definition.EnsureIndex("room", "hotel_and_nbr", new[] { "HotelId", "RoomNumber" });
+await Definition.EnsureTable("hotel");
+await Definition.EnsureTable("room");
+await Definition.EnsureIndex("room", "hotel_and_nbr", new[] { "HotelId", "RoomNumber" });
 ```
 
 ```fsharp
 // F#, SQLite
-    do! Definition.ensureTable "hotel"
-    do! Definition.ensureTable "room"
-    do! Definition.ensureIndex "room" "hotel_and_nbr", [ "HotelId"; "RoomNumber" ]
+do! Definition.ensureTable "hotel"
+do! Definition.ensureTable "room"
+do! Definition.ensureIndex "room" "hotel_and_nbr" [ "HotelId"; "RoomNumber" ]
 ```
 
 Now that we have tables, let's [use them][]!
diff --git a/docs/toc.yml b/docs/toc.yml
index f6ca704..982e8f0 100644
--- a/docs/toc.yml
+++ b/docs/toc.yml
@@ -11,4 +11,13 @@
       href: advanced/related.md
     - name: Transactions
       href: advanced/transactions.md
-
\ No newline at end of file
+  - name: Referential Integrity
+    href: advanced/integrity.md
+- name: Upgrading
+  items:
+    - name: v3 to v4
+      href: upgrade/v4.md
+    - name: v2 to v3
+      href: upgrade/v3.md
+    - name: v1 to v2
+      href: upgrade/v2.md
diff --git a/docs/upgrade/v2.md b/docs/upgrade/v2.md
new file mode 100644
index 0000000..fcf8acf
--- /dev/null
+++ b/docs/upgrade/v2.md
@@ -0,0 +1,37 @@
+# Migrating from v1 to v2
+
+_NOTE: This was an upgrade for the `BitBadger.Npgsql.Documents` library, which this library replaced as of v3._
+
+## Why
+
+In version 1, the document tables used by this library had two columns: `id` and `data`. `id` served as the primary key, and `data` was the `JSONB` column for the document. Since its release, the author learned that a field in a `JSONB` column could have a unique index that would then serve the role of a primary key.
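To make that concrete, a v2-style table ends up looking something like this (a sketch only; `my_table` and the `Id` field name are placeholders, mirroring the migration SQL shown below):

```sql
-- Hypothetical v2-style layout: the document is the only column,
-- and a unique index on its ID field plays the primary-key role
CREATE TABLE IF NOT EXISTS my_table (data JSONB NOT NULL);
CREATE UNIQUE INDEX IF NOT EXISTS idx_my_table_key ON my_table ((data ->> 'Id'));
```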
+ +Version 2 of this library implements this change, both in table setup and in how it constructs queries that occur by a document's ID. + +## How + +On the [GitHub release page][], there is a MigrateToV2 utility program - one for Windows, and one for Linux. Download and extract the single file in the archive; it requires no installation. It uses an environment variable for the connection string, and takes a table name and an ID column field via the command line. + +A quick example under Linux/bash (assuming the ID field in the JSON document is named `Id`)... +``` +export PGDOC_CONN_STR="Host=localhost;Port=5432;User ID=example_user;Password=example_pw;Database=my_docs" +./MigrateToV2 ex.doc_table +./MigrateToV2 ex.another_one +``` + +If the ID field has a different name, it can be passed as a second parameter. The utility will display the table name and ID field and ask for confirmation; if you are scripting it, you can set the environment variable `PGDOC_I_KNOW_WHAT_I_AM_DOING` to `true`, and it will bypass this confirmation. Note that the utility itself is quite basic; you are responsible for giving it sane input. If you have customized the tables or the JSON serializer, though, keep reading. + +## What + +If you have extended the original tables, you may need to handle this migration within either PostgreSQL/psql or your code. The process entails two steps. First, create a unique index on the ID field; in this example, we'll use `name` for the example ID field. Then, drop the `id` column. The below SQL will accomplish this for the fictional `my_table` table. + +```sql +CREATE UNIQUE INDEX idx_my_table_key ON my_table ((data ->> 'name')); +ALTER TABLE my_table DROP COLUMN id; +``` + +If the ID field is different, you will also need to tell the library that. Use `Configuration.UseIdField("name")` (C#) / `Configuration.useIdField "name"` (F#) to specify the name. This will need to be done before queries are executed, as the library uses this field for ID queries. See the [Setting Up instructions][setup] for details on this new configuration parameter. + + +[GitHub release page]: https://github.com/bit-badger/BitBadger.Npgsql.Documents +[setup]: ../getting-started.md#configuring-document-ids "Getting Started • BitBadger.Documents" diff --git a/docs/upgrade/v3.md b/docs/upgrade/v3.md new file mode 100644 index 0000000..8622ebd --- /dev/null +++ b/docs/upgrade/v3.md @@ -0,0 +1,11 @@ +# Upgrade from v2 to v3 + +The biggest change with this release is that `BitBadger.Npgsql.Documents` became `BitBadger.Documents`, a set of libraries providing the same API over both PostgreSQL and SQLite (provided the underlying database supports it). Existing PostgreSQL users should have a smooth transition. 
+
+* Drop `Npgsql` from namespace (`BitBadger.Npgsql.Documents` becomes `BitBadger.Documents`)
+* Add implementation (PostgreSQL namespace is `BitBadger.Documents.Postgres`, SQLite is `BitBadger.Documents.Sqlite`)
+* Both C# and F# idiomatic functions will be visible when those namespaces are `import`ed or `open`ed
+* There is a `Field` constructor for creating field conditions (though look at [v4][]'s changes here as well)
+
+
+[v4]: ./v4.md#op-type-removal "Upgrade from v3 to v4 • BitBadger.Documents"
diff --git a/docs/upgrade/v4.md b/docs/upgrade/v4.md
new file mode 100644
index 0000000..ef0660d
--- /dev/null
+++ b/docs/upgrade/v4.md
@@ -0,0 +1,35 @@
+# Upgrade from v3 to v4
+
+## The Quick Version
+
+- Add `BitBadger.Documents.[Postgres|Sqlite].Compat` to your list of `using` (C#) or `open` (F#) statements. This namespace has deprecated versions of the methods/functions that were removed in v4. These generate warnings, rather than the "I don't know what this is" compiler errors.
+- If your code referenced `Query.[Action].[ById|ByField|etc]`, the two halves of the query - the part before the `WHERE` clause and the condition itself - are now built separately. A query to patch a document by its ID would go from `Query.Patch.ById(tableName)` to `Query.ById(Query.Patch(tableName))`. These functions may also require more parameters; keep reading for details on that.
+- Custom queries had to be used when querying more than one field, or when the results in the database needed to be ordered. v4 provides solutions for both of these within the library itself.
+
+## `ByField` to `ByFields` and PostgreSQL Numbers
+
+All methods/functions that ended with `ByField` now end with `ByFields`, and take a `FieldMatch` case (`Any` equates to `OR`, `All` equates to `AND`) and a sequence of `Field` objects. These `Field`s need to have their values as well, because the PostgreSQL library will now cast the field from the document to numeric and bind the parameter as-is.
+
+That is an action-packed paragraph; these changes have several ripple effects throughout the library:
+- Queries like `Query.Find.ByField` would need the full collection of fields to generate the SQL. Instead, `Query.ByFields` takes a "first-half" statement as its first parameter, then the field match and parameters as its next two.
+- `Field` instances in version 3 needed to have a parameter name, which was specified externally to the object itself. In version 4, `ParameterName` is an optional member of the `Field` object, and the library will generate parameter names if it is missing. In both C# and F#, the `.WithParameterName(string)` method can be chained to the `Field.[OP]` call to specify a name, and F# users can also use the language's `with` keyword (`{ Field.EQ "TheField" "value" with ParameterName = Some "@theField" }`).
+
+## `Op` Type Removal
+
+The `Op` type has been replaced with a `Comparison` type which captures both the type of comparison and the object of the comparison in one type. This is considered an internal implementation detail, as that type was not intended for use outside the library; however, it was `public`, so its removal warrants at least a mention.
+
+Additionally, the addition of `In` and `InArray` field comparisons drove a change to the `Field` type's static creation functions. These now have the comparison spelled out, as opposed to the two-to-three character abbreviations. (These abbreviated functions still exist as aliases, so this change will not result in compile errors.)
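As a quick illustration (the field name and value here are invented for the example), the old and new spellings side by side in F#:

```fsharp
// v3-era abbreviation - still compiles, since the alias remains
let oldStyle = Field.EQ "City" "Atlanta"
// v4 spelled-out equivalent
let newStyle = Field.Equal "City" "Atlanta"
```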
+The functions to create fields are:
+
+| Old   | New                   |
+|:-----:|-----------------------|
+| `EQ`  | `Equal`               |
+| `GT`  | `Greater`             |
+| `GE`  | `GreaterOrEqual`      |
+| `LT`  | `Less`                |
+| `LE`  | `LessOrEqual`         |
+| `NE`  | `NotEqual`            |
+| `BT`  | `Between`             |
+| `IN`  | `In` _(since v4 rc1)_ |
+| --    | `InArray` _(v4 rc4)_  |
+| `EX`  | `Exists`              |
+| `NEX` | `NotExists`           |
diff --git a/index.md b/index.md
index af1d45f..45602d8 100644
--- a/index.md
+++ b/index.md
@@ -83,11 +83,11 @@ Issues can be filed on the project's GitHub repository.
 
 [Getting Started]: ./docs/getting-started.md "Getting Started • BitBadger.Documents"
 [Basic Usage]: ./docs/basic-usage.md "Basic Usage • BitBadger.Documents"
 [Advanced Usage]: ./docs/advanced/index.md "Advanced Usage • BitBadger.Documents"
-[v3v4]: /open-source/relational-documents/dotnet/upgrade-v3-to-v4.html "Upgrade from v3 to v4 • BitBadger.Documents • Bit Badger Solutions"
+[v3v4]: ./docs/upgrade/v4.md "Upgrade from v3 to v4 • BitBadger.Documents"
 [v4rel]: https://git.bitbadger.solutions/bit-badger/BitBadger.Documents/releases/tag/v4 "Version 4 • Releases • BitBadger.Documents • Bit Badger Solutions Git"
-[v2v3]: /open-source/relational-documents/dotnet/upgrade-v2-to-v3.html "Upgrade from v2 to v3 • BitBadger.Documents • Bit Badger Solutions"
+[v2v3]: ./docs/upgrade/v3.md "Upgrade from v2 to v3 • BitBadger.Documents"
 [v3rel]: https://git.bitbadger.solutions/bit-badger/BitBadger.Documents/releases/tag/v3 "Version 3 • Releases • BitBadger.Documents • Bit Badger Solutions Git"
-[v1v2]: /open-source/relational-documents/dotnet/upgrade-v1-to-v2.html "Upgrade from v1 to v2 • BitBadger.Npgsql.Documents • Bit Badger Solutions"
+[v1v2]: ./docs/upgrade/v2.md "Upgrade from v1 to v2 • BitBadger.Documents"
 [v2rel]: https://github.com/bit-badger/BitBadger.Npgsql.Documents/releases/tag/v2 "Version 2 • Releases • BitBadger.Npgsql.Documents • GitHub"
 [MongoDB]: https://www.mongodb.com/ "MongoDB"
 [Npgsql.FSharp]: https://zaid-ajaj.github.io/Npgsql.FSharp/#/ "Npgsql.FSharp"
-- 
2.47.2

From f80914daef996007d3bd7a1123cca8c889c6a07d Mon Sep 17 00:00:00 2001
From: "Daniel J.
Summers" Date: Fri, 11 Apr 2025 21:27:12 -0400 Subject: [PATCH 21/22] Add template tweaks to docfx site --- bitbadger-doc.png | Bin 0 -> 12624 bytes doc-template/public/main.css | 4 ++++ doc-template/public/main.js | 10 ++++++++++ docfx.json | 14 ++++++++++---- docs/getting-started.md | 34 +++++++++++++++++----------------- 5 files changed, 41 insertions(+), 21 deletions(-) create mode 100644 bitbadger-doc.png create mode 100644 doc-template/public/main.css create mode 100644 doc-template/public/main.js diff --git a/bitbadger-doc.png b/bitbadger-doc.png new file mode 100644 index 0000000000000000000000000000000000000000..22b1fe2e111d43569dcb24ac686659cf9ccbe42d GIT binary patch literal 12624 zcmeHtcT`i`)^C6ap$dW;nnsW=kkAw9AiYMqAcPQVLhrpJQk8b-AXPfjoAe+eD7`8m zA_CGxy26W|bMHNOyl;#*?ila;??!XXJFCHxxQKL8U>1HO7j?nsmu#M#x!+763>xcfL`AQ*2uYXHD|4wtw|-3y@# zzYxBs+?C}Mr;wqyLCV^x{8dqPGD(7&IRM;q=+oJQ=8YyDZb*5kR#@+#9i|#nT-&Dj zKEQDJLz17=(t2Fq6)+(t` z`|A>kONBnSe1r~P{f33>_;nFG6{IQ4rRmVHg#NOyn7S&Os!+%=z~y=tgIQspTatVX z__FEtK+xL@x1E;Yccsu_ihd%^reSxXTh?ynmPdT@Aok!ujaP4kKHuiKw0PQDqE*%g ze*10btnPH8y+1fug~cY)j^2s(Eq0D{E#M=FZ;*EnzSpdJ{dVA|){lG4YtM#=-^Tut z{$0`~+~|&H%~*GZm3`lMpRWEmkNGR#3fB)GEA;IvJhabyoFAGSe@0Z&-L8Gap+cLV z7b(PBLswwN)i0khLN9%enihPbe#?+CmW8rZKJ4ph_r=l?wz6lUFpWVAU?v#952JA3 zAs%jB@xOUjYaa-bCNcD#H^%|$ato*DO}>M`Go%7l^BFJ5Ot%3F+L74TRSbn33=~~w zi~;v)!0nfOr`uZ^MqEw-R<;JunU|Jm!FKD>5U)&;4tneh&K-F=!Wo?V*8DYAhdsV3$j6e zi?};Tu^DOVKop!@F%S^}5dkRwJ#RZtAvS3Wh@`8PH3F%q^cMu-N{Y?a-Q5`>DCp(o zCEz72;N)r}2!q4nf>0qrAt8Q(1izb)qdUr*-_ecz7sQ_!iWoPvtDUpEos%Qv7beQm z$-`ZWjg2r4`G)&kqHS*h?zZ*hO|0nLhN&mz5-@*hbO-+QN6WZgKd8&$1 zY`^A5SUI8XtPsDOFsQhtxG)OJZv_<-<`;p&(fn|XgcZLfN*rwkhYQ1nEJgnYrRwPB zj&ej}enAny1?&hoaJ03Al@%Prk3oxB@r#IJgbCUhYko19B}QCS2qR%>E%Y}CZC5)& zR-&+fx9S&^6#)t=1``t!vPAR4L@h=5MPN`-ez*iwoL@u?1IM7mg<*uoZzwA?LdnS$ zha!a24u`VA2s%63{2us4I6_WGRfU9W2V-nxG)XreWvk;r*`xJv$uczB}p{ zn=o+^5fLG%xHw!~NK8Tk_Aem=jH??V6MtdCpaQ~Tzej$B1wn9zKrHH4o)Q3l_Y-_U zD7a!!?oO_HPEJ@UwqGVeerf)5Sd&mrRw#FrBFY^@0EG&PBA_A&7)(zHhJe8l!s7gd zOX%O^oviGveg0q4zlsMU`De&g?A!?R`~2?uv!d=}T>iZJ^Ac(3#yEy~dbLs&ijO4on%+x-uzU@2rNVFk6u@DoA~$00)a3BL;U|D z4E`5kf`2(I_-oDhhsTnF{}(5czXkrb$Po1Y>?15*gtbucpNrvNoc&sM{ue)g#o~W) z1_JbdC;5-~{V!errRzUp;6D=nZ+88cuK$RE|48`1+4cX8E{cC`r!bC$yC5&ZX6frJ zB#E%mBDGXkRs>x9`sQ_%{OTchRxx%104V8y9YlbPtY1$9$lO&m?~$zosfg&gqPA^J z0RRXE_zWKVPkN${EhvMGuP%FMs4#+Zk{Cs{Y)K}IXP#d zjfowliyg&-F|kh|6w%xkilVfapow@}YFXmM*!`{-6|$;!g~?DQIR|w8s17oNyR`eV zKmD;Ny;2CwM2?5LGDX$VDdXCm8b_A5tHNKlwqSloU!BfJE}As6mS(up1ReyerrYG& zBq!s+)MzS=g?Go*4e50^@v_X|FSBM)QV5CcK+F@MBH$i`n*JsA@k zGvO&p9#yk1DnuexEA?;d-%cSko^KAYR5wZZJHHL$eWPJ;T`s#*ll)frrtdqGi};_| z`xlwx*2P@)Pe*eFLt7Ica%Rx>gd(pfQUP@CB;R6}svhIid9m1M(s5q9ZdGxdka9VtYp^(t(`@dp1SX$wE+%^a}M7A}T6S z_ih`>AN8#a$NjALYtW#K{%lS`F*< ztUXLj{1|Z@%%BcCU|Y#_s#OM9SrJuyk~O|JylD&_bD2-UtUb1WQr$yi!U-doBVmTa2(H=MBq)Utf|W6-{Y`As)&p zE1!BpnV&MMV^|lGa4TrE_m|};m%;w)TqD;YtH8sp0OEk3PL8D>cy7;?D>@C!?;a~S zlj1-*X&dIi17O~jC(n$ohCNDDotF?>?|qSVO%}GZ1`bYgFmYgEWb9r^kck%_kszsk z%>~6n?`QN(xWCNDZ!QTby^Caz1(|}JNj~jwIbNYnzo+v6CRz$YIq_nBdC+x}qw)T@A%3F#oK{sxYyDVAoG%iz%UWVdnc_Ja7 zAo#9F`5gsAM2Vz24}k~AjsnK1l_^xW!h=UibJp^VXmq*PXJGa-5&#R;(y==&TwHgu z+$zjFUuyXV!y+x8hgXegLGgJaI^3I9H50vD!G9>-W(-a2f|H?R@^WbP&^mQ@)K3Db zQ)%Cs^ful(mJ7GMCq7}(oBGsF|FoBdAte>3 zR`f#0G&U#aMmTDwro`icqbzr-CVDqd%b7OcfL#Sg(gy)XZTgA@PsqPLn0GT$MR+!bGYG< zZ~&bg&!nOR6cBD10I6*`q9zVaz6PcPuPtS@4CkG9-&{!dkhBSSz5gQWa)QRz!pr@y zSh0Qmp(_S{56;%41CNwD7e+rUe`Q^0!p@ErF`IjDpUm5@_MC1vvV??`tv>r^>QD?7 zZP`-r3S3%kD9N5u4}Yomx~;joX&@&f%iU{dHm1YxNNy0M=Ac(5ot$x&6$^TjT!W*W 
zb$>l#jqJ?TYHKp&>%M4R&7Qt6ZqZ{tn|&*mxmcsgfuq7l6NS3bQc0{hf92)0wj9jj z0GbfR-6ZBGL{OT6(q%i%$DcOL9+VyO>=QIQUkLXEdny=uBUoxOKDQI$PE zGjRT;D$}ubJv+#BpU;MhhQ_xMllHT#DHfKq+3ahvx{(0S@ryApZ`qux+G1R2YYM9m zABI~oa2#fb(SKX`NRpENd=syfkXK!OGwL#H4?UqY2cPqVKLNr>3(z?x4sj6`S`8cQ zkJ-Rrt1+JH4t_Tq+SfzGA=<>p-A)pPFg#dTD>fwN4=fb=3PTKsuEjr{)tGwCr|qQXW{DdX`xoV9d)w(rMyi zI-oih5^mBEsa7P+(+;*6Pwyc+a(~nk`q_ z#GKCi-#Jihh90sB#NmJU$FNWma$U77-)YG`Q`M<6xFQ<%O5%HU$X7<}% zlB1%cx^t`$gYon~B!U4w*1+vgN)Cx@=BUnBVCKBQ zxovylc-YC`VZecumu~Wr<-yP;G9ne9Yrqu{b*}bKPR*(-+^TNY2e-jD3xSs3<9p+2 zY9;CJd63^R-`mJqaC|y_;aD#rwN0zrhP%6qw5#{{RBPGqnpJy#hpK`l7=}1B(Cjc# z5J$|~QsX7C^K!$Nm(PeK(2q>ss3yhd@TCE}COD=5eHNwL{<^)2qH637a$r>u=(qup z+OJ!AqEgcS;p6wd1o5m3JYP=0=acSfiV0pi`K}oca`hsdQ5LPeL*RQ?;>69dz%qa5 zim|>02Qi!yK4c2h98Wp#nfUsUnuf+P$cZz>%({O4s9kUCg#SUgyuXgx|y#uM1pee=ie!`5~D11bQv z^{`ACH-B0kMMHhjPbi1U#m%qlbUQ6SJu753@7QwCMH1PkP=t0xCDWOTNBH}Z^}LH( zqH0T#M-@{AK32Vmq;%Q{k5wh*?cc+VLsF6Q&hf}13OqbzuPn{f2~OW)la3XFIQ z_|tqG_;+`JyK(09diN(y&cd5XK>$g^t_GsxDb?o-hVzX(koGMF|M)FaTUs8lE#rj} zIv6XEMx00j6TdkF<;Nv18Uta{> zF`b3TA=M2<=}gk@F5iaW#$a{w2c*_aG}pSjxT%L3Vqx42YOf0Km+A$Sv*(^A&3|y) z)b6}^?|0Z~uhRh2AMpGErRef`(>)g-yv3A<9bTy!m#m*DFE6Jd&Y2m#M2llmVG15= zN7V_ZV_7HY8*je9=sJ%PK4dE2*28bml}nsE5#N#1ropXm71I zlZJIub%}IQNf@e-N7NTB8#Copc3pZ}69WNyr3yTF!VLHSQV9I~L^%-d*?-l^uUgl& zZq1~;eoDtgoZ@v7TprfFsK?X07OtrI)h{-+L zR{VK|WuQAlbW@8-4=~eyG8)jd;Qv#T_xnvx;O6%*+R2Or6&|Q1OzIW4KvOJ0pyXhy*!XehVjt2lyV%Ky=el9g$B>5b%Bxg>freW^8|ddS2Q8VOquuY@ zCZEsiRmNf3#xX%xK=8tNHv_^VvB@Q&YdZ;&J~>jhGjC!PwcA{(HwZ}c zL_>AdM8Sc&h#^^TiGQ}#KnG5zai#h+5Kt+K%%L2(W@x@!Tz5+5wIlt5_Z1nqa)L^B zFU*;AgwgT66>5An>=g#i64m{Y#o*~`Q#ctXSVG~xhm@#ho_+q<#&;9G>YAn7WwSxU z{Z0ofd*K41xLK(ZiVgP5QuYAQT!6k{j$Ycj(vIdzynjGrwo4tJWK(YEEc4MS^|sU{xY(h*=h-_nf~m-h9tb{ z(5K_!Gd>ooq3-YP-n*%Mjrby+L87r)+hXr0R-oYa%DGZBS?`^aTshv|P#Eq(#oinB z=E)Nj90t2_HRh6j@2P-k6jNh4vzsC05 z4tbbXgQ!WxaGVj5T-Cl27Sf9~3hgCMs4lQ1dOqO~W>Y~!Y`5Jq!%KBc2AHUl-$m0& zu22lriV+|0@r1pW7?-1c^Qf?=I{kAdyVO2F(Zge~RF{`Yn3L`qRFNSf5y?48OA(RI z_~wm9zTTZzt}P5ox(sTJ5vxd!J1Klu7ca+y9~Gc(C+H2Ou*=c1@H22y{!}+eCL`72 zt1@b+N*-#sN@h^W*WotK*lD0UU92gNYgv1_WAqT|aY z0?#8E83*kgIU?>4Ual&atQ38wD5c6M$90)*5R`%>@3$l>LeizEF`~fTM-JR6yrs$I zr6{az(C8xLCNsU@P@I7?)Sif8)XbCZZID%!8pfctLwC-#)o_yjC` zGGd)L*gq!YgJnqh_Q@uav`I>O(aHOwZTIb1>=X|FN6IkI3zN#j$BQvrI|~d2Q>1G5 zTvXVuJ}cY}hJ`xJs`&97%K?feDys89ufayK92=6U-rbCB&l86Wp8EuJ_n=G3z`Ew!$P5@{Onu0D5d5s{Mit)FPJ^6{Q)Js4fLF=gR5d&%3n8Q)~) zy@$z^?lLmHYz3eCYC2II0gE)LG|d9voF(tV?+I?$z9-eHbpDj`^yNMRnfB0Am-q3f z4xz`NC9rrfR7og%L{hiXKrp*nXbc<%0Wmdb4j&k$V)`4jRI$ve#p2I&UBx;rjv>wj!9fw_ZG$;XXT$eeEng{s%htQw^-s56)bYiBv zjYln2HsnJ}hF<3E_-sns_9}zN+RP+GkVWD=N8`*(*Xk>tlfRkTg3GSC8aLJlkuSFR zc{RJPQfMhYD=5Hu(1zz81_Y6OcKH!bt6iqDm^f@zU8?umJW;JOi;F?A2x+Im9IYN| zsd^+?y_`8A_pO&UmDa-a4gLTRLAtaT$A%0unhr#6N;tFJ)w*2#9-}~(fB=-00PXK9 z*KpEq%6=>UNd*wb7tYV!t@31vLLl0nj1&@u;jMTqUGOA8XFSaJ`@5k>o9?szteuI5 zBVh49Fb|_0I@X@Zq^V-f^gbxYwWZx}IaAzJuROVGxDi7dU8Be|Sy|X#Z;&*5(hQz< z$)yP04SSHYD8H^}zu)TN;0eR;Oqx_m#anPt6eVkI-un!2*ocUU{?2Bd^AWBDY9{C~>#42mr+hTFN+Ok={a)(19wK13>t%=F z%j1RKi1ob!0I%k>j&1wLl3H$nSyRVR{M`>{PK#dox()i~r<-rRUz&%FB5sYZ#Cxz@ zk6+0%sl0*eYa~~f6oUnRJ@5WWRt8v5YbeVNS}q!;90?j%X4jN4a24yu&L5U?R9p_* zy|H-Yl!|;|?bKpW-tEFN(>SRE_0SYZd0b*QC-#UA7(Js}rtZRa5(~=GZNtp}1W=R^ z^EzyprdIfjd+tk91{(!UwHVsGFt(njacx<^ZY3BzdInM(YW9n+@!IhemNI+TXuSm& z_w?|<%-@}>JMn)|R#_=EKRcW9cJ&2ry?#3N8?wbzI_mIH>(joEcMi$4$I{F-Yl_tv zukD4sknZlEZ4c)fahoxMl1;d?p|>P%C2MAi>*D0kvi7 zJT@~05j(kM$cIyffj8ebIbE!5v2G;@n9W^W?KEiPi+#4Af^=AscOqFlTI*{(m_1eF z$j>!!#xf%ZZa`O}lR0aJV5IYnK-eK2;3hl;8DXvC69b$DumElZGr&GKGkhC&W+bULvG;2!r 
z2z@p?GFg;odN91)tNiS``GQyj`fO<4b9(yb~Ig_X)m5`-xX6~ zytRs`*(y{qWA1Z8k8>OTY!-NKFfMblC$-qIle6Rh(s=If-T8Y#K|AntjVVC}Yvin{ zCk`B$D}rwZbb(dzGds8d&v}x;o%;?`6@?>JTGb^bHg+v~r(P-ct8?6s5daB1I0lBl zv__kHX#|YTWZ&}fV7r|@a63_}MWsa`+`@uf-?;LGz3qtF{M(!H2T@ua<-@pv6_gG= zfD}*xJboN9X>(`m%hxR@hn=q*8yo9izuGzs*$8LP`WIK%)rEO9dUdJ$)ZmyKas9Hb z0^v0fZ>%skWA-pcY8YNMds=zIqh$Y#GUQY*!6+hi=<1s{487THhYU(IH$R;9`du7} z4S9~InY+A?+|J>!A?&yD!W$VB@7$lJNAJ^7DTKB4R0pkUJ5u%92C+^Ezm*KoG&CGo zOnbfhb%Bby3!Le~Vsn$-Wj{$o>?>Al=J$} zXPFsQAy`h_i&^*CE2hr)_TvUJ7SA5+FoSyF?W^j39|AcmuB-LGZXCWWb$XKIakBYz z&TH$S$!UAjycRq7w8(OJqlhh2b3b{FEmO63zg*h`&mYFEtz2l&ZeI#(kY*O7E+7 zoWo?DNZiMD#lE|4lm*fv zRmwgxRZrVJ$wH8o)hjXZC<^(w*?#*hJPs5wS((MVQ%>pB#?~r%Gjv{rgiRlD$jOLW z-(`HoU3HDBmloA?SD0-fXz{2qaBC>{Le}+}#8#5{8bu#JXr>bL+$0Z)zvw!q4M53}vKCy`?O;be3 z*SHM~X>)C)Tyg$P8F7ucLG;>rwyot2$eNjTamfuZE9*;d3k%JJsk)Q}lu`eySEz|H zefOioXHvOxh*f|hcS?X>x!fI>g;1`qz~lrOpYfxMA3)=-sWwePr;m!Y^90*XHsy>PCT5SEk{LkNlaey6pFloR>!!|g%w zHGqpW;OhCM_JasOb2ZR5dux5&Y%cH+zV^`hbaUioYjx0I@#>HvBE=pam`}zv(-;#U zul4==_bwOgaUclqBTqh6lqA0QjFU)1o`;-+BN+)Zitg)Q=gZ!$qH5gga$(`Fgo^nR zo*&BbRnkPDylaf^u<&YVC&b6GGG=E1K$gMXea*XhA&s=7euRx7h($JmRlX^fCDFxuYJ^m0eQPn8nkI0P&1qwy!* zcK+Oz3!4$(oCqke^{m3TH=D(oF~H9S?aM4Edy5 z{iY9TWkC=X?6dSJu?c?O`?(OiUBOn-{U(vOtsZ_OJzz9zw$%2e>n~T|=sQdWoR&Fc z?XzeTUJuy#^)A4NJv9o=s!kYC{*e}#l)CKG_>RBM{i84Z>8rT8vWjGXZJ3i z9Ex-1!%`+*Onq*7J7{Ga+<&x|D^j4y-|(is>j(sBZEc;bMbnS;r#ZGSZ#Xzg`0eDF zt;huJo_7}%C?Ar5)npyw;?_v4=<Md<$l D_HYw> literal 0 HcmV?d00001 diff --git a/doc-template/public/main.css b/doc-template/public/main.css new file mode 100644 index 0000000..cfa8c03 --- /dev/null +++ b/doc-template/public/main.css @@ -0,0 +1,4 @@ +article h2 { + border-bottom: solid 1px gray; + margin-bottom: 1rem; +} diff --git a/doc-template/public/main.js b/doc-template/public/main.js new file mode 100644 index 0000000..e60362e --- /dev/null +++ b/doc-template/public/main.js @@ -0,0 +1,10 @@ +export default { + defaultTheme: "auto", + iconLinks: [ + { + icon: "git", + href: "https://git.bitbadger.solutions/bit-badger/BitBadger.Documents", + title: "Source Repository" + } + ] +} \ No newline at end of file diff --git a/docfx.json b/docfx.json index 312c800..1f586b1 100644 --- a/docfx.json +++ b/docfx.json @@ -12,7 +12,10 @@ ] } ], - "dest": "api" + "dest": "api", + "properties": { + "TargetFramework": "net9.0" + } } ], "build": { @@ -29,20 +32,23 @@ "resource": [ { "files": [ - "images/**" + "bitbadger-doc.png" ] } ], "output": "_site", "template": [ "default", - "modern" + "modern", + "doc-template" ], "globalMetadata": { "_appName": "BitBadger.Documents", "_appTitle": "BitBadger.Documents", + "_appLogoPath": "bitbadger-doc.png", + "_appFooter": "Hand-crafted documentation created with docfx by Bit Badger Solutions", "_enableSearch": true, "pdf": false } } -} \ No newline at end of file +} diff --git a/docs/getting-started.md b/docs/getting-started.md index 496f920..3767ac0 100644 --- a/docs/getting-started.md +++ b/docs/getting-started.md @@ -37,16 +37,16 @@ Both PostgreSQL and SQLite use the standard ADO.NET connection string format ([` ```csharp // C#, SQLite // ... - var config = ...; // parsed IConfiguration - Sqlite.Configuration.UseConnectionString(config.GetConnectionString("SQLite")); +var config = ...; // parsed IConfiguration +Sqlite.Configuration.UseConnectionString(config.GetConnectionString("SQLite")); // ... ``` ```fsharp // F#, SQLite // ... 
-    let config = ...; // parsed IConfiguration
-    Configuration.useConnectionString (config.GetConnectionString("SQLite"))
+let config = ... // parsed IConfiguration
+Configuration.useConnectionString (config.GetConnectionString("SQLite"))
 // ...
 ```
 
@@ -55,18 +55,18 @@ For PostgreSQL, the library needs an `NpgsqlDataSource` instead. There is a buil
 
 ```csharp
 // C#, PostgreSQL
 // ...
-    var config = ...; // parsed IConfiguration
-    var dataSource = new NpgsqlDataSourceBuilder(config.GetConnectionString("Postgres")).Build();
-    Postgres.Configuration.UseDataSource(dataSource);
+var config = ...; // parsed IConfiguration
+var dataSource = new NpgsqlDataSourceBuilder(config.GetConnectionString("Postgres")).Build();
+Postgres.Configuration.UseDataSource(dataSource);
 // ...
 ```
 
 ```fsharp
 // F#, PostgreSQL
 // ...
-    let config = ...; // parsed IConfiguration
-    let dataSource = new NpgsqlDataSourceBuilder(config.GetConnectionString("Postgres")).Build()
-    Configuration.useDataSource dataSource
+let config = ... // parsed IConfiguration
+let dataSource = new NpgsqlDataSourceBuilder(config.GetConnectionString("Postgres")).Build()
+Configuration.useDataSource dataSource
 // ...
 ```
 
@@ -80,17 +80,17 @@
 ```csharp
 // C#, PostgreSQL
-    builder.Services.AddScoped(svcProvider =>
-        Postgres.Configuration.DataSource().OpenConnection());
+builder.Services.AddScoped(svcProvider =>
+    Postgres.Configuration.DataSource().OpenConnection());
 // C#, SQLite
-    builder.Services.AddScoped(svcProvider => Sqlite.Configuration.DbConn());
+builder.Services.AddScoped(svcProvider => Sqlite.Configuration.DbConn());
 ```
 
 ```fsharp
 // F#, PostgreSQL
-    let _ = builder.Services.AddScoped<NpgsqlConnection>(fun sp -> Configuration.dataSource().OpenConnection())
+let _ = builder.Services.AddScoped<NpgsqlConnection>(fun sp -> Configuration.dataSource().OpenConnection())
 // F#, SQLite
-    let _ = builder.Services.AddScoped(fun sp -> Configuration.dbConn ())
+let _ = builder.Services.AddScoped(fun sp -> Configuration.dbConn ())
 ```
 
 After registering, this connection will be available on the request context and can be injected in the constructor for things like Razor Pages or MVC Controllers.
@@ -145,7 +145,7 @@ await Definition.EnsureTable("hotel");
 await Definition.EnsureDocumentIndex("hotel", DocumentIndex.Full);
 await Definition.EnsureTable("room");
 // parameters are table name, index name, and fields to be indexed
-await Definition.EnsureFieldIndex("room", "hotel_id", new[] { "HotelId" });
+await Definition.EnsureFieldIndex("room", "hotel_id", ["HotelId"]);
 await Definition.EnsureDocumentIndex("room", DocumentIndex.Optimized);
 ```
 
@@ -168,7 +168,7 @@ Let's create hotel and room tables, then index rooms by hotel ID and room number
 // C#, SQLite
 await Definition.EnsureTable("hotel");
 await Definition.EnsureTable("room");
-await Definition.EnsureIndex("room", "hotel_and_nbr", new[] { "HotelId", "RoomNumber" });
+await Definition.EnsureIndex("room", "hotel_and_nbr", ["HotelId", "RoomNumber"]);
 ```
 
 ```fsharp
-- 
2.47.2

From 918cb384d495defe1fe6ad553ecb8066d22af483 Mon Sep 17 00:00:00 2001
From: "Daniel J. Summers"
Date: Sat, 19 Apr 2025 15:47:42 -0400
Subject: [PATCH 22/22] Final doc tweaks before 4.1

---
 docfx.json                 |   9 +-
 docs/advanced/index.md     |   4 +-
 docs/advanced/integrity.md | 222 -------------------------------------
 docs/toc.yml               |   2 -
 favicon.ico                | Bin 0 -> 9528 bytes
 index.md                   |  22 ++--
 src/Common/README.md       |  10 +-
 src/Directory.Build.props  |   4 +-
 src/Postgres/README.md     |   6 +-
 src/Sqlite/README.md       |   4 +-
 10 files changed, 30 insertions(+), 253 deletions(-)
 delete mode 100644 docs/advanced/integrity.md
 create mode 100644 favicon.ico

diff --git a/docfx.json b/docfx.json
index 1f586b1..b4ce50f 100644
--- a/docfx.json
+++ b/docfx.json
@@ -22,7 +22,10 @@
     "content": [
       {
         "files": [
-          "**/*.{md,yml}"
+          "index.md",
+          "toc.yml",
+          "api/**/*.{md,yml}",
+          "docs/**/*.{md,yml}"
         ],
         "exclude": [
           "_site/**"
@@ -32,7 +35,8 @@
     "resource": [
       {
         "files": [
-          "bitbadger-doc.png"
+          "bitbadger-doc.png",
+          "favicon.ico"
        ]
      }
    ],
@@ -46,6 +50,7 @@
       "_appName": "BitBadger.Documents",
       "_appTitle": "BitBadger.Documents",
       "_appLogoPath": "bitbadger-doc.png",
+      "_appFaviconPath": "favicon.ico",
       "_appFooter": "Hand-crafted documentation created with docfx by Bit Badger Solutions",
       "_enableSearch": true,
       "pdf": false
diff --git a/docs/advanced/index.md b/docs/advanced/index.md
index 06aa368..270feb4 100644
--- a/docs/advanced/index.md
+++ b/docs/advanced/index.md
@@ -7,10 +7,10 @@ While the functions provided by the library cover lots of use cases, there are o
 
 - [Customizing Serialization][ser]
 - [Related Documents and Custom Queries][rel]
 - [Transactions][txn]
-- [Referential Integrity][ref] (PostgreSQL only)
+- [Referential Integrity with Documents][ref] (PostgreSQL only; conceptual)
 
 [ser]: ./custom-serialization.md "Advanced Usage: Custom Serialization • BitBadger.Documents"
 [rel]: ./related.md "Advanced Usage: Related Documents • BitBadger.Documents"
 [txn]: ./transactions.md "Advanced Usage: Transactions • BitBadger.Documents"
-[ref]: ./integrity.md "Advanced Usage: Referential Integrity • BitBadger.Documents"
+[ref]: /concepts/referential-integrity.html "Appendix: Referential Integrity with Documents • Concepts • Relational Documents"
diff --git a/docs/advanced/integrity.md b/docs/advanced/integrity.md
deleted file mode 100644
index 5f52d79..0000000
--- a/docs/advanced/integrity.md
+++ /dev/null
@@ -1,222 +0,0 @@
-# Referential Integrity
-
-_Documentation pages for `BitBadger.Npgsql.Documents` redirect here. This library replaced it as of v3; see project home if this applies to you._
-
-One of the hallmarks of document database is loose association between documents. In our running hotel and room example, there is no technical reason we could not delete every hotel in the database, leaving all the rooms with hotel IDs that no longer exist. This is a feature-not-a-bug, but it shows the tradeoffs inherent to selecting a data storage mechanism. In our case, this is less than ideal - but, since we are using PostgreSQL, a relational database, we can implement referential integrity if, when, and where we need it.
-
-> _NOTE: This page has very little to do with the document library itself; these are all modifications that can be made via PostgreSQL. SQLite may have similar capabilities, but this author has yet to explore that._
-
-## Enforcing Referential Integrity on the Child Document
-
-While we've been able to use `data->>'Id'` in place of column names for most things up to this point, here is where we hit a roadblock; we cannot define a foreign key constraint against an arbitrary expression.
Through database triggers, though, we can accomplish the same thing. - -Triggers are implemented in PostgreSQL through a function/trigger definition pair. A function defined as a trigger has `NEW` and `OLD` defined as the data that is being manipulated (different ones, depending on the operation; no `OLD` for `INSERT`s, no `NEW` for `DELETE`s, etc.). For our purposes here, we'll use `NEW`, as we're trying to verify the data as it's being inserted or updated. - -```sql -CREATE OR REPLACE FUNCTION room_hotel_id_fk() RETURNS TRIGGER AS $$ - DECLARE - hotel_id TEXT; - BEGIN - SELECT data->>'Id' INTO hotel_id FROM hotel WHERE data->>'Id' = NEW.data->>'HotelId'; - IF hotel_id IS NULL THEN - RAISE EXCEPTION 'Hotel ID % does not exist', NEW.data->>'HotelId'; - END IF; - RETURN NEW; - END; -$$ LANGUAGE plpgsql; - -CREATE OR REPLACE TRIGGER hotel_enforce_fk BEFORE INSERT OR UPDATE ON room - FOR EACH ROW EXECUTE FUNCTION room_hotel_id_fk(); -``` - -This is as straightforward as we can make it; if the query fails to retrieve data (returning `NULL` here, not raising `NO_DATA_FOUND` like Oracle would), we raise an exception. Here's what that looks like in practice. - -``` -hotel=# insert into room values ('{"Id": "one", "HotelId": "fifteen"}'); -ERROR: Hotel ID fifteen does not exist -CONTEXT: PL/pgSQL function room_hotel_id_fk() line 7 at RAISE -hotel=# insert into hotel values ('{"Id": "fifteen", "Name": "Demo Hotel"}'); -INSERT 0 1 -hotel=# insert into room values ('{"Id": "one", "HotelId": "fifteen"}'); -INSERT 0 1 -``` - -(This assumes we'll always have a `HotelId` field; [see below][] on how to create this trigger if the foreign key is optional.) - -## Enforcing Referential Integrity on the Parent Document - -We've only addressed half of the parent/child relationship so far; now, we need to make sure parents don't disappear. - -### Referencing the Child Key - -The trigger on `room` referenced the unique index in its lookup. When we try to go from `hotel` to `room`, though, we'll need to address the `HotelId` field of the `room`' document. For the best efficiency, we can index that field. - -```sql -CREATE INDEX IF NOT EXISTS idx_room_hotel_id ON room ((data->>'HotelId')); -``` - -### `ON DELETE DO NOTHING` - -When defining a foreign key constraint, the final part of that clause is an `ON DELETE` action; if it's excluded, it defaults to `DO NOTHING`. The effect of this is that rows cannot be deleted if they are referenced in a child table. This can be implemented by looking for any rows that reference the hotel being deleted, and raising an exception if any are found. - -```sql -CREATE OR REPLACE FUNCTION hotel_room_delete_prevent() RETURNS TRIGGER AS $$ - DECLARE - has_rows BOOL; - BEGIN - SELECT EXISTS(SELECT 1 FROM room WHERE OLD.data->>'Id' = data->>'HotelId') INTO has_rows; - IF has_rows THEN - RAISE EXCEPTION 'Hotel ID % has dependent rooms; cannot delete', OLD.data->>'Id'; - END IF; - RETURN OLD; - END; -$$ LANGUAGE plpgsql; - -CREATE OR REPLACE TRIGGER hotel_room_delete BEFORE DELETE ON hotel - FOR EACH ROW EXECUTE FUNCTION hotel_room_delete_prevent(); -``` - -This trigger in action... - -``` -hotel=# delete from hotel where data->>'Id' = 'fifteen'; -ERROR: Hotel ID fifteen has dependent rooms; cannot delete -CONTEXT: PL/pgSQL function hotel_room_delete_prevent() line 7 at RAISE -hotel=# select * from room; - data -------------------------------------- - {"Id": "one", "HotelId": "fifteen"} -(1 row) -``` - -There's that child record! We've successfully prevented an orphaned room. 
- -### `ON DELETE CASCADE` - -Rather than prevent deletion, another foreign key constraint option is to delete the dependent records as well; the delete "cascades" (like a waterfall) to the child tables. Implementing this is even less code! - -```sql -CREATE OR REPLACE FUNCTION hotel_room_delete_cascade() RETURNS TRIGGER AS $$ - BEGIN - DELETE FROM room WHERE data->>'HotelId' = OLD.data->>'Id'; - RETURN OLD; - END; -$$ LANGUAGE plpgsql; - -CREATE OR REPLACE TRIGGER hotel_room_delete BEFORE DELETE ON hotel - FOR EACH ROW EXECUTE FUNCTION hotel_room_delete_cascade(); -``` - -Here's is what happens when we try the same `DELETE` statement that was prevented above... - -``` -hotel=# select * from room; - data -------------------------------------- - {"Id": "one", "HotelId": "fifteen"} -(1 row) - -hotel=# delete from hotel where data->>'Id' = 'fifteen'; -DELETE 1 -hotel=# select * from room; - data ------- -(0 rows) -``` - -We deleted a hotel, not rooms, but the rooms are now gone as well. - -### `ON DELETE SET NULL` - -The final option for a foreign key constraint is to set the column in the dependent table to `NULL`. There are two options to set a field to `NULL` in a `JSONB` document; we can either explicitly give the field a value of `null`, or we can remove the field from the document. As there is no schema, the latter is cleaner; PostgreSQL will return `NULL` for any non-existent field. - -```sql -CREATE OR REPLACE FUNCTION hotel_room_delete_set_null() RETURNS TRIGGER AS $$ - BEGIN - UPDATE room SET data = data - 'HotelId' WHERE data->>'HotelId' = OLD.data ->> 'Id'; - RETURN OLD; - END; -$$ LANGUAGE plpgsql; - -CREATE OR REPLACE TRIGGER hotel_room_delete BEFORE DELETE ON hotel - FOR EACH ROW EXECUTE FUNCTION hotel_room_delete_set_null(); -``` - -That `-` operator is new for us. When used on a `JSON` or `JSONB` field, it removes the named field from the document. - -Let's watch it work... - -``` -hotel=# delete from hotel where data->>'Id' = 'fifteen'; -ERROR: Hotel ID does not exist -CONTEXT: PL/pgSQL function room_hotel_id_fk() line 7 at RAISE -SQL statement "UPDATE room SET data = data - 'HotelId' WHERE data->>'HotelId' = OLD.data->>'Id'" -PL/pgSQL function hotel_room_delete_set_null() line 3 at SQL statement -``` - -Oops! This trigger execution fired the `BEFORE UPDATE` trigger on `room`, and it took exception to us setting that value to `NULL`. The child table trigger assumes we'll always have a value. We'll need to tweak that trigger to allow this. - -```sql -CREATE OR REPLACE FUNCTION room_hotel_id_nullable_fk() RETURNS TRIGGER AS $$ - DECLARE - hotel_id TEXT; - BEGIN - IF NEW.data->>'HotelId' IS NOT NULL THEN - SELECT data->>'Id' INTO hotel_id FROM hotel WHERE data->>'Id' = NEW.data->>'HotelId'; - IF hotel_id IS NULL THEN - RAISE EXCEPTION 'Hotel ID % does not exist', NEW.data->>'HotelId'; - END IF; - END IF; - RETURN NEW; - END; -$$ LANGUAGE plpgsql; - -CREATE OR REPLACE TRIGGER hotel_enforce_fk BEFORE INSERT OR UPDATE ON room - FOR EACH ROW EXECUTE FUNCTION room_hotel_id_nullable_fk(); -``` - -Now, when we try to run the deletion, it works. - -``` -hotel=# select * from room; - data -------------------------------------- - {"Id": "one", "HotelId": "fifteen"} -(1 row) - -hotel=# delete from hotel where data->>'Id' = 'fifteen'; -DELETE 1 -hotel=# select * from room; - data ---------------- - {"Id": "one"} -(1 row) -``` - -## Should We Do This? - -You may be thinking "Hey, this is pretty cool; why not do this everywhere?" 
Well, the answer is - as it is with _everything_ software-development-related - "it depends." - -### No...? - -The flexible, schemaless data storage paradigm that we call "document databases" allow changes to happen quickly. While "schemaless" can mean "ad hoc," in practice most documents have a well-defined structure. Not having to define columns for each item, then re-define or migrate them when things change, brings a lot of benefits. - -What we've implemented above, in this example, complicates some processes. Sure, triggers can be disabled then re-enabled, but unlike true constraints, they do not validate existing data. If we were to disable triggers, run some updates, and re-enable them, we could end up with records that can't be saved in their current state. - -### Yes...? - -The lack of referential integrity in document databases can be an impediment to adoption in areas where that paradigm may be more suitable than a relational one. To be sure, there are fewer relationships in a document database whose documents have complex structures, arrays, etc. This doesn't mean that there won't be relationships, though; in our hotel example, we could easily see a "reservation" document that has the IDs of a customer and a room. Just as it didn't make much sense to embed the rooms in a hotel document, it doesn't make sense to embed customers in a room document. - -What PostgreSQL brings to all of this is that it does not have to be an all-or-nothing decision re: referential integrity. We can implement a document store with no constraints, then apply the ones we absolutely must have. We realize we're complicating maintenance a bit (though `pgdump` will create a backup with the proper order for restoration), but we like that PostgreSQL will protect us from broken code or mistyped `UPDATE` statements. - -## Going Further - -As the trigger functions are executing SQL, it would be possible to create a set of reusable trigger functions that take table/column as parameters. Dynamic SQL in PL/pgSQL was additional complexity that would have distracted from the concepts. Feel free to take the examples above and make them reusable. - -Finally, one piece we will not cover is `CHECK` constraints. These can be applied to tables using the `data->>'Key'` syntax, and can be used to apply more of a schema feel to the unstructured `JSONB` document. PostgreSQL's handling of JSON data really is first-class and unopinionated; you can use as much or as little as you like! 
- -[« Return to "Advanced Usage" for `PDODocument`][adv-pdo] - - -[see below]: #on-delete-set-null -[adv-pdo]: https://bitbadger.solutions/open-source/relational-documents/php/advanced-usage.html "Advanced Usage • PDODocument • Bit Badger Solutions" diff --git a/docs/toc.yml b/docs/toc.yml index 982e8f0..941f98c 100644 --- a/docs/toc.yml +++ b/docs/toc.yml @@ -11,8 +11,6 @@ href: advanced/related.md - name: Transactions href: advanced/transactions.md - - name: Referential Integrity - href: advanced/integrity.md - name: Upgrading items: - name: v3 to v4 diff --git a/favicon.ico b/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..22ca446ef7fa60edfaea87e1007d7dff21cebc68 GIT binary patch literal 9528 zcmaiZRa6`>xAj1WGWY-k4DK$)-Q9|pqQ#{+#a)UHR@_~RyOiSY?!^m5i7vN z$jUA=Fs1p+q7)~>!m4S?m+TSU>NCT6J?bDX zmh#Z@`dQDY4z5seb&C#*1T*BomX`=|dCs1x8nIwAty#C?{d~)(=DKB`mi~_I{HFrm zRkFKqOv(iP@JOB(%l=hBepotHs@nC>HNiOF&(n4MYOaAoNGKL;HXRHZ*=kMBUN{L9 z8#YG>*TyxrGtX>uF7-g`G=r9LSeJRLx8K7)yfI8u$pJ<1HoRNBvBW46S;pyIZ+gt-y*%Os_P33{Ct zIj`^I)7>aFEQ$;#mGmI0KF}jk&gEEI16Lrl)(C_oaF1bxkd^f+B6X$h zL+zH+jOEExWr)sH;{KaaaeiD|=~owXo*65TqU}-$`<}D?xBD@d)+8u^D+)wIMYF1y znd^8xY{_wowI$+YW%iZu=Y(_i-n8thBRu{fj1*J^I-D$qk}y$1dKND?KLmFKtg5NV{nci zIE1QRyk>f`=xih?G4lJESogc}cq%&SFuxW{`u$CM{&5-enRN`t;Y=aR>hk<--Vz;J zO3TpM6=Q5RJKQ&Wjoj z!of{x86e#GIVH?j$U0|&%_yU6(}Bz51cHHaP1bq$A)eeXr=dYu?gMAfCF;V0_N1!1 zTi`iat`kwQ^;tJv_x&T)=SPE6)I;*+{KG^U8}Cbv@J$gfuGR*%3Z0_vkN7taWi2`I zYYv;kY3?A`ydl>2wrm{pdJa1|uaNogJoz+(sU$(T`ri_H7gvQVj0}tpzUGTtwQtfi zrj_WJH}j7#k2!91Jv;F)pVBEsM~^8PY0h5Up;3dSdyB3fR`E=~_@TC|Fs0M_2FR(V zvg5z|o{K#U^#^x9JgIC2o_XAEwI@7cZ_eDFtzS(i(xFiUJKgkn&2z)5>cYQtfGolq z9;}7WP&}Qs`y;SOK?8ZCx0I%V|7a)!5J-&il!vu{AANowX%3H!9D53vOgK6q7y-zY z`y9i53J^mu{ft`@o&I_%9U5(MvFP8)CV$UAbQe93{1Sn8L^X6H)Sm`5pv1|0fqOGndk3~vFP{lJZ>jm(@)~z4TN&EUwYW%dItq1 zGRpZIqmI4@wtMQ^uLVrhbRy;_GDflI_4b)a+&r*gG4cgy9D|T>JUPO-zEQ-RsLDo* z5$rs_e<-|-bfrL%?gk~kv5FLO+Fzk_mBsvcPjj_9mbRh#R}EL$HT*yunpysR;Bt`92^A;)@hbqr%H2GCdh_x zlNXepsjj#4EIelT&Dh`X=J?HY@o4*>?OW$JU2NGuuLS+Q`*AvYbi_=68;b;%m83fk zkgz^gpxC`OVXF6c2{xME>z(M?JH6Y=okU7$_UMWVw!8aQvX>S2HR!tmyX^c_-0XFA zaE7QElb$^;EBpor96_CT=<(ZY?Nl_%wzb`IlzUa_!tMqfHtD9t5BxMu^AlGdY1tqa zsvKZd#3Wr}WAA%BIvW+75Z*X9?vnDd??-chZ+2q+pbBhEnyg^S(R&RxUqoS_K}?Zik=7vH z>sS`YSLDP|=A6cX)|0KrmS)aks#NlN?GS`j~UTFN;mPT_S{OC$dW;ROL z#sD))`H)GNabHw{wH|5@?!6YZwzeTAZa)&k@^WcWH;p9sRg0-wmWtw%pXani{n7$! z!Ir2Ow9$O`=N{Q+Cg-7zCMP?4Zxeq+8K7Z)y;^MB(*PVCiNfh~=W18`iku0d944RSg zum+W`nrbf9nH*}9QJ!Y=FY5lk3C$%6 zob}OCrd{2ViuIWA6mUg6&-dpdDa%scXt1P!_CQ6lg^kwN<<`wAIDduWU9V9Rv?kFU zzU8R!8^t%JO>k`XE~tx*Va$k4OlLd&+&MxzC}V}+rB|%j^PXUR5{h)=Kuc`_M?}vF z;!_7#uX0w-t3jCY&nCu5th(-5D4AHDBiMO$J97r2!E#)#WZL%p_xq0d2k|23B^pej zEUB63JPdN zv4voZgJQh2>FG$M|A;kcf7p1%Z7G$sAw%`euqoYGT7xViFh;j(?8!e_w@MBgQO?3Z zV7nejCKpxx@6e~wny|bx_keL5gXxC-aN{>yB=-4X%bbK6@34N%p^xCDMy~r1paoN7KI*KuY*;VfmLWLK=IuL5m$OC~gmK5VY<6CYavE_L{5*u}l0ql0z2h z@DthUoxM}nujTz8Z(}5tRO;__lWp?v31*&eiIwj~iRug|#O^jD(`WGIa5W*guf@B! z`^R_QpDSRp@Uw%6o(3hsZVNYVm~19_I?Cb5K@tdVQh^Vt#+LKq;$Ph2aX$Q`uH9(P zyt;DTBBngNx5+QobOsM`g?B2<+I{l0!}~hSPJEYVEqZX$Z)Y>7=Q>=F6@BCHxcS3l zUoV%I5DbIJf6B0yh@=M_y3;ivL2jGZ$H}*{<)i=0W97^=J6W^dZKYtC$2_ZM{1H zRSJ9w-;Fv#8hh3YZXO4{}lZo@buKf0E zs2KC6?Z{-=yq=6pFP@krikPAi6GuoFn;3?o(B6s8MM!HpsKLg5%31%V<2El;YJukc zdTLR~Ou(%gh5u!TYOXh`5aNN9W!OAUCRQd^YA{r~niwsbNPqrm-Q>>gjPHHvke!y? 
zN74wp0}oQ8uVLyLXairI1Fr1_vq_I8HSi9aV)*l*uXY{JxcBCk<{p~ zPaAGC0xoBtL=4jhXY^wD#!xI`kBtOhypOM468;5+!anTz6yxKsOU`M7&z0N~66(d8 zUxl^{w1g%qk{JcX_=Ylhv~|i@CnqOoZ5O4?hF}=0?G~)l-=9yIIQULYc;5W!H(%~> z`akGd@?~7|zw})B(RB*|APD>qJ$vVLW&5O9$pwr^yz}x==?VP%VFpumM%q#nY6>;A z)^Mj{V2cQ*un6^4!;2ccDVG_P0lcSS(G;~s2BG4)MB^#y%+lEw9Y7l$ z5?O!$dKWlh%o4O{e6n@--6ocCIVa1{WU20C+4=Eb;Jdq4#IH9dngD4@Ujl_2L*>5p z?fX}r7hB``#`+fb<5mR_S6pdM(dGn;6YBI2zRW#mGyo4Ea>pk2{I>KP;qDop+7xR8 z6F$d^eE@byU44mc#cx_aUluaqX2!(&sE5>944_zD6eU=U$ZD2pN>Leat|DrvL}oBO z%(d{-3{Ce6xn~tQiRuZO#m`vqPo*%$Wv1`~4R=jFpi~qXxfCJ|0wRa^x01^%j3f!+ zOJ%^o_t{52S9VyhlOk=-X=^nrkf(emZRox5!sZFTEctGsAGp18#0oD8@29?$QqJ_r zk~LrB-`m3{wtiRLZmU+R$}ayzMKe{QLoZED8|P^;)|o(1P@_c8{^MJbAJDUdyew;q z%iFRKWVC|B)>#mjf-Ie>pI~Jeo1ji7&5M$SPXBCrO~dW*i|+?Bu!&E=ncIR5SihbJ zv(>pROuV*gscV$RLtbgoTrHnnNIqCLH+#jpL{8qysPRH;oC3!)_s zu-E?7zfs*CLN*A0Azp#>*=zX*^2mzgwSAi zn-{C+I)?Tipwiy9wI3rB^0!(P!DcZmYw7Vxi_&IShsfw?w38MRWe|Cm`B~HvN8=K7 zM++1~q63Vefnk?JFq3DAG^==7j?Ote!Be>5eHl{ZF5!!#6?Hzt@8qe{ij51F=11+6 zh=&{r&?L23?l}4R<-0zp)4Y!zK#S3T;?Y;9prG{%A3CRUC&5 zCT>dK3B{xI7jDAnIC$G-<(reb4J0fqEOOlEl`?;g>dB00gy69lm$v`#k4UahI}sqZ z5B9{9P92BI9PxK7EpvdG*TJDmwXv3p>!zI?6RiNHrcy3Rm=OmRu*FxRxVZSWxVssZ z{)(vJ7mUPAA@c!FZShZK+w@2>bX2y`56GV;-QMCu0qaS@B}-{cxOQzyX$)X#jVg9) zcX!^NKO9D$7sj0V0$;(K*@u+0?i90q%~!=D;4GF6rv+6(t;Fjs@{lTDxPo{ls8XD}>Kn<898(CBX8g z2Tt?+Cl__CgCwb^E40A085ys{YndxQZX@AQILK|gjPi)C#J{)9w+4T zXN=!!yBjKg@}6M4%nmA|TcJkxtJZPphl@iSiAQHO|C{LFTiv+^Eyy5GhwZ-JMl1a% zu&F-3%;(RT*jSBjBm4|yjDokTr4EA5ugbsdzIwC($NCFPdujuiLp^aKr9iC~AWSnShzo=Af}ZTDYGz=*C&Z^HH}cRcupQt{P1KGwMZ^o40b2A8m}OZo=u0Ex#T$la_?X?pvIVpy#$s3esW+ktm9;Syj;ljrlaZ zuR29-MhqaCvV#LCC8f<{t0P;*iZ*~Kbjv)^t|3Yq7l@=>5q5>pkZ=9*MOUs0%P|3d z!u+GV=uB#T#qm43zX@11AbTmsd4D7MIkh6%tOT4$Aj@Z7q{r3}^ep~z4;T7A8_G`q zHrRi!x23oJo3q;Z_sj6g#+v!N6GP~JBic_3!cvHt4m-7;#|g{7;}Fz(6?)(g6qJ`w zyA&Aj^&Fp0Ij3wmOfvidc$y;MpoGsbfZka6#&KdF`Ak8u=UMZ$?|4|RHfs^ThNoGE$QO?8U;dkaL+)>x&xGHLXT2>kSxDszL zWgf0@e_Vz4SikEl-Vt78wF(p$hP`8%UC{1pC6|WOSjRNTplV7fR_Mj*7TJcsTurdn zGq!hYzN*GjsFGgm^H_ zq98?kza~SGnaH103|4=4WCSg)+5)|i;HkHosS;&%+>;%(-mfm6cTh93G8V_=O}YMz zgA*Ko45l##O(S+)AM65hJb&16bY9Y^iSE)7vKvg!rL}Hb)(S-)`XTZXE__r~)wqSY z#@sqQuMm3)O~0=4X*IsJ%Jelfln}Hh9c8GmvId#unEaYDfp+v(2R}*f`W&bdj=;qy ztPaJbDxjBncirhLIZq@;RVjJ+|03j=hm7pAS`T+y)`j2Q`iD|cfsjj5^jgDDzy1Ev zp1iwjcKds60I@sJ{Nt?JhKV58w8q<4DnV($OxO3`1(`6$x7Bq|K?S~2<$u?=uHSTk zS^Kzp6DAJH`RP39zwSJLYXHP8c*u)9^g(o6i(?Kj&1ouxBe7uK9H>@%Gl*#&*Mqs9 zE$1kx*$G?bImG|s_MtI$qNkXz+0akUIzG^@K9E2T;z!GEsS+)6}Wra(pHC8h-_Nt zVJm@!fl{Q*^k&dTyBl9M9LlY z*k5-w;VaqY+@|=D5G%pj&)}rq1l27ZP^jXmGCAM1?=ekusWe&J-U=EC23%?Umcpsi zH)}ynLTK|*VW^S4`?@z^QWd-5@!lBXkR7^rGK6LVa zm2MI!^m4d4TiYpxG8~0B0CrxnWxN2&?_66gr|fwM3EgPOLQYR$n$<*noVXu#QsWl& z^rR|nKVkm;|U{=ndE*r|CtN|>Vs0Xkggk?Hn!Ub{+@DIK0EQ}x!(I>PbgnReZ6 zO^mYE)!SZ~`}&USwdNZ1X>w9*(h<5@5*;yJ>xWQ*;C+nl(L>S=?#G`#`2S-jTlqDW z-W4uja!Z)ofPy3iJxNcEPnON50@?Cszo=X3>rRj8ZjtM3$5mG`Zxb4<7AyoyX@XqF z=sVB;NJyct`5a`}h~uSbu{7p@RTlZTGFD0oznc^*r)2GguBh>F)(dvf#@e;(97In^ z?Dk`Dj7DiIMqBCYrKPSVsW7`5rw_%XyBwD~Ti=hT{-z|cG&e_HX#`94F59&I`5Q_3 zLL_N*dQzoY$6(n>|MH)?W|Ck8qB}uW&$3FDbvOXk`0yL2yXIVBRtmRvI9TV@gMpEe zHdBstYf!FPaQ;b#t})K}!`+V+x9daa-4pv-tKn->g2InfTs`h7Og6Mo- zrxuI~Gl^%|vqPzBtwcy5Qb#(eMz{UXsBTs6_r&&g zD#oyqH)7gR&dariGh%le6AxI2T_7z1c!OOlkj~ zw<6~LFR8@LV1#R;a zkK1=r;vRc8A2AXEM(r9qsp)rrZv4)7&zv7EUfBPw!}85d^<$=CCmG(MhAwVGt%Y}aCTu~z+)vKN8p1CE(`KJRF!?AXh(4N zHiMXauZQ9uH|R0k@cwv)pDPf-$*<6*DTK}nCR}jCeZAieiLPrU&wV7`eiMbz;J60K zTo1oDIk2g9-Fg!)i-wr+>Pfgco!LE@*_L(gTlIrpr=fE&d7G^`d0qrkZ#{b~R9Y<- zD}A9;9GlE}3qwLC&h+Y(s#hQPKm z#_y_}(LGk-QXgZQ6^QnX6zsZ|%#^3TUD!`0?Jt#f38)RaSL`%Y-hA 
zrZ29xIhw$IG+A4>=H~;cxYWRPy`u^xOfkCI5`Io&u+ka4tVz@u);v9_FKG+a;3xfk zTIIUa8xp(eB~q=F%Ziazq+;?X$N^>UOVf;SHfx>l8?2duMvqqJ;%c7OSTYcW)wk?A zP)vmoDXzXGqsr{cdyC6ox#XzqVyu>cO21^xdB^9P;5vIR1~Ma0q%> zA@+qCeAjXsWf1oy(P+ThZW*S!gc-yPzFj&YgNn||dAHlg_o?+eu}5%XHVSkd<4qgf z8Dw=wo~^#H3%+r}Z{*i*#$3Sw9gIcHu;ef|EtU7mVFZssS=e_=n3!N1TwP2%v;u13 z4aUL*e<~+!`4pw$tUX+Zr%MIF!<)lmpkRC)`TKstF!3;?a>*Vm4Kuh1P^6a`krE^R zD^$Irr0%6|hib4yq@bg(glK8*3L{e!$-QCLc89fSRoy5+O>N&o(PJVfPvgqQK4+3M ztQ=vrPD6F1pK4j_e%9srWJ#iYi?1O+m{j?vaB~;FSOxoMpjcW`{+{4QTrTLHri*rd zlLP9yXFWUWglj$9-!?#+!-g)KAiE*Ytu<11$_ zrL4_kNwc2c!>3l~?f5-GAs89*KZ(@z4}pmZ+b--Qbw&|v>5ohVRgjo22)fv}aJ+cK zMUoarHkA?Qg%s6@q}Z9&yP5Y=hi6*K);2aBZ0yck*AQyqkE$zN{gtE1Ai-S(KH{il zg-3`d1|pwES2Mj&A5?C}?bjTTX$jI3uFsJ#E?i+&?WGwzIzkV0btr7w2CBaTjxkfc z-e|F@`>J&|a?ZKtG(V;2f`orjRN=45l*VpS!Co*>^9QMB;&)GUB=i(l`Z5H^A0(1r zALa9!%&a>;m3^_zVNLmf#BB{lF8@_ooj2B#@VLmpo5;J>K1+}@^d-dy;E{C<7W|fw zxGN=Tx{lq|#g~T7winmq$6=Gxz@V=NUo&l7dH)c?Y{T*(&o#!XbB;4mH1lgAK^bc) zdLO4YmHKT61K~`UyI{1{!ooraxd_e$MU60`0cG$)vZ=Vir{UAu$Yw>#o5#C#4|Fd PaW~i|gS7Jhm2LkA{Ji#} literal 0 HcmV?d00001 diff --git a/index.md b/index.md index 45602d8..0f7bc1c 100644 --- a/index.md +++ b/index.md @@ -1,28 +1,24 @@ --- _layout: landing +title: Welcome! --- BitBadger.Documents provides a lightweight document-style interface over [PostgreSQL][]'s and [SQLite][]'s JSON storage capabilities, with first-class support for both C# and F# programs. _(It is developed by the community; it is not officially affiliated with either project.)_ -> NOTE: v4.1 is the latest version. See below for upgrading. - +> [!TIP] > Expecting `BitBadger.Npgsql.Documents`? This library replaced it as of v3. ## Installing -### PostgreSQL +### PostgreSQL [![Nuget (with prereleases)][pkg-shield-pgsql]][pkg-link-pgsql] -[![Nuget (with prereleases)][pkg-shield-pgsql]][pkg-link-pgsql] - -``` +```shell dotnet add package BitBadger.Documents.Postgres ``` -### SQLite +### SQLite [![Nuget (with prereleases)][pkg-shield-sqlite]][pkg-link-sqlite] -[![Nuget (with prereleases)][pkg-shield-sqlite]][pkg-link-sqlite] - -``` +```shell dotnet add package BitBadger.Documents.Sqlite ``` @@ -40,7 +36,7 @@ dotnet add package BitBadger.Documents.Sqlite ## Why Documents? -Document databases usually store JSON objects (as their "documents") to provide a schemaless persistence of data; they also provide fault-tolerant ways to query that possibly-unstructured data. [MongoDB][] was the pioneer and is the leader in this space, but there are several who provide their own take on it, and their own programming API to come along with it. They also usually have some sort of clustering, replication, and sharding solution that allows them to be scaled out (horizontally) to handle a large amount of traffic. +Document databases usually store JSON objects (as their "documents") to provide schemaless persistence of data; they also provide fault-tolerant ways to query that possibly-unstructured data. [MongoDB][] was the pioneer and is the leader in this space, but there are several who provide their own take on it, and their own programming API to come along with it. They also usually have some sort of clustering, replication, and sharding solution that allows them to be scaled out (horizontally) to handle a large amount of traffic. As a mature relational database, PostgreSQL has a long history of robust data access from the .NET environment; Npgsql is actively developed, and provides both ADO.NET and EF Core APIs. PostgreSQL also has well-established, battle-tested horizontal scaling options. 
Additionally, the [Npgsql.FSharp][] project provides a functional API over Npgsql's ADO.NET data access. These three factors make PostgreSQL an excellent choice for document storage, and its relational nature can help in areas where traditional document databases become more complex. @@ -67,7 +63,7 @@ PostgreSQL is the most popular non-WordPress database for good reason. The [SQLite "About" page][sqlite-about] has a short description of the project and its strengths. Simplicity, flexibility, and a large install base speak for themselves. A lot of people believe they will need a lot of features offered by server-based relational databases, and live with that complexity even when the project is small. A smarter move may be to build with SQLite; if the need arises for something more, the project is very likely a success! -Many of the benefits listed for PostgreSQL apply here as well, including its test coverage - but SQLite removes the requirement to run it as a server! +Many of the benefits listed for PostgreSQL apply here as well, including its test coverage, but SQLite removes the requirement to run it as a server! ## Support @@ -94,4 +90,4 @@ Issues can be filed on the project's GitHub repository. [Litestream]: https://litestream.io/ "Litestream" [sqlite-about]: https://sqlite.org/about.html "About • SQLite" [json-ops]: https://www.postgresql.org/docs/15/functions-json.html#FUNCTIONS-JSON-OP-TABLE "JSON Functions and Operators • Documentation • PostgreSQL" -[tests]: https://github.com/bit-badger/BitBadger.Documents/actions/workflows/ci.yml "Actions • BitBadger.Documents • GitHub" +[tests]: https://git.bitbadger.solutions/bit-badger/BitBadger.Documents/releases "Releases • BitBadger.Documents • Bit Badger Solutions Git" diff --git a/src/Common/README.md b/src/Common/README.md index c0107b8..ba3aa52 100644 --- a/src/Common/README.md +++ b/src/Common/README.md @@ -8,11 +8,11 @@ This package provides common definitions and functionality for `BitBadger.Docume - Select, insert, update, save (upsert), delete, count, and check existence of documents, and create tables and indexes for these documents - Automatically generate IDs for documents (numeric IDs, GUIDs, or random strings) -- Addresses documents via ID and via comparison on any field (for PostgreSQL, also via equality on any property by using JSON containment, or via condition on any property using JSON Path queries) -- Accesses documents as your domain models (POCOs) -- Uses `Task`-based async for all data access functions -- Uses building blocks for more complex queries +- Address documents via ID and via comparison on any field (for PostgreSQL, also via equality on any property by using JSON containment, or via condition on any property using JSON Path queries) +- Access documents as your domain models (POCOs), as JSON strings, or as JSON written directly to a `PipeWriter` +- Use `Task`-based async for all data access functions +- Use building blocks for more complex queries ## Getting Started -Install the library of your choice and follow its README; also, the [project site](https://bitbadger.solutions/open-source/relational-documents/) has complete documentation. +Install the library of your choice and follow its README; also, the [project site](https://relationaldocs.bitbadger.solutions/dotnet/) has complete documentation. 
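To illustrate the JSON-string access mentioned in the feature list above, here is a minimal F# sketch; the table name and query are illustrative, and the `Custom.jsonArray`/`jsonFromData` names assume the JSON retrieval surface this patch series introduces:

```fsharp
// F#, PostgreSQL - a sketch, not the library's documented example
open BitBadger.Documents.Postgres
open Npgsql.FSharp

/// A JSON array (as a string) of customers in Atlanta; "[]" if there are none
let atlantaCustomersJson () =
    Custom.jsonArray
        "SELECT data FROM customer WHERE data->>'City' = @city"
        [ "@city", Sql.string "Atlanta" ]
        jsonFromData
```

Because the documents never round-trip through a POCO, this shape is handy for APIs that simply relay stored JSON to a caller.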
diff --git a/src/Directory.Build.props b/src/Directory.Build.props index cced572..68d9ff6 100644 --- a/src/Directory.Build.props +++ b/src/Directory.Build.props @@ -6,12 +6,12 @@ 4.1.0.0 4.1.0.0 4.1.0 - Add JSON retrieval and stream-writing functions + Add JSON retrieval and pipe-writing functions; update project URL to site with public API docs danieljsummers Bit Badger Solutions README.md icon.png - https://bitbadger.solutions/open-source/relational-documents/ + https://relationaldocs.bitbadger.solutions/dotnet/ false https://git.bitbadger.solutions/bit-badger/BitBadger.Documents Git diff --git a/src/Postgres/README.md b/src/Postgres/README.md index ff442c9..d0b6cf2 100644 --- a/src/Postgres/README.md +++ b/src/Postgres/README.md @@ -13,7 +13,7 @@ This package provides a lightweight document library backed by [PostgreSQL](http ## Upgrading from v3 -There is a breaking API change for `ByField` (C#) / `byField` (F#), along with a compatibility namespace that can mitigate the impact of these changes. See [the migration guide](https://bitbadger.solutions/open-source/relational-documents/upgrade-from-v3-to-v4.html) for full details. +There is a breaking API change for `ByField` (C#) / `byField` (F#), along with a compatibility namespace that can mitigate the impact of these changes. See [the migration guide](https://relationaldocs.bitbadger.solutions/dotnet/upgrade/v4.html) for full details. ## Getting Started @@ -71,7 +71,7 @@ var customer = await Find.ById("customer", "123"); // Find.byId type signature is string -> 'TKey -> Task<'TDoc option> let! customer = Find.byId "customer" "123" ``` -_(keys are treated as strings or numbers depending on their defintion; however, they are indexed as strings)_ +_(keys are treated as strings or numbers depending on their definition; however, they are indexed as strings)_ Count customers in Atlanta (using JSON containment): @@ -103,4 +103,4 @@ do! Delete.byJsonPath "customer" """$.City ? (@ == "Chicago")""" ## More Information -The [project site](https://bitbadger.solutions/open-source/relational-documents/) has full details on how to use this library. +The [project site](https://relationaldocs.bitbadger.solutions/dotnet/) has full details on how to use this library. diff --git a/src/Sqlite/README.md b/src/Sqlite/README.md index fdd8a46..7c679f8 100644 --- a/src/Sqlite/README.md +++ b/src/Sqlite/README.md @@ -13,7 +13,7 @@ This package provides a lightweight document library backed by [SQLite](https:// ## Upgrading from v3 -There is a breaking API change for `ByField` (C#) / `byField` (F#), along with a compatibility namespace that can mitigate the impact of these changes. See [the migration guide](https://bitbadger.solutions/open-source/relational-documents/upgrade-from-v3-to-v4.html) for full details. +There is a breaking API change for `ByField` (C#) / `byField` (F#), along with a compatibility namespace that can mitigate the impact of these changes. See [the migration guide](https://relationaldocs.bitbadger.solutions/dotnet/upgrade/v4.html) for full details. ## Getting Started @@ -103,4 +103,4 @@ do! Delete.byFields "customer" Any [ Field.Equal "City" "Chicago" ] ## More Information -The [project site](https://bitbadger.solutions/open-source/relational-documents/) has full details on how to use this library. +The [project site](https://relationaldocs.bitbadger.solutions/dotnet/) has full details on how to use this library. -- 2.47.2