Version 2.1 #41

Merged
danieljsummers merged 123 commits from version-2.1 into main 2024-03-27 00:13:28 +00:00
9 changed files with 660 additions and 78 deletions
Showing only changes of commit 05d49e6ce9


@@ -128,7 +128,7 @@ type IPostData =
    abstract member FindPageOfCategorizedPosts :
        WebLogId -> CategoryId list -> pageNbr: int -> postsPerPage: int -> Task<Post list>

-   /// Find posts to be displayed on an admin page (excluding revisions and prior permalinks)
+   /// Find posts to be displayed on an admin page (excluding text, revisions, and prior permalinks)
    abstract member FindPageOfPosts : WebLogId -> pageNbr: int -> postsPerPage: int -> Task<Post list>

    /// Find posts to be displayed on a page (excluding revisions and prior permalinks)
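The comment change records that admin listings now come back without the post text as well as without revisions and prior permalinks. A hedged sketch of the kind of consumer this contract supports (the handler shape and field selection are illustrative, not from this PR; it assumes the MyWebLog Post type is open):

    // An admin list view only needs the light fields, so the blanked-out Text is never missed
    let toAdminRow (post: Post) =
        {| Id = post.Id; Title = post.Title; Updated = post.UpdatedOn |}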


@@ -5,7 +5,7 @@ open BitBadger.Documents.Postgres
open Microsoft.Extensions.Logging
open MyWebLog
open MyWebLog.Data
-open NodaTime.Text
+open NodaTime
open Npgsql.FSharp

/// PostgreSQL myWebLog post data implementation
@@ -20,9 +20,13 @@ type PostgresPostData(log: ILogger) =
            return { post with Revisions = revisions }
        }

-   /// Return a post with no revisions or text
+   /// Return a post with no revisions or prior permalinks
+   let postWithoutLinks row =
+       { fromData<Post> row with PriorPermalinks = [] }
+
+   /// Return a post with no revisions, prior permalinks, or text
    let postWithoutText row =
-       { fromData<Post> row with Text = "" }
+       { postWithoutLinks row with Text = "" }

    /// Update a post's revisions
    let updatePostRevisions (postId: PostId) oldRevs newRevs =
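The two helpers above centralize the field-stripping that was previously inlined in each row mapper. A simplified, self-contained sketch of the pattern; the record and names are illustrative stand-ins, not the actual myWebLog types:

    // Minimal stand-in for a post document (illustrative only)
    type SketchPost =
        { Id: string; Text: string; PriorPermalinks: string list }

    /// List pages never need prior permalinks, so clear them
    let withoutLinks (post: SketchPost) =
        { post with PriorPermalinks = [] }

    /// Admin listings only need metadata, so clear the text as well
    let withoutText (post: SketchPost) =
        { withoutLinks post with Text = "" }

Composing the lighter mapper on top of the heavier one keeps the two in step whenever the set of excluded fields changes.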
@@ -36,6 +40,13 @@ type PostgresPostData(log: ILogger) =
    // IMPLEMENTATION FUNCTIONS

+   /// Add a post
+   let add (post : Post) = backgroundTask {
+       log.LogTrace "Post.add"
+       do! insert Table.Post { post with Revisions = [] }
+       do! updatePostRevisions post.Id [] post.Revisions
+   }
+
    /// Count posts in a status for the given web log
    let countByStatus (status: PostStatus) webLogId =
        log.LogTrace "Post.countByStatus"
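Unlike the old save, add assumes the post does not yet exist: it inserts the document with an empty revision list, then hands the full list to updatePostRevisions. That function's body is not part of this commit; the sketch below only guesses at the old-versus-new comparison such a helper typically performs (names are illustrative):

    // Hypothetical revision diff: work out which revisions to delete and which to insert
    let diffRevisions (oldRevs: 'rev list) (newRevs: 'rev list) =
        let toDelete = oldRevs |> List.filter (fun r -> not (List.contains r newRevs))
        let toAdd    = newRevs |> List.filter (fun r -> not (List.contains r oldRevs))
        toDelete, toAdd

For add, oldRevs is [], so every revision on the incoming post is treated as new.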
@@ -55,7 +66,7 @@ type PostgresPostData(log: ILogger) =
        Custom.single
            (selectWithCriteria Table.Post)
            [ jsonParam "@criteria" {| webLogDoc webLogId with Permalink = permalink |} ]
-           (fun row -> { fromData<Post> row with PriorPermalinks = [] })
+           postWithoutLinks

    /// Find a complete post by its ID for the given web log
    let findFullById postId webLogId = backgroundTask {
@@ -118,7 +129,7 @@ type PostgresPostData(log: ILogger) =
                 ORDER BY data ->> '{nameof Post.Empty.PublishedOn}' DESC
                 LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
            [ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |}; catParam ]
-           fromData<Post>
+           postWithoutLinks

    /// Get a page of posts for the given web log (excludes text and revisions)
    let findPageOfPosts webLogId pageNbr postsPerPage =
@@ -139,7 +150,7 @@ type PostgresPostData(log: ILogger) =
                 ORDER BY data ->> '{nameof Post.Empty.PublishedOn}' DESC
                 LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
            [ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |} ]
-           fromData<Post>
+           postWithoutLinks

    /// Get a page of tagged posts for the given web log (excludes revisions and prior permalinks)
    let findPageOfTaggedPosts webLogId (tag: string) pageNbr postsPerPage =
@@ -150,40 +161,32 @@ type PostgresPostData(log: ILogger) =
                 ORDER BY data ->> '{nameof Post.Empty.PublishedOn}' DESC
                 LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
            [ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |}; jsonParam "@tag" [| tag |] ]
-           fromData<Post>
+           postWithoutLinks

    /// Find the next newest and oldest post from a publish date for the given web log
-   let findSurroundingPosts webLogId publishedOn = backgroundTask {
+   let findSurroundingPosts webLogId (publishedOn: Instant) = backgroundTask {
        log.LogTrace "Post.findSurroundingPosts"
        let queryParams () =
            [ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |}
-             "@publishedOn", Sql.string ((InstantPattern.General.Format publishedOn)[..19]) ]
-       let pubField = nameof Post.Empty.PublishedOn
-       let! older =
-           Custom.list
-               $"{selectWithCriteria Table.Post}
-                 AND SUBSTR(data ->> '{pubField}', 1, 19) < @publishedOn
-                 ORDER BY data ->> '{pubField}' DESC
-                 LIMIT 1"
-               (queryParams ())
-               fromData<Post>
-       let! newer =
-           Custom.list
-               $"{selectWithCriteria Table.Post}
-                 AND SUBSTR(data ->> '{pubField}', 1, 19) > @publishedOn
-                 ORDER BY data ->> '{pubField}'
-                 LIMIT 1"
-               (queryParams ())
-               fromData<Post>
+             "@publishedOn", Sql.timestamptz (publishedOn.ToDateTimeOffset()) ]
+       let query op direction =
+           $"{selectWithCriteria Table.Post}
+             AND (data ->> '{nameof Post.Empty.PublishedOn}')::timestamp with time zone %s{op} @publishedOn
+             ORDER BY data ->> '{nameof Post.Empty.PublishedOn}' %s{direction}
+             LIMIT 1"
+       let! older = Custom.list (query "<" "DESC") (queryParams ()) postWithoutLinks
+       let! newer = Custom.list (query ">" "") (queryParams ()) postWithoutLinks
        return List.tryHead older, List.tryHead newer
    }

-   /// Save a post
-   let save (post : Post) = backgroundTask {
+   /// Update a post
+   let update (post : Post) = backgroundTask {
        log.LogTrace "Post.save"
-       let! oldPost = findFullById post.Id post.WebLogId
-       do! save Table.Post { post with Revisions = [] }
-       do! updatePostRevisions post.Id (match oldPost with Some p -> p.Revisions | None -> []) post.Revisions
+       match! findFullById post.Id post.WebLogId with
+       | Some oldPost ->
+           do! Update.byId Table.Post post.Id { post with Revisions = [] }
+           do! updatePostRevisions post.Id oldPost.Revisions post.Revisions
+       | None -> ()
    }

    /// Restore posts from a backup
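The surrounding-posts query now binds publishedOn as a timestamptz parameter and casts the stored JSON value, instead of comparing the first 19 characters of a formatted instant. A rough, self-contained illustration of the difference using NodaTime (the values are made up for the example):

    open NodaTime
    open NodaTime.Text

    // Two instants half a second apart
    let a = Instant.FromUtc(2024, 1, 20, 22, 32, 59) + Duration.FromMilliseconds 250L
    let b = Instant.FromUtc(2024, 1, 20, 22, 32, 59) + Duration.FromMilliseconds 750L

    // The old query compared truncated text, which cannot tell these apart...
    let textEqual  = (InstantPattern.General.Format a)[..18] = (InstantPattern.General.Format b)[..18]  // true
    // ...while comparing the values themselves (as a timestamptz comparison does) can
    let valueEqual = a = b                                                                              // false

The rewrite also collapses the two near-identical queries into one query builder parameterized by comparison operator and sort direction.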
@@ -212,7 +215,7 @@ type PostgresPostData(log: ILogger) =
        }

    interface IPostData with
-       member _.Add post = save post
+       member _.Add post = add post
        member _.CountByStatus status webLogId = countByStatus status webLogId
        member _.Delete postId webLogId = delete postId webLogId
        member _.FindById postId webLogId = findById postId webLogId
@@ -229,5 +232,5 @@ type PostgresPostData(log: ILogger) =
            findPageOfTaggedPosts webLogId tag pageNbr postsPerPage
        member _.FindSurroundingPosts webLogId publishedOn = findSurroundingPosts webLogId publishedOn
        member _.Restore posts = restore posts
-       member _.Update post = save post
+       member _.Update post = update post
        member _.UpdatePriorPermalinks postId webLogId permalinks = updatePriorPermalinks postId webLogId permalinks


@@ -649,7 +649,8 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
            getAll (objList categoryIds) (nameof Post.Empty.CategoryIds)
            filter [ nameof Post.Empty.WebLogId, webLogId :> obj
                     nameof Post.Empty.Status, Published ]
-           without [ nameof Post.Empty.PriorPermalinks; nameof Post.Empty.Revisions ]
+           merge (r.HashMap(nameof Post.Empty.PriorPermalinks, [||])
+                   .With(nameof Post.Empty.Revisions, [||]))
            distinct
            orderByDescending (nameof Post.Empty.PublishedOn)
            skip ((pageNbr - 1) * postsPerPage)
@@ -660,7 +661,9 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
        member _.FindPageOfPosts webLogId pageNbr postsPerPage = rethink<Post list> {
            withTable Table.Post
            getAll [ webLogId ] (nameof Post.Empty.WebLogId)
-           without [ nameof Post.Empty.PriorPermalinks; nameof Post.Empty.Revisions ]
+           merge (r.HashMap(nameof Post.Empty.Text, "")
+                   .With(nameof Post.Empty.PriorPermalinks, [||])
+                   .With(nameof Post.Empty.Revisions, [||]))
            orderByFuncDescending (fun row ->
                row[nameof Post.Empty.PublishedOn].Default_(nameof Post.Empty.UpdatedOn) :> obj)
            skip ((pageNbr - 1) * postsPerPage)
@@ -672,7 +675,8 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
            withTable Table.Post
            getAll [ webLogId ] (nameof Post.Empty.WebLogId)
            filter (nameof Post.Empty.Status) Published
-           without [ nameof Post.Empty.PriorPermalinks; nameof Post.Empty.Revisions ]
+           merge (r.HashMap(nameof Post.Empty.PriorPermalinks, [||])
+                   .With(nameof Post.Empty.Revisions, [||]))
            orderByDescending (nameof Post.Empty.PublishedOn)
            skip ((pageNbr - 1) * postsPerPage)
            limit (postsPerPage + 1)
@@ -684,7 +688,8 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
            getAll [ tag ] (nameof Post.Empty.Tags)
            filter [ nameof Post.Empty.WebLogId, webLogId :> obj
                     nameof Post.Empty.Status, Published ]
-           without [ nameof Post.Empty.PriorPermalinks; nameof Post.Empty.Revisions ]
+           merge (r.HashMap(nameof Post.Empty.PriorPermalinks, [||])
+                   .With(nameof Post.Empty.Revisions, [||]))
            orderByDescending (nameof Post.Empty.PublishedOn)
            skip ((pageNbr - 1) * postsPerPage)
            limit (postsPerPage + 1)
@@ -697,7 +702,8 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
            withTable Table.Post
            getAll [ webLogId ] (nameof Post.Empty.WebLogId)
            filter (fun row -> row[nameof Post.Empty.PublishedOn].Lt publishedOn :> obj)
-           without [ nameof Post.Empty.PriorPermalinks; nameof Post.Empty.Revisions ]
+           merge (r.HashMap(nameof Post.Empty.PriorPermalinks, [||])
+                   .With(nameof Post.Empty.Revisions, [||]))
            orderByDescending (nameof Post.Empty.PublishedOn)
            limit 1
            result; withRetryDefault
@@ -708,7 +714,8 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
            withTable Table.Post
            getAll [ webLogId ] (nameof Post.Empty.WebLogId)
            filter (fun row -> row[nameof Post.Empty.PublishedOn].Gt publishedOn :> obj)
-           without [ nameof Post.Empty.PriorPermalinks; nameof Post.Empty.Revisions ]
+           merge (r.HashMap(nameof Post.Empty.PriorPermalinks, [||])
+                   .With(nameof Post.Empty.Revisions, [||]))
            orderBy (nameof Post.Empty.PublishedOn)
            limit 1
            result; withRetryDefault
@@ -726,22 +733,20 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
            }
        }

-       member _.Update post = rethink {
+       member this.Update post = backgroundTask {
+           match! this.FindById post.Id post.WebLogId with
+           | Some _ ->
+               do! rethink {
                    withTable Table.Post
                    get post.Id
                    replace post
                    write; withRetryDefault; ignoreResult conn
                }
+           | None -> ()
        }

-       member _.UpdatePriorPermalinks postId webLogId permalinks = backgroundTask {
-           match! (
-               rethink<Post> {
-                   withTable Table.Post
-                   get postId
-                   without [ nameof Post.Empty.Revisions; nameof Post.Empty.PriorPermalinks ]
-                   resultOption; withRetryOptionDefault
-               }
-               |> verifyWebLog webLogId (_.WebLogId)) conn with
+       member this.UpdatePriorPermalinks postId webLogId permalinks = backgroundTask {
+           match! this.FindById postId webLogId with
            | Some _ ->
                do! rethink {
                    withTable Table.Post

@@ -33,6 +33,14 @@ type SQLitePostData(conn: SqliteConnection, log: ILogger) =
    /// The SELECT statement to retrieve posts with a web log ID parameter
    let postByWebLog = Document.Query.selectByWebLog Table.Post

+   /// Return a post with no revisions or prior permalinks
+   let postWithoutLinks rdr =
+       { fromData<Post> rdr with PriorPermalinks = [] }
+
+   /// Return a post with no revisions, prior permalinks, or text
+   let postWithoutText rdr =
+       { postWithoutLinks rdr with Text = "" }
+
    /// The SELECT statement to retrieve published posts with a web log ID parameter
    let publishedPostByWebLog =
        $"""{postByWebLog} AND {Query.whereByField (Field.EQ statName "") $"'{string Published}'"}"""
@@ -44,6 +52,13 @@ type SQLitePostData(conn: SqliteConnection, log: ILogger) =
    // IMPLEMENTATION FUNCTIONS

+   /// Add a post
+   let add (post: Post) = backgroundTask {
+       log.LogTrace "Post.add"
+       do! conn.insert Table.Post { post with Revisions = [] }
+       do! updatePostRevisions post.Id [] post.Revisions
+   }
+
    /// Count posts in a status for the given web log
    let countByStatus (status: PostStatus) webLogId =
        log.LogTrace "Post.countByStatus"
@@ -68,7 +83,7 @@ type SQLitePostData(conn: SqliteConnection, log: ILogger) =
        conn.customSingle
            $"""{Document.Query.selectByWebLog Table.Post} AND {Query.whereByField linkParam "@link"}"""
            (addFieldParam "@link" linkParam [ webLogParam webLogId ])
-           (fun rdr -> { fromData<Post> rdr with PriorPermalinks = [] })
+           postWithoutLinks

    /// Find a complete post by its ID for the given web log
    let findFullById postId webLogId = backgroundTask {
@@ -123,7 +138,7 @@ type SQLitePostData(conn: SqliteConnection, log: ILogger) =
                 ORDER BY {publishField} DESC
                 LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
            (webLogParam webLogId :: catParams)
-           fromData<Post>
+           postWithoutLinks

    /// Get a page of posts for the given web log (excludes text and revisions)
    let findPageOfPosts webLogId pageNbr postsPerPage =
@@ -133,7 +148,7 @@ type SQLitePostData(conn: SqliteConnection, log: ILogger) =
                 ORDER BY {publishField} DESC NULLS FIRST, data ->> '{nameof Post.Empty.UpdatedOn}'
                 LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
            [ webLogParam webLogId ]
-           (fun rdr -> { fromData<Post> rdr with Text = "" })
+           postWithoutText

    /// Get a page of published posts for the given web log (excludes revisions)
    let findPageOfPublishedPosts webLogId pageNbr postsPerPage =
@@ -143,7 +158,7 @@ type SQLitePostData(conn: SqliteConnection, log: ILogger) =
                 ORDER BY {publishField} DESC
                 LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
            [ webLogParam webLogId ]
-           fromData<Post>
+           postWithoutLinks

    /// Get a page of tagged posts for the given web log (excludes revisions)
    let findPageOfTaggedPosts webLogId (tag : string) pageNbr postsPerPage =
@@ -154,7 +169,7 @@ type SQLitePostData(conn: SqliteConnection, log: ILogger) =
                 ORDER BY {publishField} DESC
                 LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
            (webLogParam webLogId :: tagParams)
-           fromData<Post>
+           postWithoutLinks

    /// Find the next newest and oldest post from a publish date for the given web log
    let findSurroundingPosts webLogId (publishedOn : Instant) = backgroundTask {
@@ -163,27 +178,29 @@ type SQLitePostData(conn: SqliteConnection, log: ILogger) =
            conn.customSingle
                $"{publishedPostByWebLog} AND {publishField} < @publishedOn ORDER BY {publishField} DESC LIMIT 1"
                [ webLogParam webLogId; SqliteParameter("@publishedOn", instantParam publishedOn) ]
-               fromData<Post>
+               postWithoutLinks
        let! newer =
            conn.customSingle
                $"{publishedPostByWebLog} AND {publishField} > @publishedOn ORDER BY {publishField} LIMIT 1"
                [ webLogParam webLogId; SqliteParameter("@publishedOn", instantParam publishedOn) ]
-               fromData<Post>
+               postWithoutLinks
        return older, newer
    }

-   /// Save a post
-   let save (post: Post) = backgroundTask {
-       log.LogTrace "Post.save"
-       let! oldPost = findFullById post.Id post.WebLogId
-       do! conn.save Table.Post { post with Revisions = [] }
-       do! updatePostRevisions post.Id (match oldPost with Some p -> p.Revisions | None -> []) post.Revisions
+   /// Update a post
+   let update (post: Post) = backgroundTask {
+       log.LogTrace "Post.update"
+       match! findFullById post.Id post.WebLogId with
+       | Some oldPost ->
+           do! conn.updateById Table.Post post.Id { post with Revisions = [] }
+           do! updatePostRevisions post.Id oldPost.Revisions post.Revisions
+       | None -> ()
    }

    /// Restore posts from a backup
    let restore posts = backgroundTask {
        log.LogTrace "Post.restore"
-       for post in posts do do! save post
+       for post in posts do do! add post
    }

    /// Update prior permalinks for a post
@@ -196,7 +213,7 @@ type SQLitePostData(conn: SqliteConnection, log: ILogger) =
        }

    interface IPostData with
-       member _.Add post = save post
+       member _.Add post = add post
        member _.CountByStatus status webLogId = countByStatus status webLogId
        member _.Delete postId webLogId = delete postId webLogId
        member _.FindById postId webLogId = findById postId webLogId
@@ -213,5 +230,5 @@ type SQLitePostData(conn: SqliteConnection, log: ILogger) =
            findPageOfTaggedPosts webLogId tag pageNbr postsPerPage
        member _.FindSurroundingPosts webLogId publishedOn = findSurroundingPosts webLogId publishedOn
        member _.Restore posts = restore posts
-       member _.Update post = save post
+       member _.Update post = update post
        member _.UpdatePriorPermalinks postId webLogId permalinks = updatePriorPermalinks postId webLogId permalinks


@@ -24,9 +24,31 @@ let episode2 = PostId "l4_Eh4aFO06SqqJjOymNzA"
/// The ID of "Something May Happen" post
let something = PostId "QweKbWQiOkqqrjEdgP9wwg"

+/// The published instant for "Something May Happen" post
+let somethingPublished = Instant.FromDateTimeOffset(DateTimeOffset.Parse "2024-01-20T22:32:59Z")
+
+/// The ID of "An Incomplete Thought" post
+let incomplete = PostId "VweKbWQiOkqqrjEdgP9wwg"
+
/// The ID of "Test Post 1" post
let testPost1 = PostId "RCsCU2puYEmkpzotoi8p4g"

+/// The published instant for "Test Post 1" post
+let testPost1Published = Instant.FromDateTimeOffset(DateTimeOffset.Parse "2024-01-20T22:17:29Z")
+
+/// The category IDs for "Spitball" (parent) and "Moonshot"
+let testCatIds = [ CategoryId "jw6N69YtTEWVHAO33jHU-w"; CategoryId "ScVpyu1e7UiP7bDdge3ZEw" ]
+
+/// Ensure that a list of posts has text for each post
+let ensureHasText (posts: Post list) =
+    for post in posts do Expect.isNotEmpty post.Text $"Text should not be blank (post ID {post.Id})"
+
+/// Ensure that a list of posts has no revisions or prior permalinks
+let ensureEmpty posts =
+    for post in posts do
+        Expect.isEmpty post.Revisions $"There should have been no revisions (post ID {post.Id})"
+        Expect.isEmpty post.PriorPermalinks $"There should have been no prior permalinks (post ID {post.Id})"
+
let ``Add succeeds`` (data: IData) = task {
    let post =
        { Id = PostId "a-new-post"
@@ -109,8 +131,7 @@ let ``FindById succeeds when a post is found`` (data: IData) = task {
        it.Metadata
        [ { Name = "Density"; Value = "Non-existent" }; { Name = "Intensity"; Value = "Low" } ]
        "Metadata is incorrect"
-   Expect.isEmpty it.PriorPermalinks "Prior permalinks should have been empty"
-   Expect.isEmpty it.Revisions "Revisions should have been empty"
+   ensureEmpty [ it ]
}

let ``FindById succeeds when a post is not found (incorrect weblog)`` (data: IData) = task {
@@ -128,8 +149,7 @@ let ``FindByPermalink succeeds when a post is found`` (data: IData) = task {
    Expect.isSome post "A post should have been returned"
    let it = post.Value
    Expect.equal it.Id episode1 "The wrong post was retrieved"
-   Expect.isEmpty it.PriorPermalinks "Prior permalinks should have been empty"
-   Expect.isEmpty it.Revisions "Revisions should have been empty"
+   ensureEmpty [ it ]
}

let ``FindByPermalink succeeds when a post is not found (incorrect weblog)`` (data: IData) = task {
@@ -173,8 +193,8 @@ let ``FindFullById succeeds when a post is not found`` (data: IData) = task {

let ``FindFullByWebLog succeeds when posts are found`` (data: IData) = task {
    let! posts = data.Post.FindFullByWebLog rootId
-   Expect.hasLength posts 4 "There should have been 4 posts returned"
-   let allPosts = [ testPost1; episode1; episode2; something ]
+   Expect.hasLength posts 5 "There should have been 5 posts returned"
+   let allPosts = [ testPost1; episode1; episode2; something; incomplete ]
    posts |> List.iter (fun it ->
        Expect.contains allPosts it.Id $"Post ID {it.Id} unexpected"
        if it.Id = episode1 then
@@ -187,3 +207,225 @@ let ``FindFullByWebLog succeeds when posts are not found`` (data: IData) = task
    let! posts = data.Post.FindFullByWebLog (WebLogId "nonexistent")
    Expect.isEmpty posts "No posts should have been retrieved"
}
let ``FindPageOfCategorizedPosts succeeds when posts are found`` (data: IData) = task {
let! posts = data.Post.FindPageOfCategorizedPosts rootId testCatIds 1 1
Expect.hasLength posts 2 "There should be 2 posts returned"
Expect.equal posts[0].Id something "The wrong post was returned for page 1"
ensureEmpty posts
let! posts = data.Post.FindPageOfCategorizedPosts rootId testCatIds 2 1
Expect.hasLength posts 1 "There should be 1 post returned"
Expect.equal posts[0].Id testPost1 "The wrong post was returned for page 2"
ensureEmpty posts
}
let ``FindPageOfCategorizedPosts succeeds when finding a too-high page number`` (data: IData) = task {
let! posts = data.Post.FindPageOfCategorizedPosts rootId testCatIds 17 2
Expect.hasLength posts 0 "There should have been no posts returned (not enough posts)"
}
let ``FindPageOfCategorizedPosts succeeds when a category has no posts`` (data: IData) = task {
let! posts = data.Post.FindPageOfCategorizedPosts rootId [ CategoryId "nope" ] 1 1
Expect.hasLength posts 0 "There should have been no posts returned (none match)"
}
let ``FindPageOfPosts succeeds when posts are found`` (data: IData) = task {
let ensureNoText (posts: Post list) =
for post in posts do Expect.equal post.Text "" $"There should be no text (post ID {post.Id})"
let! posts = data.Post.FindPageOfPosts rootId 1 2
Expect.hasLength posts 3 "There should have been 3 posts returned for page 1"
Expect.equal posts[0].Id incomplete "Page 1, post 1 is incorrect"
Expect.equal posts[1].Id something "Page 1, post 2 is incorrect"
Expect.equal posts[2].Id episode2 "Page 1, post 3 is incorrect"
ensureNoText posts
ensureEmpty posts
let! posts = data.Post.FindPageOfPosts rootId 2 2
Expect.hasLength posts 3 "There should have been 3 posts returned for page 2"
Expect.equal posts[0].Id episode2 "Page 2, post 1 is incorrect"
Expect.equal posts[1].Id episode1 "Page 2, post 2 is incorrect"
Expect.equal posts[2].Id testPost1 "Page 2, post 3 is incorrect"
ensureNoText posts
ensureEmpty posts
let! posts = data.Post.FindPageOfPosts rootId 3 2
Expect.hasLength posts 1 "There should have been 1 post returned for page 3"
Expect.equal posts[0].Id testPost1 "Page 3, post 1 is incorrect"
ensureNoText posts
ensureEmpty posts
}
let ``FindPageOfPosts succeeds when finding a too-high page number`` (data: IData) = task {
let! posts = data.Post.FindPageOfPosts rootId 88 3
Expect.isEmpty posts "There should have been no posts returned (not enough posts)"
}
let ``FindPageOfPosts succeeds when there are no posts`` (data: IData) = task {
let! posts = data.Post.FindPageOfPosts (WebLogId "no-posts") 1 25
Expect.isEmpty posts "There should have been no posts returned (no posts)"
}
let ``FindPageOfPublishedPosts succeeds when posts are found`` (data: IData) = task {
let! posts = data.Post.FindPageOfPublishedPosts rootId 1 3
Expect.hasLength posts 4 "There should have been 4 posts returned for page 1"
Expect.equal posts[0].Id something "Page 1, post 1 is incorrect"
Expect.equal posts[1].Id episode2 "Page 1, post 2 is incorrect"
Expect.equal posts[2].Id episode1 "Page 1, post 3 is incorrect"
Expect.equal posts[3].Id testPost1 "Page 1, post 4 is incorrect"
ensureHasText posts
ensureEmpty posts
let! posts = data.Post.FindPageOfPublishedPosts rootId 2 2
Expect.hasLength posts 2 "There should have been 2 posts returned for page 2"
Expect.equal posts[0].Id episode1 "Page 2, post 1 is incorrect"
Expect.equal posts[1].Id testPost1 "Page 2, post 2 is incorrect"
ensureHasText posts
ensureEmpty posts
}
let ``FindPageOfPublishedPosts succeeds when finding a too-high page number`` (data: IData) = task {
let! posts = data.Post.FindPageOfPublishedPosts rootId 7 22
Expect.isEmpty posts "There should have been no posts returned (not enough posts)"
}
let ``FindPageOfPublishedPosts succeeds when there are no posts`` (data: IData) = task {
let! posts = data.Post.FindPageOfPublishedPosts (WebLogId "empty") 1 8
Expect.isEmpty posts "There should have been no posts returned (no posts)"
}
let ``FindPageOfTaggedPosts succeeds when posts are found`` (data: IData) = task {
let! posts = data.Post.FindPageOfTaggedPosts rootId "f#" 1 1
Expect.hasLength posts 2 "There should have been 2 posts returned"
Expect.equal posts[0].Id something "Page 1, post 1 is incorrect"
Expect.equal posts[1].Id testPost1 "Page 1, post 2 is incorrect"
ensureHasText posts
ensureEmpty posts
let! posts = data.Post.FindPageOfTaggedPosts rootId "f#" 2 1
Expect.hasLength posts 1 "There should have been 1 post returned"
Expect.equal posts[0].Id testPost1 "Page 2, post 1 is incorrect"
ensureHasText posts
ensureEmpty posts
}
let ``FindPageOfTaggedPosts succeeds when posts are found (excluding drafts)`` (data: IData) = task {
let! posts = data.Post.FindPageOfTaggedPosts rootId "speculation" 1 10
Expect.hasLength posts 1 "There should have been 1 post returned"
Expect.equal posts[0].Id something "Post 1 is incorrect"
ensureHasText posts
ensureEmpty posts
}
let ``FindPageOfTaggedPosts succeeds when finding a too-high page number`` (data: IData) = task {
let! posts = data.Post.FindPageOfTaggedPosts rootId "f#" 436 18
Expect.isEmpty posts "There should have been no posts returned (not enough posts)"
}
let ``FindPageOfTaggedPosts succeeds when there are no posts`` (data: IData) = task {
let! posts = data.Post.FindPageOfTaggedPosts rootId "non-existent-tag" 1 8
Expect.isEmpty posts "There should have been no posts returned (no posts)"
}
let ``FindSurroundingPosts succeeds when there is no next newer post`` (data: IData) = task {
let! older, newer = data.Post.FindSurroundingPosts rootId somethingPublished
Expect.isSome older "There should have been an older post"
Expect.equal older.Value.Id episode2 "The next older post is incorrect"
ensureHasText [ older.Value ]
ensureEmpty [ older.Value ]
Expect.isNone newer "There should not have been a newer post"
}
let ``FindSurroundingPosts succeeds when there is no next older post`` (data: IData) = task {
let! older, newer = data.Post.FindSurroundingPosts rootId testPost1Published
Expect.isNone older "There should not have been an older post"
Expect.isSome newer "There should have been a newer post"
Expect.equal newer.Value.Id episode1 "The next newer post is incorrect"
ensureHasText [ newer.Value ]
ensureEmpty [ newer.Value ]
}
let ``FindSurroundingPosts succeeds when older and newer exist`` (data: IData) = task {
let! older, newer = data.Post.FindSurroundingPosts rootId episode1Published
Expect.isSome older "There should have been an older post"
Expect.equal older.Value.Id testPost1 "The next older post is incorrect"
Expect.isSome newer "There should have been a newer post"
Expect.equal newer.Value.Id episode2 "The next newer post is incorrect"
ensureHasText [ older.Value; newer.Value ]
ensureEmpty [ older.Value; newer.Value ]
}
let ``Update succeeds when the post exists`` (data: IData) = task {
let! before = data.Post.FindFullById (PostId "a-new-post") (WebLogId "test")
Expect.isSome before "The post to be updated should have been found"
do! data.Post.Update
{ before.Value with
AuthorId = WebLogUserId "someone-else"
Status = Draft
Title = "An Updated Test Post"
Permalink = Permalink "2021/updated-post.html"
PublishedOn = None
UpdatedOn = Noda.epoch + Duration.FromDays 4
Template = Some "other"
Text = "<p>Updated text here"
CategoryIds = [ CategoryId "c"; CategoryId "d"; CategoryId "e" ]
Tags = [ "alpha"; "beta"; "nu"; "zeta" ]
Episode = None
Metadata = [ { Name = "Howdy"; Value = "Pardner" } ]
PriorPermalinks = Permalink "2020/test-post.html" :: before.Value.PriorPermalinks
Revisions =
{ AsOf = Noda.epoch + Duration.FromDays 4; Text = Html "<p>Updated text here" }
:: before.Value.Revisions }
let! after = data.Post.FindFullById (PostId "a-new-post") (WebLogId "test")
Expect.isSome after "The updated post should have been found"
let post = after.Value
Expect.equal post.AuthorId (WebLogUserId "someone-else") "Updated author is incorrect"
Expect.equal post.Status Draft "Updated status is incorrect"
Expect.equal post.Title "An Updated Test Post" "Updated title is incorrect"
Expect.equal post.Permalink (Permalink "2021/updated-post.html") "Updated permalink is incorrect"
Expect.isNone post.PublishedOn "Updated post should not have had a published-on date/time"
Expect.equal post.UpdatedOn (Noda.epoch + Duration.FromDays 4) "Updated updated-on date/time is incorrect"
Expect.equal post.Template (Some "other") "Updated template is incorrect"
Expect.equal post.Text "<p>Updated text here" "Updated text is incorrect"
Expect.equal
post.CategoryIds [ CategoryId "c"; CategoryId "d"; CategoryId "e" ] "Updated category IDs are incorrect"
Expect.equal post.Tags [ "alpha"; "beta"; "nu"; "zeta" ] "Updated tags are incorrect"
Expect.isNone post.Episode "Updated episode is incorrect"
Expect.equal post.Metadata [ { Name = "Howdy"; Value = "Pardner" } ] "Updated metadata is incorrect"
Expect.equal
post.PriorPermalinks
[ Permalink "2020/test-post.html"; Permalink "2020/test-post-a.html" ]
"Updated prior permalinks are incorrect"
Expect.equal
post.Revisions
[ { AsOf = Noda.epoch + Duration.FromDays 4; Text = Html "<p>Updated text here" }
{ AsOf = Noda.epoch + Duration.FromMinutes 1L; Text = Html "<p>Test text here" } ]
"Updated revisions are incorrect"
}
let ``Update succeeds when the post does not exist`` (data: IData) = task {
let postId = PostId "lost-post"
do! data.Post.Update { Post.Empty with Id = postId; WebLogId = rootId }
let! post = data.Post.FindById postId rootId
Expect.isNone post "A post should not have been retrieved"
}
let ``UpdatePriorPermalinks succeeds when the post exists`` (data: IData) = task {
let links = [ Permalink "2024/ep-1.html"; Permalink "2023/ep-1.html" ]
let! found = data.Post.UpdatePriorPermalinks episode1 rootId links
Expect.isTrue found "The permalinks should have been updated"
let! post = data.Post.FindFullById episode1 rootId
Expect.isSome post "The post should have been found"
Expect.equal post.Value.PriorPermalinks links "The prior permalinks were not correct"
}
let ``UpdatePriorPermalinks succeeds when the post does not exist`` (data: IData) = task {
let! found =
data.Post.UpdatePriorPermalinks (PostId "silence") WebLogId.Empty [ Permalink "a.html"; Permalink "b.html" ]
Expect.isFalse found "The permalinks should not have been updated"
}
let ``Delete succeeds when a post is deleted`` (data: IData) = task {
let! deleted = data.Post.Delete episode2 rootId
Expect.isTrue deleted "The post should have been deleted"
}
let ``Delete succeeds when a post is not deleted`` (data: IData) = task {
let! deleted = data.Post.Delete episode2 rootId // this was deleted above
Expect.isFalse deleted "A post should not have been deleted"
}


@@ -1,6 +1,5 @@
module PostgresDataTests

-open System
open BitBadger.Documents
open Expecto
open Microsoft.Extensions.Logging.Abstractions
@@ -275,6 +274,88 @@ let postTests = testList "Post" [
            do! PostDataTests.``FindFullByWebLog succeeds when posts are not found`` (mkData ())
        }
    ]
testList "FindPageOfCategorizedPosts" [
testTask "succeeds when posts are found" {
do! PostDataTests.``FindPageOfCategorizedPosts succeeds when posts are found`` (mkData ())
}
testTask "succeeds when finding a too-high page number" {
do! PostDataTests.``FindPageOfCategorizedPosts succeeds when finding a too-high page number`` (mkData ())
}
testTask "succeeds when a category has no posts" {
do! PostDataTests.``FindPageOfCategorizedPosts succeeds when a category has no posts`` (mkData ())
}
]
testList "FindPageOfPosts" [
testTask "succeeds when posts are found" {
do! PostDataTests.``FindPageOfPosts succeeds when posts are found`` (mkData ())
}
testTask "succeeds when finding a too-high page number" {
do! PostDataTests.``FindPageOfPosts succeeds when finding a too-high page number`` (mkData ())
}
testTask "succeeds when there are no posts" {
do! PostDataTests.``FindPageOfPosts succeeds when there are no posts`` (mkData ())
}
]
testList "FindPageOfPublishedPosts" [
testTask "succeeds when posts are found" {
do! PostDataTests.``FindPageOfPublishedPosts succeeds when posts are found`` (mkData ())
}
testTask "succeeds when finding a too-high page number" {
do! PostDataTests.``FindPageOfPublishedPosts succeeds when finding a too-high page number`` (mkData ())
}
testTask "succeeds when there are no posts" {
do! PostDataTests.``FindPageOfPublishedPosts succeeds when there are no posts`` (mkData ())
}
]
testList "FindPageOfTaggedPosts" [
testTask "succeeds when posts are found" {
do! PostDataTests.``FindPageOfTaggedPosts succeeds when posts are found`` (mkData ())
}
testTask "succeeds when posts are found (excluding drafts)" {
do! PostDataTests.``FindPageOfTaggedPosts succeeds when posts are found (excluding drafts)`` (mkData ())
}
testTask "succeeds when finding a too-high page number" {
do! PostDataTests.``FindPageOfTaggedPosts succeeds when finding a too-high page number`` (mkData ())
}
testTask "succeeds when there are no posts" {
do! PostDataTests.``FindPageOfTaggedPosts succeeds when there are no posts`` (mkData ())
}
]
testList "FindSurroundingPosts" [
testTask "succeeds when there is no next newer post" {
do! PostDataTests.``FindSurroundingPosts succeeds when there is no next newer post`` (mkData ())
}
testTask "succeeds when there is no next older post" {
do! PostDataTests.``FindSurroundingPosts succeeds when there is no next older post`` (mkData ())
}
testTask "succeeds when older and newer exist" {
do! PostDataTests.``FindSurroundingPosts succeeds when older and newer exist`` (mkData ())
}
]
testList "Update" [
testTask "succeeds when the post exists" {
do! PostDataTests.``Update succeeds when the post exists`` (mkData ())
}
testTask "succeeds when the post does not exist" {
do! PostDataTests.``Update succeeds when the post does not exist`` (mkData ())
}
]
testList "UpdatePriorPermalinks" [
testTask "succeeds when the post exists" {
do! PostDataTests.``UpdatePriorPermalinks succeeds when the post exists`` (mkData ())
}
testTask "succeeds when the post does not exist" {
do! PostDataTests.``UpdatePriorPermalinks succeeds when the post does not exist`` (mkData ())
}
]
testList "Delete" [
testTask "succeeds when a post is deleted" {
do! PostDataTests.``Delete succeeds when a post is deleted`` (mkData ())
}
testTask "succeeds when a post is not deleted" {
do! PostDataTests.``Delete succeeds when a post is not deleted`` (mkData ())
}
]
]

/// Drop the throwaway PostgreSQL database


@@ -274,6 +274,88 @@ let postTests = testList "Post" [
            do! PostDataTests.``FindFullByWebLog succeeds when posts are not found`` data.Value
        }
    ]
testList "FindPageOfCategorizedPosts" [
testTask "succeeds when posts are found" {
do! PostDataTests.``FindPageOfCategorizedPosts succeeds when posts are found`` data.Value
}
testTask "succeeds when finding a too-high page number" {
do! PostDataTests.``FindPageOfCategorizedPosts succeeds when finding a too-high page number`` data.Value
}
testTask "succeeds when a category has no posts" {
do! PostDataTests.``FindPageOfCategorizedPosts succeeds when a category has no posts`` data.Value
}
]
testList "FindPageOfPosts" [
testTask "succeeds when posts are found" {
do! PostDataTests.``FindPageOfPosts succeeds when posts are found`` data.Value
}
testTask "succeeds when finding a too-high page number" {
do! PostDataTests.``FindPageOfPosts succeeds when finding a too-high page number`` data.Value
}
testTask "succeeds when there are no posts" {
do! PostDataTests.``FindPageOfPosts succeeds when there are no posts`` data.Value
}
]
testList "FindPageOfPublishedPosts" [
testTask "succeeds when posts are found" {
do! PostDataTests.``FindPageOfPublishedPosts succeeds when posts are found`` data.Value
}
testTask "succeeds when finding a too-high page number" {
do! PostDataTests.``FindPageOfPublishedPosts succeeds when finding a too-high page number`` data.Value
}
testTask "succeeds when there are no posts" {
do! PostDataTests.``FindPageOfPublishedPosts succeeds when there are no posts`` data.Value
}
]
testList "FindPageOfTaggedPosts" [
testTask "succeeds when posts are found" {
do! PostDataTests.``FindPageOfTaggedPosts succeeds when posts are found`` data.Value
}
testTask "succeeds when posts are found (excluding drafts)" {
do! PostDataTests.``FindPageOfTaggedPosts succeeds when posts are found (excluding drafts)`` data.Value
}
testTask "succeeds when finding a too-high page number" {
do! PostDataTests.``FindPageOfTaggedPosts succeeds when finding a too-high page number`` data.Value
}
testTask "succeeds when there are no posts" {
do! PostDataTests.``FindPageOfTaggedPosts succeeds when there are no posts`` data.Value
}
]
testList "FindSurroundingPosts" [
testTask "succeeds when there is no next newer post" {
do! PostDataTests.``FindSurroundingPosts succeeds when there is no next newer post`` data.Value
}
testTask "succeeds when there is no next older post" {
do! PostDataTests.``FindSurroundingPosts succeeds when there is no next older post`` data.Value
}
testTask "succeeds when older and newer exist" {
do! PostDataTests.``FindSurroundingPosts succeeds when older and newer exist`` data.Value
}
]
testList "Update" [
testTask "succeeds when the post exists" {
do! PostDataTests.``Update succeeds when the post exists`` data.Value
}
testTask "succeeds when the post does not exist" {
do! PostDataTests.``Update succeeds when the post does not exist`` data.Value
}
]
testList "UpdatePriorPermalinks" [
testTask "succeeds when the post exists" {
do! PostDataTests.``UpdatePriorPermalinks succeeds when the post exists`` data.Value
}
testTask "succeeds when the post does not exist" {
do! PostDataTests.``UpdatePriorPermalinks succeeds when the post does not exist`` data.Value
}
]
testList "Delete" [
testTask "succeeds when a post is deleted" {
do! PostDataTests.``Delete succeeds when a post is deleted`` data.Value
}
testTask "succeeds when a post is not deleted" {
do! PostDataTests.``Delete succeeds when a post is not deleted`` data.Value
}
]
]

/// Drop the throwaway RethinkDB database


@@ -390,6 +390,132 @@
            finally dispose data
        }
    ]
testList "FindPageOfCategorizedPosts" [
testTask "succeeds when posts are found" {
let data = mkData ()
try do! PostDataTests.``FindPageOfCategorizedPosts succeeds when posts are found`` data
finally dispose data
}
testTask "succeeds when finding a too-high page number" {
let data = mkData ()
try do! PostDataTests.``FindPageOfCategorizedPosts succeeds when finding a too-high page number`` data
finally dispose data
}
testTask "succeeds when a category has no posts" {
let data = mkData ()
try do! PostDataTests.``FindPageOfCategorizedPosts succeeds when a category has no posts`` data
finally dispose data
}
]
testList "FindPageOfPosts" [
testTask "succeeds when posts are found" {
let data = mkData ()
try do! PostDataTests.``FindPageOfPosts succeeds when posts are found`` data
finally dispose data
}
testTask "succeeds when finding a too-high page number" {
let data = mkData ()
try do! PostDataTests.``FindPageOfPosts succeeds when finding a too-high page number`` data
finally dispose data
}
testTask "succeeds when there are no posts" {
let data = mkData ()
try do! PostDataTests.``FindPageOfPosts succeeds when there are no posts`` data
finally dispose data
}
]
testList "FindPageOfPublishedPosts" [
testTask "succeeds when posts are found" {
let data = mkData ()
try do! PostDataTests.``FindPageOfPublishedPosts succeeds when posts are found`` data
finally dispose data
}
testTask "succeeds when finding a too-high page number" {
let data = mkData ()
try do! PostDataTests.``FindPageOfPublishedPosts succeeds when finding a too-high page number`` data
finally dispose data
}
testTask "succeeds when there are no posts" {
let data = mkData ()
try do! PostDataTests.``FindPageOfPublishedPosts succeeds when there are no posts`` data
finally dispose data
}
]
testList "FindPageOfTaggedPosts" [
testTask "succeeds when posts are found" {
let data = mkData ()
try do! PostDataTests.``FindPageOfTaggedPosts succeeds when posts are found`` data
finally dispose data
}
testTask "succeeds when posts are found (excluding drafts)" {
let data = mkData ()
try do! PostDataTests.``FindPageOfTaggedPosts succeeds when posts are found (excluding drafts)`` data
finally dispose data
}
testTask "succeeds when finding a too-high page number" {
let data = mkData ()
try do! PostDataTests.``FindPageOfTaggedPosts succeeds when finding a too-high page number`` data
finally dispose data
}
testTask "succeeds when there are no posts" {
let data = mkData ()
try do! PostDataTests.``FindPageOfTaggedPosts succeeds when there are no posts`` data
finally dispose data
}
]
testList "FindSurroundingPosts" [
testTask "succeeds when there is no next newer post" {
let data = mkData ()
try do! PostDataTests.``FindSurroundingPosts succeeds when there is no next newer post`` data
finally dispose data
}
testTask "succeeds when there is no next older post" {
let data = mkData ()
try do! PostDataTests.``FindSurroundingPosts succeeds when there is no next older post`` data
finally dispose data
}
testTask "succeeds when older and newer exist" {
let data = mkData ()
try do! PostDataTests.``FindSurroundingPosts succeeds when older and newer exist`` data
finally dispose data
}
]
testList "Update" [
testTask "succeeds when the post exists" {
let data = mkData ()
try do! PostDataTests.``Update succeeds when the post exists`` data
finally dispose data
}
testTask "succeeds when the post does not exist" {
let data = mkData ()
try do! PostDataTests.``Update succeeds when the post does not exist`` data
finally dispose data
}
]
testList "UpdatePriorPermalinks" [
testTask "succeeds when the post exists" {
let data = mkData ()
try do! PostDataTests.``UpdatePriorPermalinks succeeds when the post exists`` data
finally dispose data
}
testTask "succeeds when the post does not exist" {
let data = mkData ()
try do! PostDataTests.``UpdatePriorPermalinks succeeds when the post does not exist`` data
finally dispose data
}
]
testList "Delete" [
testTask "succeeds when a post is deleted" {
let data = mkData ()
try do! PostDataTests.``Delete succeeds when a post is deleted`` data
finally dispose data
}
testTask "succeeds when a post is not deleted" {
let data = mkData ()
try do! PostDataTests.``Delete succeeds when a post is not deleted`` data
finally dispose data
}
]
]

/// Delete the SQLite database


@@ -333,13 +333,39 @@
        "speculation"
      ],
      "Metadata": [],
-     "PriorPermalinks": [],
+     "PriorPermalinks": [
+       "2024/some-thing.html"
+     ],
      "Revisions": [
        {
          "AsOf": "2024-01-20T22:32:59Z",
          "Text": "HTML: <h2>Hmm</h2>"
        }
      ]
+   },
+   {
+     "Id": "VweKbWQiOkqqrjEdgP9wwg",
+     "WebLogId": "uSitJEuD3UyzWC9jgOHc8g",
+     "AuthorId": "5EM2rimH9kONpmd2zQkiVA",
+     "Status": "Draft",
+     "Title": "An Incomplete Thought",
+     "Permalink": "2024/still-cooking.html",
+     "UpdatedOn": "2024-01-24T22:35:00Z",
+     "Text": "<p>Think think think",
+     "CategoryIds": [
+       "jw6N69YtTEWVHAO33jHU-w"
+     ],
+     "Tags": [
+       "speculation"
+     ],
+     "Metadata": [],
+     "PriorPermalinks": [],
+     "Revisions": [
+       {
+         "AsOf": "2024-01-24T22:35:00Z",
+         "Text": "HTML: <p>Think think think"
+       }
+     ]
    }
  ],
  "Uploads": []