Add page-of-pages and update tests
commit 0a53e27f22
parent f60139db1c
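
The diff below touches the PostgreSQL, RethinkDB and SQLite page-data backends plus their shared test suite. As orientation only, here is a minimal sketch of the slice of the page-data surface the commit changes, written with placeholder types; the project's real IPageData interface and Page record live elsewhere in the repository and are richer than this.

open System.Threading.Tasks

// Placeholder types, for illustration only; not the project's actual definitions.
type PageId = PageId of string
type WebLogId = WebLogId of string
type Page = { Id: PageId; WebLogId: WebLogId; Title: string }

type IPageData =
    /// Add a brand-new page
    abstract Add: Page -> Task
    /// Update an existing page (a no-op when the page is not found)
    abstract Update: Page -> Task
    /// Get page number pageNbr of a web log's pages, 25 per page
    abstract FindPageOfPages: WebLogId -> int -> Task<Page list>
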
@@ -35,6 +35,14 @@ type PostgresPageData(log: ILogger) =
 
     // IMPLEMENTATION FUNCTIONS
 
+    /// Add a page
+    let add (page: Page) = backgroundTask {
+        log.LogTrace "Page.add"
+        do! insert Table.Page { page with Revisions = [] }
+        do! updatePageRevisions page.Id [] page.Revisions
+        ()
+    }
+
     /// Get all pages for a web log (without text, metadata, revisions, or prior permalinks)
     let all webLogId =
         log.LogTrace "Page.all"
@@ -134,7 +142,7 @@ type PostgresPageData(log: ILogger) =
                ORDER BY LOWER(data->>'{nameof Page.Empty.Title}')
                LIMIT @pageSize OFFSET @toSkip"
             [ webLogContains webLogId; "@pageSize", Sql.int 26; "@toSkip", Sql.int ((pageNbr - 1) * 25) ]
-            fromData<Page>
+            (fun row -> { fromData<Page> row with Metadata = []; PriorPermalinks = [] })
 
     /// Restore pages from a backup
     let restore (pages: Page list) = backgroundTask {
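
The page-of-pages query above fetches 26 rows (@pageSize) while skipping (pageNbr - 1) * 25 rows (@toSkip): the caller can display 25 pages and use the extra row to decide whether a next-page link is needed. Below is a self-contained sketch of that convention over a plain list; pagesPerPage and pageOfItems are illustrative names, not part of the commit.

// Illustrative only: the 25-per-page / fetch-one-extra convention used by the query above.
let pagesPerPage = 25

/// One display page of items, plus a flag indicating whether more pages follow
let pageOfItems pageNbr (items: 'T list) =
    let chunk =
        items
        |> List.skip (min items.Length ((pageNbr - 1) * pagesPerPage))
        |> List.truncate (pagesPerPage + 1)   // like LIMIT @pageSize (26)
    List.truncate pagesPerPage chunk, chunk.Length > pagesPerPage

// Example: with 60 items, page 2 holds items 26-50 and reports that a page 3 exists.
let page2, hasNext = pageOfItems 2 [ 1 .. 60 ]
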
@@ -151,12 +159,14 @@ type PostgresPageData(log: ILogger) =
         ()
     }
 
-    /// Save a page
-    let save (page: Page) = backgroundTask {
-        log.LogTrace "Page.save"
-        let! oldPage = findFullById page.Id page.WebLogId
-        do! save Table.Page { page with Revisions = [] }
-        do! updatePageRevisions page.Id (match oldPage with Some p -> p.Revisions | None -> []) page.Revisions
+    /// Update a page
+    let update (page: Page) = backgroundTask {
+        log.LogTrace "Page.update"
+        match! findFullById page.Id page.WebLogId with
+        | Some oldPage ->
+            do! Update.byId Table.Page page.Id { page with Revisions = [] }
+            do! updatePageRevisions page.Id oldPage.Revisions page.Revisions
+        | None -> ()
         ()
     }
 
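
Splitting the old save into add and update (mirrored in the SQLite changes further down) also changes the contract: Add always inserts, while Update first looks the page up and silently does nothing when it is missing, which the new ``Update succeeds when the page does not exist`` test relies on. A toy, backend-free sketch of that contract, using a dictionary in place of the document table; every name below is illustrative.

open System.Collections.Generic

// A stand-in for the document table; the real code writes to PostgreSQL, SQLite or RethinkDB.
let store = Dictionary<string, string>()

/// Always insert; fails if the key already exists, like a plain INSERT
let add id text = store.Add(id, text)

/// Only write when the document already exists; otherwise do nothing
let update id text =
    match store.TryGetValue id with
    | true, _ -> store[id] <- text
    | false, _ -> ()

add "home" "<p>welcome"
update "home" "<p>welcome back"       // overwritten
update "missing" "<p>never stored"    // no-op, mirroring the | None -> () branch above
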
@@ -171,7 +181,7 @@ type PostgresPageData(log: ILogger) =
     }
 
     interface IPageData with
-        member _.Add page = save page
+        member _.Add page = add page
         member _.All webLogId = all webLogId
         member _.CountAll webLogId = countAll webLogId
         member _.CountListed webLogId = countListed webLogId
@@ -184,5 +194,5 @@ type PostgresPageData(log: ILogger) =
         member _.FindListed webLogId = findListed webLogId
         member _.FindPageOfPages webLogId pageNbr = findPageOfPages webLogId pageNbr
         member _.Restore pages = restore pages
-        member _.Update page = save page
+        member _.Update page = update page
         member _.UpdatePriorPermalinks pageId webLogId permalinks = updatePriorPermalinks pageId webLogId permalinks

@@ -512,9 +512,9 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
         member _.FindPageOfPages webLogId pageNbr = rethink<Page list> {
             withTable Table.Page
             getAll [ webLogId ] (nameof Page.Empty.WebLogId)
-            without [ nameof Page.Empty.Metadata
-                      nameof Page.Empty.PriorPermalinks
-                      nameof Page.Empty.Revisions ]
+            merge (r.HashMap(nameof Page.Empty.Metadata, [||])
+                    .With(nameof Page.Empty.PriorPermalinks, [||])
+                    .With(nameof Page.Empty.Revisions, [||]))
             orderByFunc (fun row -> row[nameof Page.Empty.Title].Downcase())
             skip ((pageNbr - 1) * 25)
             limit 25
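
In the RethinkDB query above, without (which drops the Metadata, PriorPermalinks and Revisions fields entirely) is replaced by merge with empty arrays, so the documents that come back keep the shape of a full Page with those lists merely empty, which is what the new FindPageOfPages test asserts about metadata, permalinks and revisions. A rough sketch of that difference on a plain F# map standing in for a document; the map and its values are illustrative.

// A document as a simple field map, for illustration only.
let doc = Map [ "Title", box "A Cool Page"; "Revisions", box [ "rev-1"; "rev-2" ] ]

// without-style: the field disappears from the document entirely.
let stripped = Map.remove "Revisions" doc

// merge-with-empty-style: the field is still present, just emptied.
let emptied = Map.add "Revisions" (box List.empty<string>) doc

printfn "%b %b" (stripped.ContainsKey "Revisions") (emptied.ContainsKey "Revisions")   // false true
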
@@ -36,6 +36,13 @@ type SQLitePageData(conn: SqliteConnection, log: ILogger) =
 
     // IMPLEMENTATION FUNCTIONS
 
+    /// Add a page
+    let add (page: Page) = backgroundTask {
+        log.LogTrace "Page.add"
+        do! conn.insert Table.Page { page with Revisions = [] }
+        do! updatePageRevisions page.Id [] page.Revisions
+    }
+
     /// Get all pages for a web log (without text, metadata, revisions, or prior permalinks)
     let all webLogId =
         log.LogTrace "Page.all"
@@ -133,18 +140,20 @@ type SQLitePageData(conn: SqliteConnection, log: ILogger) =
             [ webLogParam webLogId; SqliteParameter("@pageSize", 26); SqliteParameter("@toSkip", (pageNbr - 1) * 25) ]
             fromData<Page>
 
-    /// Save a page
-    let save (page: Page) = backgroundTask {
-        log.LogTrace "Page.save"
-        let! oldPage = findFullById page.Id page.WebLogId
-        do! conn.save Table.Page { page with Revisions = [] }
-        do! updatePageRevisions page.Id (match oldPage with Some p -> p.Revisions | None -> []) page.Revisions
+    /// Update a page
+    let update (page: Page) = backgroundTask {
+        log.LogTrace "Page.update"
+        match! findFullById page.Id page.WebLogId with
+        | Some oldPage ->
+            do! conn.updateById Table.Page page.Id { page with Revisions = [] }
+            do! updatePageRevisions page.Id oldPage.Revisions page.Revisions
+        | None -> ()
     }
 
     /// Restore pages from a backup
     let restore pages = backgroundTask {
         log.LogTrace "Page.restore"
-        for page in pages do do! save page
+        for page in pages do do! add page
     }
 
     /// Update a page's prior permalinks
@@ -158,7 +167,7 @@ type SQLitePageData(conn: SqliteConnection, log: ILogger) =
     }
 
     interface IPageData with
-        member _.Add page = save page
+        member _.Add page = add page
         member _.All webLogId = all webLogId
         member _.CountAll webLogId = countAll webLogId
         member _.CountListed webLogId = countListed webLogId
@@ -171,5 +180,5 @@ type SQLitePageData(conn: SqliteConnection, log: ILogger) =
         member _.FindListed webLogId = findListed webLogId
        member _.FindPageOfPages webLogId pageNbr = findPageOfPages webLogId pageNbr
         member _.Restore pages = restore pages
-        member _.Update page = save page
+        member _.Update page = update page
         member _.UpdatePriorPermalinks pageId webLogId permalinks = updatePriorPermalinks pageId webLogId permalinks

@@ -186,3 +186,60 @@ let ``FindListed succeeds when pages are not found`` (data: IData) = task {
     let! pages = data.Page.FindListed (WebLogId "none")
     Expect.isEmpty pages "No pages should have been retrieved"
 }
+
+let ``FindPageOfPages succeeds when pages are found`` (data: IData) = task {
+    let! pages = data.Page.FindPageOfPages rootId 1
+    Expect.hasLength pages 2 "There should have been 2 pages returned"
+    Expect.equal pages[0].Id coolPageId "Pages not sorted correctly"
+    pages |> List.iteri (fun idx pg ->
+        Expect.notEqual pg.Text "" $"Text for page {idx} should have been retrieved"
+        Expect.isEmpty pg.Metadata $"Metadata for page {idx} should not have been retrieved"
+        Expect.isEmpty pg.PriorPermalinks $"Prior permalinks for page {idx} should not have been retrieved"
+        Expect.isEmpty pg.Revisions $"Revisions for page {idx} should not have been retrieved")
+}
+
+let ``FindPageOfPages succeeds when pages are not found`` (data: IData) = task {
+    let! pages = data.Page.FindPageOfPages rootId 2
+    Expect.isEmpty pages "No pages should have been retrieved"
+}
+
+let ``Update succeeds when the page exists`` (data: IData) = task {
+    let! page = data.Page.FindFullById coolPageId rootId
+    Expect.isSome page "A page should have been returned"
+    do! data.Page.Update
+            { page.Value with
+                Title = "This Is Neat"
+                Permalink = Permalink "neat-page.html"
+                UpdatedOn = page.Value.PublishedOn + Duration.FromHours 5
+                IsInPageList = true
+                Text = "<p>I have been updated"
+                Metadata = [ List.head page.Value.Metadata ]
+                PriorPermalinks = [ Permalink "a-cool-page.html" ]
+                Revisions =
+                    { AsOf = page.Value.PublishedOn + Duration.FromHours 5; Text = Html "<p>I have been updated" }
+                    :: page.Value.Revisions }
+    let! updated = data.Page.FindFullById coolPageId rootId
+    Expect.isSome updated "The updated page should have been returned"
+    let pg = updated.Value
+    Expect.equal pg.Title "This Is Neat" "Title is incorrect"
+    Expect.equal pg.Permalink (Permalink "neat-page.html") "Permalink is incorrect"
+    Expect.equal pg.PublishedOn coolPagePublished "Published On is incorrect"
+    Expect.equal pg.UpdatedOn (coolPagePublished + Duration.FromHours 5) "Updated On is incorrect"
+    Expect.isTrue pg.IsInPageList "Is in page list flag should have been set"
+    Expect.equal pg.Text "<p>I have been updated" "Text is incorrect"
+    Expect.hasLength pg.Metadata 1 "There should be 1 metadata item on this page"
+    Expect.equal pg.Metadata[0].Name "Cool" "Meta item 0 name is incorrect"
+    Expect.equal pg.Metadata[0].Value "true" "Meta item 0 value is incorrect"
+    Expect.equal pg.PriorPermalinks [ Permalink "a-cool-page.html" ] "Prior permalinks are incorrect"
+    Expect.hasLength pg.Revisions 2 "There should be 2 revisions"
+    Expect.equal pg.Revisions[0].AsOf (coolPagePublished + Duration.FromHours 5) "As Of for revision 0 incorrect"
+    Expect.equal pg.Revisions[0].Text (Html "<p>I have been updated") "Text for revision 0 is incorrect"
+    Expect.equal pg.Revisions[1].AsOf coolPagePublished "As Of for revision 1 is incorrect"
+}
+
+let ``Update succeeds when the page does not exist`` (data: IData) = task {
+    let pageId = PageId "missing-page"
+    do! data.Page.Update { Page.Empty with Id = pageId; WebLogId = rootId }
+    let! page = data.Page.FindById pageId rootId
+    Expect.isNone page "A page should not have been retrieved"
+}

@@ -185,6 +185,22 @@ let pageTests = testList "Page" [
             do! PageDataTests.``FindListed succeeds when pages are not found`` (mkData ())
         }
     ]
+    testList "FindPageOfPages" [
+        testTask "succeeds when pages are found" {
+            do! PageDataTests.``FindPageOfPages succeeds when pages are found`` (mkData ())
+        }
+        testTask "succeeds when pages are not found" {
+            do! PageDataTests.``FindPageOfPages succeeds when pages are not found`` (mkData ())
+        }
+    ]
+    testList "Update" [
+        testTask "succeeds when the page exists" {
+            do! PageDataTests.``Update succeeds when the page exists`` (mkData ())
+        }
+        testTask "succeeds when the page does not exist" {
+            do! PageDataTests.``Update succeeds when the page does not exist`` (mkData ())
+        }
+    ]
 ]
 
 /// Drop the throwaway PostgreSQL database

@@ -184,6 +184,22 @@ let pageTests = testList "Page" [
             do! PageDataTests.``FindListed succeeds when pages are not found`` data.Value
         }
     ]
+    testList "FindPageOfPages" [
+        testTask "succeeds when pages are found" {
+            do! PageDataTests.``FindPageOfPages succeeds when pages are found`` data.Value
+        }
+        testTask "succeeds when pages are not found" {
+            do! PageDataTests.``FindPageOfPages succeeds when pages are not found`` data.Value
+        }
+    ]
+    testList "Update" [
+        testTask "succeeds when the page exists" {
+            do! PageDataTests.``Update succeeds when the page exists`` data.Value
+        }
+        testTask "succeeds when the page does not exist" {
+            do! PageDataTests.``Update succeeds when the page does not exist`` data.Value
+        }
+    ]
 ]
 
 /// Drop the throwaway RethinkDB database

@@ -246,6 +246,18 @@ let pageTests = testList "Page" [
             finally dispose data
         }
     ]
+    testList "Update" [
+        testTask "succeeds when the page exists" {
+            let data = mkData ()
+            try do! PageDataTests.``Update succeeds when the page exists`` data
+            finally dispose data
+        }
+        testTask "succeeds when the page does not exist" {
+            let data = mkData ()
+            try do! PageDataTests.``Update succeeds when the page does not exist`` data
+            finally dispose data
+        }
+    ]
 ]
 
 /// Delete the SQLite database