PostgreSQL data works

- Add (de)serializer functions
- Add NodaTime support functions

parent 80e7e26d51
commit 2131bd096b
@@ -149,4 +149,30 @@ module Json =
         let _ = ser.ConfigureForNodaTime DateTimeZoneProviders.Tzdb
         // Handles DUs with no associated data, as well as option fields
         ser.Converters.Add (CompactUnionJsonConverter ())
+        ser.NullValueHandling <- NullValueHandling.Ignore
+        ser.MissingMemberHandling <- MissingMemberHandling.Ignore
         ser

+    /// Serializer settings extracted from a JsonSerializer (a property sure would be nice...)
+    let mutable private serializerSettings : JsonSerializerSettings option = None
+
+    /// Extract settings from the serializer to be used in JsonConvert calls
+    let settings (ser : JsonSerializer) =
+        if Option.isNone serializerSettings then
+            serializerSettings <- JsonSerializerSettings (
+                ConstructorHandling            = ser.ConstructorHandling,
+                ContractResolver               = ser.ContractResolver,
+                Converters                     = ser.Converters,
+                DefaultValueHandling           = ser.DefaultValueHandling,
+                DateFormatHandling             = ser.DateFormatHandling,
+                MetadataPropertyHandling       = ser.MetadataPropertyHandling,
+                MissingMemberHandling          = ser.MissingMemberHandling,
+                NullValueHandling              = ser.NullValueHandling,
+                ObjectCreationHandling         = ser.ObjectCreationHandling,
+                ReferenceLoopHandling          = ser.ReferenceLoopHandling,
+                SerializationBinder            = ser.SerializationBinder,
+                TraceWriter                    = ser.TraceWriter,
+                TypeNameAssemblyFormatHandling = ser.TypeNameAssemblyFormatHandling,
+                TypeNameHandling               = ser.TypeNameHandling)
+            |> Some
+        serializerSettings.Value
@@ -3,6 +3,7 @@ namespace MyWebLog.Data
 open System.Threading.Tasks
 open MyWebLog
 open MyWebLog.ViewModels
+open Newtonsoft.Json
 open NodaTime

 /// The result of a category deletion attempt
@@ -326,6 +327,9 @@ type IData =
     /// Web log user data functions
     abstract member WebLogUser : IWebLogUserData

+    /// A JSON serializer for use in persistence
+    abstract member Serializer : JsonSerializer
+
     /// Do any required start up data checks
     abstract member StartUp : unit -> Task<unit>
@@ -36,7 +36,7 @@ module private Helpers =

     /// Create a parameter for the expire-at time
     let expireParam =
-        typedParam "@expireAt"
+        typedParam "expireAt"


 /// A distributed cache implementation in PostgreSQL used to handle sessions for myWebLog
@@ -38,8 +38,9 @@ type PostgresCategoryData (conn : NpgsqlConnection) =
                 ordered
                 |> Seq.filter (fun cat -> cat.ParentNames |> Array.contains it.Name)
                 |> Seq.map (fun cat -> cat.Id)
+                |> Seq.append (Seq.singleton it.Id)
                 |> List.ofSeq
-                |> inClause "id" id
+                |> inClause "AND pc.category_id" "id" id
             let postCount =
                 Sql.existingConnection conn
                 |> Sql.query $"
@@ -48,7 +49,7 @@ type PostgresCategoryData (conn : NpgsqlConnection) =
                     INNER JOIN post_category pc ON pc.post_id = p.id
                     WHERE p.web_log_id = @webLogId
                       AND p.status = 'Published'
-                      AND pc.category_id IN ({catIdSql})"
+                      {catIdSql}"
                 |> Sql.parameters (webLogIdParam webLogId :: catIdParams)
                 |> Sql.executeRowAsync Map.toCount
                 |> Async.AwaitTask
@@ -5,6 +5,7 @@ module MyWebLog.Data.Postgres.PostgresHelpers
 open System
 open System.Threading.Tasks
 open MyWebLog
+open MyWebLog.Data
 open Newtonsoft.Json
 open NodaTime
 open Npgsql
@@ -21,30 +22,36 @@ let countName = "the_count"
 let existsName = "does_exist"

 /// Create the SQL and parameters for an IN clause
-let inClause<'T> name (valueFunc: 'T -> string) (items : 'T list) =
-    let mutable idx = 0
-    items
-    |> List.skip 1
-    |> List.fold (fun (itemS, itemP) it ->
-        idx <- idx + 1
-        $"{itemS}, @%s{name}{idx}", ($"@%s{name}{idx}", Sql.string (valueFunc it)) :: itemP)
-        (Seq.ofList items
-         |> Seq.map (fun it -> $"@%s{name}0", [ $"@%s{name}0", Sql.string (valueFunc it) ])
-         |> Seq.head)
+let inClause<'T> colNameAndPrefix paramName (valueFunc: 'T -> string) (items : 'T list) =
+    if List.isEmpty items then "", []
+    else
+        let mutable idx = 0
+        items
+        |> List.skip 1
+        |> List.fold (fun (itemS, itemP) it ->
+            idx <- idx + 1
+            $"{itemS}, @%s{paramName}{idx}", ($"@%s{paramName}{idx}", Sql.string (valueFunc it)) :: itemP)
+            (Seq.ofList items
+             |> Seq.map (fun it ->
+                 $"%s{colNameAndPrefix} IN (@%s{paramName}0", [ $"@%s{paramName}0", Sql.string (valueFunc it) ])
+             |> Seq.head)
+        |> function sql, ps -> $"{sql})", ps

 /// Create the SQL and parameters for the array equivalent of an IN clause
 let arrayInClause<'T> name (valueFunc : 'T -> string) (items : 'T list) =
-    let mutable idx = 0
-    items
-    |> List.skip 1
-    |> List.fold (fun (itemS, itemP) it ->
-        idx <- idx + 1
-        $"{itemS} OR %s{name} && ARRAY[@{name}{idx}]",
-        ($"@{name}{idx}", Sql.string (valueFunc it)) :: itemP)
-        (Seq.ofList items
-         |> Seq.map (fun it ->
-             $"{name} && ARRAY[@{name}0]", [ $"@{name}0", Sql.string (valueFunc it) ])
-         |> Seq.head)
+    if List.isEmpty items then "TRUE = FALSE", []
+    else
+        let mutable idx = 0
+        items
+        |> List.skip 1
+        |> List.fold (fun (itemS, itemP) it ->
+            idx <- idx + 1
+            $"{itemS} OR %s{name} && ARRAY[@{name}{idx}]",
+            ($"@{name}{idx}", Sql.string (valueFunc it)) :: itemP)
+            (Seq.ofList items
+             |> Seq.map (fun it ->
+                 $"{name} && ARRAY[@{name}0]", [ $"@{name}0", Sql.string (valueFunc it) ])
+             |> Seq.head)

 /// Get the first result of the given query
 let tryHead<'T> (query : Task<'T list>) = backgroundTask {
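For context (a sketch, not part of the commit): with the reworked signature above, the column reference -- including any leading keyword such as AND -- is passed separately from the parameter-name prefix, and an empty list now yields an empty fragment. A hypothetical call with illustrative values would behave roughly like this:

    // hypothetical usage sketch; the column prefix, parameter name, and IDs are illustrative
    let sql, ps = inClause "AND pc.category_id" "catId" CategoryId.toString [ CategoryId "abc"; CategoryId "def" ]
    // sql -> "AND pc.category_id IN (@catId0, @catId1)"
    // ps  -> [ "@catId1", Sql.string "def"; "@catId0", Sql.string "abc" ]
    // an empty input list yields ("", []), so callers can interpolate the fragment unconditionally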
@@ -83,32 +90,11 @@ module Map =
         row.int countName

     /// Create a custom feed from the current row
-    let toCustomFeed (row : RowReader) : CustomFeed =
+    let toCustomFeed (ser : JsonSerializer) (row : RowReader) : CustomFeed =
         {   Id = row.string "id" |> CustomFeedId
             Source = row.string "source" |> CustomFeedSource.parse
             Path = row.string "path" |> Permalink
-            Podcast =
-                match row.stringOrNone "title" with
-                | Some title ->
-                    Some {
-                        Title = title
-                        Subtitle = row.stringOrNone "subtitle"
-                        ItemsInFeed = row.int "items_in_feed"
-                        Summary = row.string "summary"
-                        DisplayedAuthor = row.string "displayed_author"
-                        Email = row.string "email"
-                        ImageUrl = row.string "image_url" |> Permalink
-                        AppleCategory = row.string "apple_category"
-                        AppleSubcategory = row.stringOrNone "apple_subcategory"
-                        Explicit = row.string "explicit" |> ExplicitRating.parse
-                        DefaultMediaType = row.stringOrNone "default_media_type"
-                        MediaBaseUrl = row.stringOrNone "media_base_url"
-                        PodcastGuid = row.uuidOrNone "podcast_guid"
-                        FundingUrl = row.stringOrNone "funding_url"
-                        FundingText = row.stringOrNone "funding_text"
-                        Medium = row.stringOrNone "medium" |> Option.map PodcastMedium.parse
-                    }
-                | None -> None
+            Podcast = row.stringOrNone "podcast" |> Option.map (Utils.deserialize ser)
         }

     /// Get a true/false value as to whether an item exists
@@ -126,7 +112,7 @@ module Map =
         Permalink (row.string "permalink")

     /// Create a page from the current row
-    let toPage (row : RowReader) : Page =
+    let toPage (ser : JsonSerializer) (row : RowReader) : Page =
         { Page.empty with
             Id = row.string "id" |> PageId
             WebLogId = row.string "web_log_id" |> WebLogId
@@ -140,12 +126,12 @@ module Map =
             Template = row.stringOrNone "template"
             Text = row.string "page_text"
             Metadata = row.stringOrNone "meta_items"
-                       |> Option.map JsonConvert.DeserializeObject<MetaItem list>
+                       |> Option.map (Utils.deserialize ser)
                        |> Option.defaultValue []
         }

     /// Create a post from the current row
-    let toPost (row : RowReader) : Post =
+    let toPost (ser : JsonSerializer) (row : RowReader) : Post =
         { Post.empty with
             Id = row.string "id" |> PostId
             WebLogId = row.string "web_log_id" |> WebLogId
@@ -158,6 +144,7 @@ module Map =
             UpdatedOn = row.fieldValue<Instant> "updated_on"
             Template = row.stringOrNone "template"
             Text = row.string "post_text"
+            Episode = row.stringOrNone "episode" |> Option.map (Utils.deserialize ser)
             CategoryIds = row.stringArrayOrNone "category_ids"
                           |> Option.map (Array.map CategoryId >> List.ofArray)
                           |> Option.defaultValue []
@@ -165,10 +152,8 @@ module Map =
                    |> Option.map List.ofArray
                    |> Option.defaultValue []
             Metadata = row.stringOrNone "meta_items"
-                       |> Option.map JsonConvert.DeserializeObject<MetaItem list>
+                       |> Option.map (Utils.deserialize ser)
                        |> Option.defaultValue []
-            Episode = row.stringOrNone "episode"
-                      |> Option.map JsonConvert.DeserializeObject<Episode>
         }

     /// Create a revision from the current row
@@ -7,7 +7,7 @@ open Npgsql
 open Npgsql.FSharp

 /// PostgreSQL myWebLog page data implementation
-type PostgresPageData (conn : NpgsqlConnection) =
+type PostgresPageData (conn : NpgsqlConnection, ser : JsonSerializer) =

     // SUPPORT FUNCTIONS

@@ -21,16 +21,19 @@ type PostgresPageData (conn : NpgsqlConnection) =
         return { page with Revisions = revisions }
     }

+    /// Shorthand to map to a page
+    let toPage = Map.toPage ser
+
     /// Return a page with no text or revisions
     let pageWithoutText row =
-        { Map.toPage row with Text = "" }
+        { toPage row with Text = "" }

     /// The INSERT statement for a page revision
     let revInsert = "INSERT INTO page_revision VALUES (@pageId, @asOf, @text)"

     /// Parameters for a revision INSERT statement
     let revParams pageId rev = [
-        typedParam "@asOf" rev.AsOf
+        typedParam "asOf" rev.AsOf
         "@pageId", Sql.string (PageId.toString pageId)
         "@text", Sql.string (MarkupText.toString rev.Text)
     ]
@@ -47,7 +50,7 @@ type PostgresPageData (conn : NpgsqlConnection) =
         toDelete
         |> List.map (fun it -> [
            "@pageId", Sql.string (PageId.toString pageId)
-           typedParam "@asOf" it.AsOf
+           typedParam "asOf" it.AsOf
         ])
         if not (List.isEmpty toAdd) then
             revInsert, toAdd |> List.map (revParams pageId)
@@ -94,7 +97,7 @@ type PostgresPageData (conn : NpgsqlConnection) =
         Sql.existingConnection conn
         |> Sql.query "SELECT * FROM page WHERE id = @id AND web_log_id = @webLogId"
         |> Sql.parameters [ "@id", Sql.string (PageId.toString pageId); webLogIdParam webLogId ]
-        |> Sql.executeAsync Map.toPage
+        |> Sql.executeAsync toPage
         |> tryHead

     /// Find a complete page by its ID
@@ -126,7 +129,7 @@ type PostgresPageData (conn : NpgsqlConnection) =
         Sql.existingConnection conn
         |> Sql.query "SELECT * FROM page WHERE web_log_id = @webLogId AND permalink = @link"
         |> Sql.parameters [ webLogIdParam webLogId; "@link", Sql.string (Permalink.toString permalink) ]
-        |> Sql.executeAsync Map.toPage
+        |> Sql.executeAsync toPage
         |> tryHead

     /// Find the current permalink within a set of potential prior permalinks for the given web log
@@ -148,7 +151,7 @@ type PostgresPageData (conn : NpgsqlConnection) =
         Sql.existingConnection conn
         |> Sql.query "SELECT * FROM page WHERE web_log_id = @webLogId"
         |> Sql.parameters [ webLogIdParam webLogId ]
-        |> Sql.executeAsync Map.toPage
+        |> Sql.executeAsync toPage
         let! revisions =
             Sql.existingConnection conn
             |> Sql.query
@@ -182,7 +185,7 @@ type PostgresPageData (conn : NpgsqlConnection) =
             ORDER BY LOWER(title)
             LIMIT @pageSize OFFSET @toSkip"
         |> Sql.parameters [ webLogIdParam webLogId; "@pageSize", Sql.int 26; "@toSkip", Sql.int ((pageNbr - 1) * 25) ]
-        |> Sql.executeAsync Map.toPage
+        |> Sql.executeAsync toPage

     /// The INSERT statement for a page
     let pageInsert =
@@ -204,10 +207,10 @@ type PostgresPageData (conn : NpgsqlConnection) =
         "@isInPageList", Sql.bool page.IsInPageList
         "@template", Sql.stringOrNone page.Template
         "@text", Sql.string page.Text
-        "@metaItems", Sql.jsonb (JsonConvert.SerializeObject page.Metadata)
+        "@metaItems", Sql.jsonb (Utils.serialize ser page.Metadata)
         "@priorPermalinks", Sql.stringArray (page.PriorPermalinks |> List.map Permalink.toString |> Array.ofList)
-        typedParam "@publishedOn" page.PublishedOn
-        typedParam "@updatedOn" page.UpdatedOn
+        typedParam "publishedOn" page.PublishedOn
+        typedParam "updatedOn" page.UpdatedOn
     ]

     /// Restore pages from a backup
@@ -237,7 +240,7 @@ type PostgresPageData (conn : NpgsqlConnection) =
             updated_on = EXCLUDED.updated_on,
             is_in_page_list = EXCLUDED.is_in_page_list,
             template = EXCLUDED.template,
-            page_text = EXCLUDED.text,
+            page_text = EXCLUDED.page_text,
             meta_items = EXCLUDED.meta_items"
         |> Sql.parameters (pageParams page)
         |> Sql.executeNonQueryAsync
@@ -8,7 +8,7 @@ open Npgsql
 open Npgsql.FSharp

 /// PostgreSQL myWebLog post data implementation
-type PostgresPostData (conn : NpgsqlConnection) =
+type PostgresPostData (conn : NpgsqlConnection, ser : JsonSerializer) =

     // SUPPORT FUNCTIONS

@@ -25,11 +25,14 @@ type PostgresPostData (conn : NpgsqlConnection) =
     /// The SELECT statement for a post that will include category IDs
     let selectPost =
         "SELECT *, ARRAY(SELECT cat.category_id FROM post_category cat WHERE cat.post_id = p.id) AS category_ids
-           FROM post"
+           FROM post p"

+    /// Shorthand for mapping to a post
+    let toPost = Map.toPost ser
+
     /// Return a post with no revisions, prior permalinks, or text
     let postWithoutText row =
-        { Map.toPost row with Text = "" }
+        { toPost row with Text = "" }

     /// The INSERT statement for a post/category cross-reference
     let catInsert = "INSERT INTO post_category VALUES (@postId, @categoryId)"
@@ -61,7 +64,7 @@ type PostgresPostData (conn : NpgsqlConnection) =

     /// The parameters for adding a post revision
     let revParams postId rev = [
-        typedParam "@asOf" rev.AsOf
+        typedParam "asOf" rev.AsOf
         "@postId", Sql.string (PostId.toString postId)
         "@text", Sql.string (MarkupText.toString rev.Text)
     ]
@@ -78,7 +81,7 @@ type PostgresPostData (conn : NpgsqlConnection) =
         toDelete
         |> List.map (fun it -> [
            "@postId", Sql.string (PostId.toString postId)
-           typedParam "@asOf" it.AsOf
+           typedParam "asOf" it.AsOf
         ])
         if not (List.isEmpty toAdd) then
             revInsert, toAdd |> List.map (revParams postId)
@@ -107,7 +110,7 @@ type PostgresPostData (conn : NpgsqlConnection) =
         Sql.existingConnection conn
         |> Sql.query $"{selectPost} WHERE id = @id AND web_log_id = @webLogId"
         |> Sql.parameters [ "@id", Sql.string (PostId.toString postId); webLogIdParam webLogId ]
-        |> Sql.executeAsync Map.toPost
+        |> Sql.executeAsync toPost
         |> tryHead

     /// Find a post by its permalink for the given web log (excluding revisions and prior permalinks)
@@ -115,7 +118,7 @@ type PostgresPostData (conn : NpgsqlConnection) =
         Sql.existingConnection conn
         |> Sql.query $"{selectPost} WHERE web_log_id = @webLogId AND permalink = @link"
         |> Sql.parameters [ webLogIdParam webLogId; "@link", Sql.string (Permalink.toString permalink) ]
-        |> Sql.executeAsync Map.toPost
+        |> Sql.executeAsync toPost
         |> tryHead

     /// Find a complete post by its ID for the given web log
@@ -150,7 +153,7 @@ type PostgresPostData (conn : NpgsqlConnection) =
         let linkSql, linkParams = arrayInClause "prior_permalinks" Permalink.toString permalinks
         return!
             Sql.existingConnection conn
-            |> Sql.query $"SELECT permalink FROM post WHERE web_log_id = @webLogId AND ({linkSql}"
+            |> Sql.query $"SELECT permalink FROM post WHERE web_log_id = @webLogId AND ({linkSql})"
             |> Sql.parameters (webLogIdParam webLogId :: linkParams)
             |> Sql.executeAsync Map.toPermalink
             |> tryHead
@@ -162,7 +165,7 @@ type PostgresPostData (conn : NpgsqlConnection) =
         Sql.existingConnection conn
         |> Sql.query $"{selectPost} WHERE web_log_id = @webLogId"
         |> Sql.parameters [ webLogIdParam webLogId ]
-        |> Sql.executeAsync Map.toPost
+        |> Sql.executeAsync toPost
         let! revisions =
             Sql.existingConnection conn
             |> Sql.query
@@ -181,21 +184,21 @@ type PostgresPostData (conn : NpgsqlConnection) =

     /// Get a page of categorized posts for the given web log (excludes revisions)
     let findPageOfCategorizedPosts webLogId categoryIds pageNbr postsPerPage =
-        let catSql, catParams = inClause "catId" CategoryId.toString categoryIds
+        let catSql, catParams = inClause "AND pc.category_id" "catId" CategoryId.toString categoryIds
         Sql.existingConnection conn
         |> Sql.query $"
-            {selectPost} p
+            {selectPost}
             INNER JOIN post_category pc ON pc.post_id = p.id
             WHERE p.web_log_id = @webLogId
               AND p.status = @status
-              AND pc.category_id IN ({catSql})
+              {catSql}
             ORDER BY published_on DESC
             LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
         |> Sql.parameters
             [ webLogIdParam webLogId
               "@status", Sql.string (PostStatus.toString Published)
               yield! catParams ]
-        |> Sql.executeAsync Map.toPost
+        |> Sql.executeAsync toPost

     /// Get a page of posts for the given web log (excludes text and revisions)
     let findPageOfPosts webLogId pageNbr postsPerPage =
@@ -218,7 +221,7 @@ type PostgresPostData (conn : NpgsqlConnection) =
             ORDER BY published_on DESC
             LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
         |> Sql.parameters [ webLogIdParam webLogId; "@status", Sql.string (PostStatus.toString Published) ]
-        |> Sql.executeAsync Map.toPost
+        |> Sql.executeAsync toPost

     /// Get a page of tagged posts for the given web log (excludes revisions and prior permalinks)
     let findPageOfTaggedPosts webLogId (tag : string) pageNbr postsPerPage =
@@ -227,7 +230,7 @@ type PostgresPostData (conn : NpgsqlConnection) =
             {selectPost}
             WHERE web_log_id = @webLogId
               AND status = @status
-              AND tag && ARRAY[@tag]
+              AND tags && ARRAY[@tag]
             ORDER BY published_on DESC
             LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
         |> Sql.parameters
@@ -235,13 +238,13 @@ type PostgresPostData (conn : NpgsqlConnection) =
               "@status", Sql.string (PostStatus.toString Published)
               "@tag", Sql.string tag
             ]
-        |> Sql.executeAsync Map.toPost
+        |> Sql.executeAsync toPost

     /// Find the next newest and oldest post from a publish date for the given web log
     let findSurroundingPosts webLogId (publishedOn : Instant) = backgroundTask {
-        let queryParams = Sql.parameters [
+        let queryParams () = Sql.parameters [
            webLogIdParam webLogId
-           typedParam "@publishedOn" publishedOn
+           typedParam "publishedOn" publishedOn
            "@status", Sql.string (PostStatus.toString Published)
         ]
         let! older =
@@ -253,8 +256,8 @@ type PostgresPostData (conn : NpgsqlConnection) =
               AND published_on < @publishedOn
             ORDER BY published_on DESC
             LIMIT 1"
-            |> queryParams
-            |> Sql.executeAsync Map.toPost
+            |> queryParams ()
+            |> Sql.executeAsync toPost
         let! newer =
             Sql.existingConnection conn
             |> Sql.query $"
@@ -264,8 +267,8 @@ type PostgresPostData (conn : NpgsqlConnection) =
               AND published_on > @publishedOn
             ORDER BY published_on
             LIMIT 1"
-            |> queryParams
-            |> Sql.executeAsync Map.toPost
+            |> queryParams ()
+            |> Sql.executeAsync toPost
         return List.tryHead older, List.tryHead newer
     }

@@ -289,14 +292,14 @@ type PostgresPostData (conn : NpgsqlConnection) =
         "@permalink", Sql.string (Permalink.toString post.Permalink)
         "@template", Sql.stringOrNone post.Template
         "@text", Sql.string post.Text
-        "@episode", Sql.jsonbOrNone (post.Episode |> Option.map JsonConvert.SerializeObject)
         "@priorPermalinks", Sql.stringArray (post.PriorPermalinks |> List.map Permalink.toString |> Array.ofList)
+        "@episode", Sql.jsonbOrNone (post.Episode |> Option.map (Utils.serialize ser))
         "@tags", Sql.stringArrayOrNone (if List.isEmpty post.Tags then None else Some (Array.ofList post.Tags))
         "@metaItems",
-            if List.isEmpty post.Metadata then None else Some (JsonConvert.SerializeObject post.Metadata)
+            if List.isEmpty post.Metadata then None else Some (Utils.serialize ser post.Metadata)
             |> Sql.jsonbOrNone
-        optParam "@publishedOn" post.PublishedOn
-        typedParam "@updatedOn" post.UpdatedOn
+        optParam "publishedOn" post.PublishedOn
+        typedParam "updatedOn" post.UpdatedOn
     ]

     /// Save a post
@@ -314,7 +317,7 @@ type PostgresPostData (conn : NpgsqlConnection) =
             published_on = EXCLUDED.published_on,
             updated_on = EXCLUDED.updated_on,
             template = EXCLUDED.template,
-            post_text = EXCLUDED.text,
+            post_text = EXCLUDED.post_text,
             tags = EXCLUDED.tags,
             meta_items = EXCLUDED.meta_items,
             episode = EXCLUDED.episode"
@@ -54,9 +54,9 @@ type PostgresTagMapData (conn : NpgsqlConnection) =

     /// Find any tag mappings in a list of tags for the given web log
     let findMappingForTags tags webLogId =
-        let tagSql, tagParams = inClause "tag" id tags
+        let tagSql, tagParams = inClause "AND tag" "tag" id tags
         Sql.existingConnection conn
-        |> Sql.query $"SELECT * FROM tag_map WHERE web_log_id = @webLogId AND tag IN ({tagSql}"
+        |> Sql.query $"SELECT * FROM tag_map WHERE web_log_id = @webLogId {tagSql}"
         |> Sql.parameters (webLogIdParam webLogId :: tagParams)
         |> Sql.executeAsync Map.toTagMap
@@ -193,7 +193,7 @@ type PostgresThemeAssetData (conn : NpgsqlConnection) =
         [ "@themeId", Sql.string themeId
           "@path", Sql.string path
           "@data", Sql.bytea asset.Data
-          typedParam "@updatedOn" asset.UpdatedOn ]
+          typedParam "updatedOn" asset.UpdatedOn ]
         |> Sql.executeNonQueryAsync
         ()
     }
@@ -19,7 +19,7 @@ type PostgresUploadData (conn : NpgsqlConnection) =
     /// Parameters for adding an uploaded file
     let upParams (upload : Upload) = [
         webLogIdParam upload.WebLogId
-        typedParam "@updatedOn" upload.UpdatedOn
+        typedParam "updatedOn" upload.UpdatedOn
         "@id", Sql.string (UploadId.toString upload.Id)
         "@path", Sql.string (Permalink.toString upload.Path)
         "@data", Sql.bytea upload.Data
@@ -2,11 +2,12 @@

 open MyWebLog
 open MyWebLog.Data
+open Newtonsoft.Json
 open Npgsql
 open Npgsql.FSharp

 /// PostgreSQL myWebLog web log data implementation
-type PostgresWebLogData (conn : NpgsqlConnection) =
+type PostgresWebLogData (conn : NpgsqlConnection, ser : JsonSerializer) =

     // SUPPORT FUNCTIONS

@@ -36,15 +37,16 @@ type PostgresWebLogData (conn : NpgsqlConnection) =
         yield! rssParams webLog
     ]

-    /// The SELECT statement for custom feeds, which includes podcast feed settings if present
-    let feedSelect = "SELECT f.*, p.* FROM web_log_feed f LEFT JOIN web_log_feed_podcast p ON p.feed_id = f.id"
+    /// Shorthand to map a result to a custom feed
+    let toCustomFeed =
+        Map.toCustomFeed ser

     /// Get the current custom feeds for a web log
     let getCustomFeeds (webLog : WebLog) =
         Sql.existingConnection conn
-        |> Sql.query $"{feedSelect} WHERE f.web_log_id = @webLogId"
+        |> Sql.query "SELECT * FROM web_log_feed WHERE web_log_id = @webLogId"
         |> Sql.parameters [ webLogIdParam webLog.Id ]
-        |> Sql.executeAsync Map.toCustomFeed
+        |> Sql.executeAsync toCustomFeed

     /// Append custom feeds to a web log
     let appendCustomFeeds (webLog : WebLog) = backgroundTask {
@@ -52,33 +54,13 @@ type PostgresWebLogData (conn : NpgsqlConnection) =
         return { webLog with Rss = { webLog.Rss with CustomFeeds = feeds } }
     }

-    /// The parameters to save a podcast feed
-    let podcastParams feedId (podcast : PodcastOptions) = [
-        "@feedId", Sql.string (CustomFeedId.toString feedId)
-        "@title", Sql.string podcast.Title
-        "@subtitle", Sql.stringOrNone podcast.Subtitle
-        "@itemsInFeed", Sql.int podcast.ItemsInFeed
-        "@summary", Sql.string podcast.Summary
-        "@displayedAuthor", Sql.string podcast.DisplayedAuthor
-        "@email", Sql.string podcast.Email
-        "@imageUrl", Sql.string (Permalink.toString podcast.ImageUrl)
-        "@appleCategory", Sql.string podcast.AppleCategory
-        "@appleSubcategory", Sql.stringOrNone podcast.AppleSubcategory
-        "@explicit", Sql.string (ExplicitRating.toString podcast.Explicit)
-        "@defaultMediaType", Sql.stringOrNone podcast.DefaultMediaType
-        "@mediaBaseUrl", Sql.stringOrNone podcast.MediaBaseUrl
-        "@podcastGuid", Sql.uuidOrNone podcast.PodcastGuid
-        "@fundingUrl", Sql.stringOrNone podcast.FundingUrl
-        "@fundingText", Sql.stringOrNone podcast.FundingText
-        "@medium", Sql.stringOrNone (podcast.Medium |> Option.map PodcastMedium.toString)
-    ]
-
     /// The parameters to save a custom feed
     let feedParams webLogId (feed : CustomFeed) = [
         webLogIdParam webLogId
         "@id", Sql.string (CustomFeedId.toString feed.Id)
         "@source", Sql.string (CustomFeedSource.toString feed.Source)
         "@path", Sql.string (Permalink.toString feed.Path)
+        "@podcast", Sql.jsonbOrNone (feed.Podcast |> Option.map (Utils.serialize ser))
     ]

     /// Update the custom feeds for a web log
@@ -93,55 +75,18 @@ type PostgresWebLogData (conn : NpgsqlConnection) =
         Sql.existingConnection conn
         |> Sql.executeTransactionAsync [
             if not (List.isEmpty toDelete) then
-                "DELETE FROM web_log_feed_podcast WHERE feed_id = @id;
-                 DELETE FROM web_log_feed WHERE id = @id",
+                "DELETE FROM web_log_feed WHERE id = @id",
                 toDelete |> List.map (fun it -> [ "@id", Sql.string (CustomFeedId.toString it.Id) ])
             if not (List.isEmpty toAddOrUpdate) then
                 "INSERT INTO web_log_feed (
-                    id, web_log_id, source, path
+                    id, web_log_id, source, path, podcast
                 ) VALUES (
-                    @id, @webLogId, @source, @path
+                    @id, @webLogId, @source, @path, @podcast
                 ) ON CONFLICT (id) DO UPDATE
                 SET source = EXCLUDED.source,
-                    path = EXCLUDED.path",
+                    path = EXCLUDED.path,
+                    podcast = EXCLUDED.podcast",
                 toAddOrUpdate |> List.map (feedParams webLog.Id)
-            let podcasts = toAddOrUpdate |> List.filter (fun it -> Option.isSome it.Podcast)
-            if not (List.isEmpty podcasts) then
-                "INSERT INTO web_log_feed_podcast (
-                    feed_id, title, subtitle, items_in_feed, summary, displayed_author, email, image_url,
-                    apple_category, apple_subcategory, explicit, default_media_type, media_base_url,
-                    podcast_guid, funding_url, funding_text, medium
-                ) VALUES (
-                    @feedId, @title, @subtitle, @itemsInFeed, @summary, @displayedAuthor, @email, @imageUrl,
-                    @appleCategory, @appleSubcategory, @explicit, @defaultMediaType, @mediaBaseUrl,
-                    @podcastGuid, @fundingUrl, @fundingText, @medium
-                ) ON CONFLICT (feed_id) DO UPDATE
-                SET title = EXCLUDED.title,
-                    subtitle = EXCLUDED.subtitle,
-                    items_in_feed = EXCLUDED.items_in_feed,
-                    summary = EXCLUDED.summary,
-                    displayed_author = EXCLUDED.displayed_author,
-                    email = EXCLUDED.email,
-                    image_url = EXCLUDED.image_url,
-                    apple_category = EXCLUDED.apple_category,
-                    apple_subcategory = EXCLUDED.apple_subcategory,
-                    explicit = EXCLUDED.explicit,
-                    default_media_type = EXCLUDED.default_media_type,
-                    media_base_url = EXCLUDED.media_base_url,
-                    podcast_guid = EXCLUDED.podcast_guid,
-                    funding_url = EXCLUDED.funding_url,
-                    funding_text = EXCLUDED.funding_text,
-                    medium = EXCLUDED.medium",
-                podcasts |> List.map (fun it -> podcastParams it.Id it.Podcast.Value)
-            let hadPodcasts =
-                toAddOrUpdate
-                |> List.filter (fun it ->
-                    match feeds |> List.tryFind (fun feed -> feed.Id = it.Id) with
-                    | Some feed -> Option.isSome feed.Podcast && Option.isNone it.Podcast
-                    | None -> false)
-            if not (List.isEmpty hadPodcasts) then
-                "DELETE FROM web_log_feed_podcast WHERE feed_id = @id",
-                hadPodcasts |> List.map (fun it -> [ "@id", Sql.string (CustomFeedId.toString it.Id) ])
         ]
         ()
     }
@@ -173,8 +118,8 @@ type PostgresWebLogData (conn : NpgsqlConnection) =
         |> Sql.executeAsync Map.toWebLog
         let! feeds =
             Sql.existingConnection conn
-            |> Sql.query feedSelect
-            |> Sql.executeAsync (fun row -> WebLogId (row.string "web_log_id"), Map.toCustomFeed row)
+            |> Sql.query "SELECT * FROM web_log_feed"
+            |> Sql.executeAsync (fun row -> WebLogId (row.string "web_log_id"), toCustomFeed row)
         return
             webLogs
             |> List.map (fun it ->
@@ -191,20 +136,19 @@ type PostgresWebLogData (conn : NpgsqlConnection) =
         let pageSubQuery = subQuery "page"
         let! _ =
             Sql.existingConnection conn
-            |> Sql.query $"""
+            |> Sql.query $"
                 DELETE FROM post_comment WHERE post_id IN {postSubQuery};
                 DELETE FROM post_revision WHERE post_id IN {postSubQuery};
                 DELETE FROM post_category WHERE post_id IN {postSubQuery};
                 DELETE FROM post WHERE web_log_id = @webLogId;
                 DELETE FROM page_revision WHERE page_id IN {pageSubQuery};
                 DELETE FROM page WHERE web_log_id = @webLogId;
                 DELETE FROM category WHERE web_log_id = @webLogId;
                 DELETE FROM tag_map WHERE web_log_id = @webLogId;
                 DELETE FROM upload WHERE web_log_id = @webLogId;
                 DELETE FROM web_log_user WHERE web_log_id = @webLogId;
-                DELETE FROM web_log_feed_podcast WHERE feed_id IN {subQuery "web_log_feed"};
                 DELETE FROM web_log_feed WHERE web_log_id = @webLogId;
-                DELETE FROM web_log WHERE id = @webLogId"""
+                DELETE FROM web_log WHERE id = @webLogId"
             |> Sql.parameters [ webLogIdParam webLogId ]
             |> Sql.executeNonQueryAsync
         ()
@@ -30,8 +30,8 @@ type PostgresWebLogUserData (conn : NpgsqlConnection) =
         "@salt", Sql.uuid user.Salt
         "@url", Sql.stringOrNone user.Url
         "@accessLevel", Sql.string (AccessLevel.toString user.AccessLevel)
-        typedParam "@createdOn" user.CreatedOn
-        optParam "@lastSeenOn" user.LastSeenOn
+        typedParam "createdOn" user.CreatedOn
+        optParam "lastSeenOn" user.LastSeenOn
     ]

     /// Find a user by their ID for the given web log
@@ -83,10 +83,10 @@ type PostgresWebLogUserData (conn : NpgsqlConnection) =

     /// Find the names of users by their IDs for the given web log
     let findNames webLogId userIds = backgroundTask {
-        let idSql, idParams = inClause "id" WebLogUserId.toString userIds
+        let idSql, idParams = inClause "AND id" "id" WebLogUserId.toString userIds
         let! users =
             Sql.existingConnection conn
-            |> Sql.query $"SELECT * FROM web_log_user WHERE web_log_id = @webLogId AND id IN ({idSql})"
+            |> Sql.query $"SELECT * FROM web_log_user WHERE web_log_id = @webLogId {idSql}"
            |> Sql.parameters (webLogIdParam webLogId :: idParams)
            |> Sql.executeAsync Map.toWebLogUser
         return
@@ -111,7 +111,7 @@ type PostgresWebLogUserData (conn : NpgsqlConnection) =
         |> Sql.query "UPDATE web_log_user SET last_seen_on = @lastSeenOn WHERE id = @id AND web_log_id = @webLogId"
         |> Sql.parameters
             [ webLogIdParam webLogId
-              typedParam "@lastSeenOn" (Utils.now ())
+              typedParam "lastSeenOn" (Noda.now ())
               "@id", Sql.string (WebLogUserId.toString userId) ]
         |> Sql.executeNonQueryAsync
         ()
@@ -2,24 +2,27 @@

 open Microsoft.Extensions.Logging
 open MyWebLog.Data.Postgres
+open Newtonsoft.Json
 open Npgsql
 open Npgsql.FSharp

 /// Data implementation for PostgreSQL
-type PostgresData (conn : NpgsqlConnection, log : ILogger<PostgresData>) =
+type PostgresData (conn : NpgsqlConnection, log : ILogger<PostgresData>, ser : JsonSerializer) =

     interface IData with

         member _.Category = PostgresCategoryData conn
-        member _.Page = PostgresPageData conn
-        member _.Post = PostgresPostData conn
+        member _.Page = PostgresPageData (conn, ser)
+        member _.Post = PostgresPostData (conn, ser)
         member _.TagMap = PostgresTagMapData conn
         member _.Theme = PostgresThemeData conn
         member _.ThemeAsset = PostgresThemeAssetData conn
         member _.Upload = PostgresUploadData conn
-        member _.WebLog = PostgresWebLogData conn
+        member _.WebLog = PostgresWebLogData (conn, ser)
         member _.WebLogUser = PostgresWebLogUserData conn

+        member _.Serializer = ser
+
         member _.StartUp () = backgroundTask {

             let _ = NpgsqlConnection.GlobalTypeMapper.UseNodaTime ()
@@ -77,27 +80,9 @@ type PostgresData (conn : NpgsqlConnection, log : ILogger<PostgresData>) =
                     id TEXT NOT NULL PRIMARY KEY,
                     web_log_id TEXT NOT NULL REFERENCES web_log (id),
                     source TEXT NOT NULL,
-                    path TEXT NOT NULL)"
+                    path TEXT NOT NULL,
+                    podcast JSONB)"
                 "CREATE INDEX web_log_feed_web_log_idx ON web_log_feed (web_log_id)"
-            if needsTable "web_log_feed_podcast" then
-                "CREATE TABLE web_log_feed_podcast (
-                    feed_id TEXT NOT NULL PRIMARY KEY REFERENCES web_log_feed (id),
-                    title TEXT NOT NULL,
-                    subtitle TEXT,
-                    items_in_feed INTEGER NOT NULL,
-                    summary TEXT NOT NULL,
-                    displayed_author TEXT NOT NULL,
-                    email TEXT NOT NULL,
-                    image_url TEXT NOT NULL,
-                    apple_category TEXT NOT NULL,
-                    apple_subcategory TEXT,
-                    explicit TEXT NOT NULL,
-                    default_media_type TEXT,
-                    media_base_url TEXT,
-                    podcast_guid TEXT,
-                    funding_url TEXT,
-                    funding_text TEXT,
-                    medium TEXT)"

             // Category table
             if needsTable "category" then
@@ -120,7 +105,7 @@ type PostgresData (conn : NpgsqlConnection, log : ILogger<PostgresData>) =
                     last_name TEXT NOT NULL,
                     preferred_name TEXT NOT NULL,
                     password_hash TEXT NOT NULL,
-                    salt TEXT NOT NULL,
+                    salt UUID NOT NULL,
                     url TEXT,
                     access_level TEXT NOT NULL,
                     created_on TIMESTAMPTZ NOT NULL,
@@ -1079,7 +1079,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<RethinkDbData>) =
             do! rethink {
                 withTable Table.WebLogUser
                 get userId
-                update [ nameof WebLogUser.empty.LastSeenOn, Utils.now () :> obj ]
+                update [ nameof WebLogUser.empty.LastSeenOn, Noda.now () :> obj ]
                 write; withRetryOnce; ignoreResult conn
             }
         | None -> ()
@@ -1102,6 +1102,9 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<RethinkDbData>) =
             }
         }

+        member _.Serializer =
+            Net.Converter.Serializer
+
         member _.StartUp () = backgroundTask {
             let! dbs = rethink<string list> { dbList; result; withRetryOnce conn }
             if not (dbs |> List.contains config.Database) then
|
@ -122,7 +122,7 @@ type SQLiteWebLogUserData (conn : SqliteConnection) =
|
||||||
AND web_log_id = @webLogId"
|
AND web_log_id = @webLogId"
|
||||||
addWebLogId cmd webLogId
|
addWebLogId cmd webLogId
|
||||||
[ cmd.Parameters.AddWithValue ("@id", WebLogUserId.toString userId)
|
[ cmd.Parameters.AddWithValue ("@id", WebLogUserId.toString userId)
|
||||||
cmd.Parameters.AddWithValue ("@lastSeenOn", instantParam (Utils.now ()))
|
cmd.Parameters.AddWithValue ("@lastSeenOn", instantParam (Noda.now ()))
|
||||||
] |> ignore
|
] |> ignore
|
||||||
let! _ = cmd.ExecuteNonQueryAsync ()
|
let! _ = cmd.ExecuteNonQueryAsync ()
|
||||||
()
|
()
|
||||||
|
|
|
@@ -3,9 +3,10 @@ namespace MyWebLog.Data
 open Microsoft.Data.Sqlite
 open Microsoft.Extensions.Logging
 open MyWebLog.Data.SQLite
+open Newtonsoft.Json

 /// SQLite myWebLog data implementation
-type SQLiteData (conn : SqliteConnection, log : ILogger<SQLiteData>) =
+type SQLiteData (conn : SqliteConnection, log : ILogger<SQLiteData>, ser : JsonSerializer) =

     /// The connection for this instance
     member _.Conn = conn
@@ -31,6 +32,8 @@ type SQLiteData (conn : SqliteConnection, log : ILogger<SQLiteData>) =
         member _.WebLog = SQLiteWebLogData conn
         member _.WebLogUser = SQLiteWebLogUserData conn

+        member _.Serializer = ser
+
         member _.StartUp () = backgroundTask {

             use cmd = conn.CreateCommand ()
@@ -37,6 +37,13 @@ let diffPermalinks oldLinks newLinks =
 let diffRevisions oldRevs newRevs =
     diffLists oldRevs newRevs (fun (rev : Revision) -> $"{rev.AsOf.ToUnixTimeTicks ()}|{MarkupText.toString rev.Text}")

-/// Get the current instant
-let now () =
-    NodaTime.SystemClock.Instance.GetCurrentInstant ()
+open MyWebLog.Converters
+open Newtonsoft.Json
+
+/// Serialize an object to JSON
+let serialize<'T> ser (item : 'T) =
+    JsonConvert.SerializeObject (item, Json.settings ser)
+
+/// Deserialize a JSON string
+let deserialize<'T> (ser : JsonSerializer) value =
+    JsonConvert.DeserializeObject<'T> (value, Json.settings ser)
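For context (a sketch, not part of the commit): the two helpers above are thin wrappers over JsonConvert that reuse the settings extracted from the application's configured JsonSerializer, so a round trip through a JSONB column might look roughly like this:

    // hypothetical usage sketch; ser is the JsonSerializer exposed as IData.Serializer
    let json = Utils.serialize ser page.Metadata            // MetaItem list -> JSON text for a JSONB column
    let meta : MetaItem list = Utils.deserialize ser json   // JSON text -> MetaItem list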
@@ -83,7 +83,7 @@ module Comment =
           Email = ""
           Url = None
           Status = Pending
-          PostedOn = Instant.MinValue
+          PostedOn = Noda.epoch
           Text = ""
         }

@@ -141,8 +141,8 @@ module Page =
           AuthorId = WebLogUserId.empty
           Title = ""
           Permalink = Permalink.empty
-          PublishedOn = Instant.MinValue
-          UpdatedOn = Instant.MinValue
+          PublishedOn = Noda.epoch
+          UpdatedOn = Noda.epoch
           IsInPageList = false
           Template = None
           Text = ""
@@ -216,7 +216,7 @@ module Post =
           Title = ""
           Permalink = Permalink.empty
           PublishedOn = None
-          UpdatedOn = Instant.MinValue
+          UpdatedOn = Noda.epoch
           Text = ""
           Template = None
           CategoryIds = []
@@ -301,7 +301,7 @@ module ThemeAsset =
     /// An empty theme asset
     let empty =
         { Id = ThemeAssetId (ThemeId "", "")
-          UpdatedOn = Instant.MinValue
+          UpdatedOn = Noda.epoch
           Data = [||]
         }

@@ -332,7 +332,7 @@ module Upload =
         { Id = UploadId.empty
          WebLogId = WebLogId.empty
          Path = Permalink.empty
-         UpdatedOn = Instant.MinValue
+         UpdatedOn = Noda.epoch
          Data = [||]
        }

@@ -473,7 +473,7 @@ module WebLogUser =
          Salt = Guid.Empty
          Url = None
          AccessLevel = Author
-         CreatedOn = Instant.FromUnixTimeSeconds 0L
+         CreatedOn = Noda.epoch
          LastSeenOn = None
        }
|
@@ -13,6 +13,19 @@ module private Helpers =
     Convert.ToBase64String(Guid.NewGuid().ToByteArray ()).Replace('/', '_').Replace('+', '-').Substring (0, 22)
 
 
+/// Functions to support NodaTime manipulation
+module Noda =
+
+    /// The clock to use when getting "now" (will make mutable for testing)
+    let clock : IClock = SystemClock.Instance
+
+    /// The Unix epoch
+    let epoch = Instant.FromUnixTimeSeconds 0L
+
+    /// The current Instant, with fractional seconds truncated
+    let now () = Instant.FromUnixTimeSeconds (clock.GetCurrentInstant().ToUnixTimeSeconds ())
+
+
 /// A user's access level
 type AccessLevel =
     /// The user may create and publish posts and edit the ones they have created
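The new Noda module centralizes "now" and the epoch default used by the empty records above. Its behaviour follows directly from the definitions in this hunk; a small sketch:

    open NodaTime

    // epoch is simply the Unix epoch, the new default for the *On fields
    let isEpoch = Noda.epoch = Instant.FromUnixTimeSeconds 0L          // true

    // now () drops fractional seconds, so a value pushed through a
    // seconds-precision store reads back equal to what was written
    let written   = Noda.now ()
    let readBack  = Instant.FromUnixTimeSeconds (written.ToUnixTimeSeconds ())
    let roundTrip = written = readBack                                 // true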
@@ -291,7 +304,7 @@ module Revision =
 
     /// An empty revision
     let empty =
-        { AsOf = Instant.MinValue
+        { AsOf = Noda.epoch
          Text = Html ""
        }
@@ -11,7 +11,6 @@ module Extensions =
 open Microsoft.AspNetCore.Antiforgery
 open Microsoft.Extensions.Configuration
 open Microsoft.Extensions.DependencyInjection
-open NodaTime
 
 /// Hold variable for the configured generator string
 let mutable private generatorString : string option = None
 
@@ -21,9 +20,6 @@ module Extensions =
     /// The anti-CSRF service
     member this.AntiForgery = this.RequestServices.GetRequiredService<IAntiforgery> ()
 
-    /// The system clock
-    member this.Clock = this.RequestServices.GetRequiredService<IClock> ()
-
     /// The cross-site request forgery token set for this request
     member this.CsrfTokenSet = this.AntiForgery.GetAndStoreTokens this
 
@@ -60,7 +56,6 @@ module Extensions =
         defaultArg (this.UserAccessLevel |> Option.map (AccessLevel.hasAccess level)) false
 
-
 open System.Collections.Concurrent
 
 /// <summary>
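With the Clock extension removed, handlers no longer resolve IClock from request services; every call site in the hunks below switches to the shared helper:

    // before
    let now = ctx.Clock.GetCurrentInstant ()
    // after
    let now = Noda.now ()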
@@ -13,23 +13,22 @@ module Dashboard =
 // GET /admin/dashboard
 let user : HttpHandler = requireAccess Author >=> fun next ctx -> task {
     let getCount (f : WebLogId -> Task<int>) = f ctx.WebLog.Id
     let data = ctx.Data
-    let posts = getCount (data.Post.CountByStatus Published)
-    let drafts = getCount (data.Post.CountByStatus Draft)
-    let pages = getCount data.Page.CountAll
-    let listed = getCount data.Page.CountListed
-    let cats = getCount data.Category.CountAll
-    let topCats = getCount data.Category.CountTopLevel
-    let! _ = Task.WhenAll (posts, drafts, pages, listed, cats, topCats)
+    let! posts = getCount (data.Post.CountByStatus Published)
+    let! drafts = getCount (data.Post.CountByStatus Draft)
+    let! pages = getCount data.Page.CountAll
+    let! listed = getCount data.Page.CountListed
+    let! cats = getCount data.Category.CountAll
+    let! topCats = getCount data.Category.CountTopLevel
     return!
         hashForPage "Dashboard"
         |> addToHash ViewContext.Model {
-            Posts = posts.Result
-            Drafts = drafts.Result
-            Pages = pages.Result
-            ListedPages = listed.Result
-            Categories = cats.Result
-            TopLevelCategories = topCats.Result
+            Posts = posts
+            Drafts = drafts
+            Pages = pages
+            ListedPages = listed
+            Categories = cats
+            TopLevelCategories = topCats
            }
         |> adminView "dashboard" next ctx
 }
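The dashboard counts are now awaited one at a time with let! and bound as plain ints, instead of starting six tasks, waiting on Task.WhenAll, and reading .Result. The effect on each binding:

    // before: posts : Task<int>, unwrapped later with posts.Result
    let posts = getCount (data.Post.CountByStatus Published)
    // after: posts : int, awaited in place
    let! posts = getCount (data.Post.CountByStatus Published)

The queries therefore run sequentially, which is friendlier to data implementations that share a single database connection.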
@@ -145,7 +145,7 @@ let restoreRevision (pgId, revDate) : HttpHandler = requireAccess Author >=> fun
     | Some pg, Some rev when canEdit pg.AuthorId ctx ->
         do! ctx.Data.Page.Update
                 { pg with
-                    Revisions = { rev with AsOf = ctx.Clock.GetCurrentInstant () }
+                    Revisions = { rev with AsOf = Noda.now () }
                                 :: (pg.Revisions |> List.filter (fun r -> r.AsOf <> rev.AsOf))
                 }
         do! addMessage ctx { UserMessage.success with Message = "Revision restored successfully" }
@@ -171,7 +171,7 @@ let deleteRevision (pgId, revDate) : HttpHandler = requireAccess Author >=> fun
 let save : HttpHandler = requireAccess Author >=> fun next ctx -> task {
     let! model = ctx.BindFormAsync<EditPageModel> ()
     let data = ctx.Data
-    let now = ctx.Clock.GetCurrentInstant ()
+    let now = Noda.now ()
     let tryPage =
         if model.IsNew then
             { Page.empty with
@@ -350,7 +350,7 @@ let restoreRevision (postId, revDate) : HttpHandler = requireAccess Author >=> fun
     | Some post, Some rev when canEdit post.AuthorId ctx ->
         do! ctx.Data.Post.Update
                 { post with
-                    Revisions = { rev with AsOf = ctx.Clock.GetCurrentInstant () }
+                    Revisions = { rev with AsOf = Noda.now () }
                                 :: (post.Revisions |> List.filter (fun r -> r.AsOf <> rev.AsOf))
                 }
         do! addMessage ctx { UserMessage.success with Message = "Revision restored successfully" }
@ -388,7 +388,7 @@ let save : HttpHandler = requireAccess Author >=> fun next ctx -> task {
|
||||||
| Some post when canEdit post.AuthorId ctx ->
|
| Some post when canEdit post.AuthorId ctx ->
|
||||||
let priorCats = post.CategoryIds
|
let priorCats = post.CategoryIds
|
||||||
let updatedPost =
|
let updatedPost =
|
||||||
model.UpdatePost post (ctx.Clock.GetCurrentInstant ())
|
model.UpdatePost post (Noda.now ())
|
||||||
|> function
|
|> function
|
||||||
| post ->
|
| post ->
|
||||||
if model.SetPublished then
|
if model.SetPublished then
|
||||||
|
|
|
@ -145,7 +145,7 @@ let save : HttpHandler = requireAccess Author >=> fun next ctx -> task {
|
||||||
let upload = Seq.head ctx.Request.Form.Files
|
let upload = Seq.head ctx.Request.Form.Files
|
||||||
let fileName = String.Concat (makeSlug (Path.GetFileNameWithoutExtension upload.FileName),
|
let fileName = String.Concat (makeSlug (Path.GetFileNameWithoutExtension upload.FileName),
|
||||||
Path.GetExtension(upload.FileName).ToLowerInvariant ())
|
Path.GetExtension(upload.FileName).ToLowerInvariant ())
|
||||||
let now = ctx.Clock.GetCurrentInstant ()
|
let now = Noda.now ()
|
||||||
let localNow = WebLog.localTime ctx.WebLog now
|
let localNow = WebLog.localTime ctx.WebLog now
|
||||||
let year = localNow.ToString "yyyy"
|
let year = localNow.ToString "yyyy"
|
||||||
let month = localNow.ToString "MM"
|
let month = localNow.ToString "MM"
|
||||||
|
|
|
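Only the source of "now" changes for uploads; the local-time year/month derivation stays the same. A worked sketch of the values involved, assuming (hypothetically) an upload saved on 4 July 2022 in the web log's configured time zone:

    let now      = Noda.now ()
    let localNow = WebLog.localTime ctx.WebLog now
    let year     = localNow.ToString "yyyy"   // "2022"
    let month    = localNow.ToString "MM"     // "07"
    // presumably combined into a year/month segment of the stored upload path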
@ -203,7 +203,7 @@ let save : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task {
|
||||||
{ WebLogUser.empty with
|
{ WebLogUser.empty with
|
||||||
Id = WebLogUserId.create ()
|
Id = WebLogUserId.create ()
|
||||||
WebLogId = ctx.WebLog.Id
|
WebLogId = ctx.WebLog.Id
|
||||||
CreatedOn = ctx.Clock.GetCurrentInstant ()
|
CreatedOn = Noda.now ()
|
||||||
} |> someTask
|
} |> someTask
|
||||||
else data.WebLogUser.FindById (WebLogUserId model.Id) ctx.WebLog.Id
|
else data.WebLogUser.FindById (WebLogUserId model.Id) ctx.WebLog.Id
|
||||||
match! tryUser with
|
match! tryUser with
|
||||||
|
|
|
@ -156,7 +156,6 @@ let loadTheme (args : string[]) (sp : IServiceProvider) = task {
|
||||||
/// Back up a web log's data
|
/// Back up a web log's data
|
||||||
module Backup =
|
module Backup =
|
||||||
|
|
||||||
open System.Threading.Tasks
|
|
||||||
open MyWebLog.Converters
|
open MyWebLog.Converters
|
||||||
open Newtonsoft.Json
|
open Newtonsoft.Json
|
||||||
|
|
||||||
|
@ -252,7 +251,7 @@ module Backup =
|
||||||
Uploads : EncodedUpload list
|
Uploads : EncodedUpload list
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create a JSON serializer (uses RethinkDB data implementation's JSON converters)
|
/// Create a JSON serializer
|
||||||
let private getSerializer prettyOutput =
|
let private getSerializer prettyOutput =
|
||||||
let serializer = Json.configure (JsonSerializer.CreateDefault ())
|
let serializer = Json.configure (JsonSerializer.CreateDefault ())
|
||||||
if prettyOutput then serializer.Formatting <- Formatting.Indented
|
if prettyOutput then serializer.Formatting <- Formatting.Indented
|
||||||
|
@@ -382,7 +381,8 @@ module Backup =
         printfn ""
         printfn "- Importing theme..."
         do! data.Theme.Save restore.Theme
-        let! _ = restore.Assets |> List.map (EncodedAsset.toAsset >> data.ThemeAsset.Save) |> Task.WhenAll
+        restore.Assets
+        |> List.iter (EncodedAsset.toAsset >> data.ThemeAsset.Save >> Async.AwaitTask >> Async.RunSynchronously)
 
         // Restore web log data
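Theme assets are now restored one at a time, blocking on each Save task, rather than starting every save and awaiting Task.WhenAll. If the synchronous bridge ever becomes awkward, an equivalent sequential form that stays inside the surrounding task block would be (sketch only, assuming ThemeAsset.Save returns a Task):

    for asset in restore.Assets do
        do! data.ThemeAsset.Save (EncodedAsset.toAsset asset)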
@@ -393,19 +393,22 @@ module Backup =
         do! data.WebLogUser.Restore restore.Users
 
         printfn "- Restoring categories and tag mappings..."
-        do! data.TagMap.Restore restore.TagMappings
-        do! data.Category.Restore restore.Categories
+        if not (List.isEmpty restore.TagMappings) then do! data.TagMap.Restore restore.TagMappings
+        if not (List.isEmpty restore.Categories) then do! data.Category.Restore restore.Categories
 
         printfn "- Restoring pages..."
-        do! data.Page.Restore restore.Pages
+        if not (List.isEmpty restore.Pages) then
+            printfn "here"
+            do! data.Page.Restore restore.Pages
 
         printfn "- Restoring posts..."
-        do! data.Post.Restore restore.Posts
+        if not (List.isEmpty restore.Posts) then do! data.Post.Restore restore.Posts
 
         // TODO: comments not yet implemented
 
         printfn "- Restoring uploads..."
-        do! data.Upload.Restore (restore.Uploads |> List.map EncodedUpload.toUpload)
+        if not (List.isEmpty restore.Uploads) then
+            do! data.Upload.Restore (restore.Uploads |> List.map EncodedUpload.toUpload)
 
         displayStats "Restored for <>NAME<>:" restore.WebLog restore
     }
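Each restore step now skips collections that are empty in the backup file, so data implementations are never asked to restore nothing. (The stray printfn "here" in the pages branch reads like leftover debugging output.) If the pattern spreads, it could be captured in a small helper; a sketch only, assuming the Restore members have the shape 'T list -> Task<unit>, and noting that open System.Threading.Tasks was just removed from this module:

    let restoreIfAny (restorer : 'T list -> Task<unit>) (items : 'T list) = task {
        if not (List.isEmpty items) then do! restorer items
    }

    // usage
    do! restoreIfAny data.TagMap.Restore restore.TagMappings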
@@ -30,28 +30,28 @@ open System
 open Microsoft.Extensions.DependencyInjection
 open MyWebLog.Data
 open Newtonsoft.Json
-open NodaTime
 open Npgsql
 
 /// Logic to obtain a data connection and implementation based on configured values
 module DataImplementation =
 
     open MyWebLog.Converters
+    // open Npgsql.Logging
     open RethinkDb.Driver.FSharp
     open RethinkDb.Driver.Net
 
     /// Get the configured data implementation
-    let get (sp : IServiceProvider) : IData * JsonSerializer =
+    let get (sp : IServiceProvider) : IData =
         let config = sp.GetRequiredService<IConfiguration> ()
         let await it = (Async.AwaitTask >> Async.RunSynchronously) it
         let connStr name = config.GetConnectionString name
         let hasConnStr name = (connStr >> isNull >> not) name
-        let createSQLite connStr : IData * JsonSerializer =
+        let createSQLite connStr : IData =
            let log = sp.GetRequiredService<ILogger<SQLiteData>> ()
            let conn = new SqliteConnection (connStr)
            log.LogInformation $"Using SQLite database {conn.DataSource}"
            await (SQLiteData.setUpConnection conn)
-           SQLiteData (conn, log), Json.configure (JsonSerializer.CreateDefault ())
+           SQLiteData (conn, log, Json.configure (JsonSerializer.CreateDefault ()))
 
        if hasConnStr "SQLite" then
            createSQLite (connStr "SQLite")
 
@@ -60,12 +60,13 @@ module DataImplementation =
            let _ = Json.configure Converter.Serializer
            let rethinkCfg = DataConfig.FromUri (connStr "RethinkDB")
            let conn = await (rethinkCfg.CreateConnectionAsync log)
-           RethinkDbData (conn, rethinkCfg, log), Converter.Serializer
+           RethinkDbData (conn, rethinkCfg, log)
        elif hasConnStr "PostgreSQL" then
            let log = sp.GetRequiredService<ILogger<PostgresData>> ()
+           // NpgsqlLogManager.Provider <- ConsoleLoggingProvider NpgsqlLogLevel.Debug
            let conn = new NpgsqlConnection (connStr "PostgreSQL")
            log.LogInformation $"Using PostgreSQL database {conn.Host}:{conn.Port}/{conn.Database}"
-           PostgresData (conn, log), Json.configure (JsonSerializer.CreateDefault ())
+           PostgresData (conn, log, Json.configure (JsonSerializer.CreateDefault ()))
        else
            createSQLite "Data Source=./myweblog.db;Cache=Shared"
 
@@ -118,9 +119,8 @@ let rec main args =
    let _ = builder.Services.AddAntiforgery ()
 
    let sp = builder.Services.BuildServiceProvider ()
-   let data, serializer = DataImplementation.get sp
-   let _ = builder.Services.AddSingleton<JsonSerializer> serializer
-   let _ = builder.Services.AddSingleton<IClock> SystemClock.Instance
+   let data = DataImplementation.get sp
+   let _ = builder.Services.AddSingleton<JsonSerializer> data.Serializer
 
    task {
        do! data.StartUp ()
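The JSON serializer is now obtained from the data implementation itself and registered as a singleton, and the separate IClock registration is gone. A hypothetical consumer that wants the same JSON behaviour as the persistence layer could pull it from DI (model here is any value to serialize):

    // e.g. inside an HttpHandler, where ctx is the HttpContext
    let ser  = ctx.RequestServices.GetRequiredService<JsonSerializer> ()
    let json = JsonConvert.SerializeObject (model, Json.settings ser)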