Compare commits
27 Commits
main
...
version-th
Author | SHA1 | Date | |
---|---|---|---|
7374440621 | |||
60a22747ac | |||
9b295263f9 | |||
bc1d17d916 | |||
cba1bbfa28 | |||
8b190a6c23 | |||
e33966b3df | |||
3ad6b5a521 | |||
210dd41cee | |||
fa4e1d327a | |||
c19f92889e | |||
1f7d415868 | |||
ba5e27e011 | |||
161a61823f | |||
d1840f63e5 | |||
87fbb1a8c7 | |||
e8953d6072 | |||
dc30716b83 | |||
88841fd3f8 | |||
870f87cb17 | |||
0032d15c0a | |||
95be82cc84 | |||
d047035173 | |||
cc3e41ddc5 | |||
d4c0e4e26c | |||
fbc4e891bd | |||
cd450a05e5 |
10
build.fs
10
build.fs
@ -23,7 +23,7 @@ let version =
|
||||
let appVersion = generator.Replace("\"Generator\": \"", "")
|
||||
let appVersion = appVersion.Substring (0, appVersion.IndexOf "\"")
|
||||
appVersion.Split ' ' |> Array.last
|
||||
|
||||
|
||||
/// Zip a theme distributed with myWebLog
|
||||
let zipTheme (name : string) (_ : TargetParameter) =
|
||||
let path = $"src/{name}-theme"
|
||||
@ -33,9 +33,9 @@ let zipTheme (name : string) (_ : TargetParameter) =
|
||||
|> Zip.zipSpec $"{releasePath}/{name}-theme.zip"
|
||||
|
||||
/// Frameworks supported by this build
|
||||
let frameworks = [ "net6.0"; "net8.0" ]
|
||||
let frameworks = [ "net8.0"; "net9.0" ]
|
||||
|
||||
/// Publish the project for the given runtime ID
|
||||
/// Publish the project for the given runtime ID
|
||||
let publishFor rid (_ : TargetParameter) =
|
||||
frameworks
|
||||
|> List.iter (fun fwk ->
|
||||
@ -65,7 +65,7 @@ let packageFor rid (_ : TargetParameter) =
|
||||
Target.create "Clean" (fun _ ->
|
||||
!! "src/**/bin"
|
||||
++ "src/**/obj"
|
||||
|> Shell.cleanDirs
|
||||
|> Shell.cleanDirs
|
||||
Shell.cleanDir releasePath
|
||||
)
|
||||
|
||||
@ -87,7 +87,7 @@ Target.create "RepackageLinux" (fun _ ->
|
||||
frameworks
|
||||
|> List.iter (fun fwk ->
|
||||
let zipArchive = $"{releasePath}/myWebLog-{version}.{fwk}.linux-x64.zip"
|
||||
let sh command args =
|
||||
let sh command args =
|
||||
CreateProcess.fromRawCommand command args
|
||||
|> CreateProcess.redirectOutput
|
||||
|> Proc.run
|
||||
|
@ -1,9 +1,11 @@
|
||||
<Project>
|
||||
<PropertyGroup>
|
||||
<TargetFrameworks>net6.0;net8.0</TargetFrameworks>
|
||||
<TargetFrameworks>net8.0;net9.0</TargetFrameworks>
|
||||
<DebugType>embedded</DebugType>
|
||||
<AssemblyVersion>2.2.0.0</AssemblyVersion>
|
||||
<FileVersion>2.2.0.0</FileVersion>
|
||||
<Version>2.2.0</Version>
|
||||
<GenerateDocumentationFile>true</GenerateDocumentationFile>
|
||||
<AssemblyVersion>3.0.0.0</AssemblyVersion>
|
||||
<FileVersion>3.0.0.0</FileVersion>
|
||||
<Version>3.0.0</Version>
|
||||
<VersionSuffix>beta1</VersionSuffix>
|
||||
</PropertyGroup>
|
||||
</Project>
|
||||
|
@ -1,14 +1,15 @@
|
||||
/// Converters for discriminated union types
|
||||
/// <summary>Converters for discriminated union types</summary>
|
||||
module MyWebLog.Converters
|
||||
|
||||
open MyWebLog
|
||||
open System
|
||||
|
||||
/// JSON.NET converters for discriminated union types
|
||||
/// <summary>JSON.NET converters for discriminated union types</summary>
|
||||
module Json =
|
||||
|
||||
|
||||
open Newtonsoft.Json
|
||||
|
||||
|
||||
/// <summary>Converter for the <see cref="CategoryId" /> type</summary>
|
||||
type CategoryIdConverter() =
|
||||
inherit JsonConverter<CategoryId>()
|
||||
override _.WriteJson(writer: JsonWriter, value: CategoryId, _: JsonSerializer) =
|
||||
@ -16,6 +17,7 @@ module Json =
|
||||
override _.ReadJson(reader: JsonReader, _: Type, _: CategoryId, _: bool, _: JsonSerializer) =
|
||||
(string >> CategoryId) reader.Value
|
||||
|
||||
/// <summary>Converter for the <see cref="CommentId" /> type</summary>
|
||||
type CommentIdConverter() =
|
||||
inherit JsonConverter<CommentId>()
|
||||
override _.WriteJson(writer: JsonWriter, value: CommentId, _: JsonSerializer) =
|
||||
@ -23,6 +25,7 @@ module Json =
|
||||
override _.ReadJson(reader: JsonReader, _: Type, _: CommentId, _: bool, _: JsonSerializer) =
|
||||
(string >> CommentId) reader.Value
|
||||
|
||||
/// <summary>Converter for the <see cref="CommentStatus" /> type</summary>
|
||||
type CommentStatusConverter() =
|
||||
inherit JsonConverter<CommentStatus>()
|
||||
override _.WriteJson(writer: JsonWriter, value: CommentStatus, _: JsonSerializer) =
|
||||
@ -30,6 +33,7 @@ module Json =
|
||||
override _.ReadJson(reader: JsonReader, _: Type, _: CommentStatus, _: bool, _: JsonSerializer) =
|
||||
(string >> CommentStatus.Parse) reader.Value
|
||||
|
||||
/// <summary>Converter for the <see cref="CustomFeedId" /> type</summary>
|
||||
type CustomFeedIdConverter() =
|
||||
inherit JsonConverter<CustomFeedId>()
|
||||
override _.WriteJson(writer: JsonWriter, value: CustomFeedId, _: JsonSerializer) =
|
||||
@ -37,27 +41,39 @@ module Json =
|
||||
override _.ReadJson(reader: JsonReader, _: Type, _: CustomFeedId, _: bool, _: JsonSerializer) =
|
||||
(string >> CustomFeedId) reader.Value
|
||||
|
||||
/// <summary>Converter for the <see cref="CustomFeedSource" /> type</summary>
|
||||
type CustomFeedSourceConverter() =
|
||||
inherit JsonConverter<CustomFeedSource>()
|
||||
override _.WriteJson(writer: JsonWriter, value: CustomFeedSource, _: JsonSerializer) =
|
||||
writer.WriteValue(string value)
|
||||
override _.ReadJson(reader: JsonReader, _: Type, _: CustomFeedSource, _: bool, _: JsonSerializer) =
|
||||
(string >> CustomFeedSource.Parse) reader.Value
|
||||
|
||||
|
||||
/// <summary>Converter for the <see cref="ExplicitRating" /> type</summary>
|
||||
type ExplicitRatingConverter() =
|
||||
inherit JsonConverter<ExplicitRating>()
|
||||
override _.WriteJson(writer: JsonWriter, value: ExplicitRating, _: JsonSerializer) =
|
||||
writer.WriteValue(string value)
|
||||
override _.ReadJson(reader: JsonReader, _: Type, _: ExplicitRating, _: bool, _: JsonSerializer) =
|
||||
(string >> ExplicitRating.Parse) reader.Value
|
||||
|
||||
|
||||
/// <summary>Converter for the <see cref="MarkupText" /> type</summary>
|
||||
type MarkupTextConverter() =
|
||||
inherit JsonConverter<MarkupText>()
|
||||
override _.WriteJson(writer: JsonWriter, value: MarkupText, _: JsonSerializer) =
|
||||
writer.WriteValue(string value)
|
||||
override _.ReadJson(reader: JsonReader, _: Type, _: MarkupText, _: bool, _: JsonSerializer) =
|
||||
(string >> MarkupText.Parse) reader.Value
|
||||
|
||||
|
||||
/// <summary>Converter for the <see cref="OpenGraphType" /> type</summary>
|
||||
type OpenGraphTypeConverter() =
|
||||
inherit JsonConverter<OpenGraphType>()
|
||||
override _.WriteJson(writer: JsonWriter, value: OpenGraphType, _: JsonSerializer) =
|
||||
writer.WriteValue(string value)
|
||||
override _.ReadJson(reader: JsonReader, _: Type, _: OpenGraphType, _: bool, _: JsonSerializer) =
|
||||
(string >> OpenGraphType.Parse) reader.Value
|
||||
|
||||
/// <summary>Converter for the <see cref="Permalink" /> type</summary>
|
||||
type PermalinkConverter() =
|
||||
inherit JsonConverter<Permalink>()
|
||||
override _.WriteJson(writer: JsonWriter, value: Permalink, _: JsonSerializer) =
|
||||
@ -65,6 +81,7 @@ module Json =
|
||||
override _.ReadJson(reader: JsonReader, _: Type, _: Permalink, _: bool, _: JsonSerializer) =
|
||||
(string >> Permalink) reader.Value
|
||||
|
||||
/// <summary>Converter for the <see cref="PageId" /> type</summary>
|
||||
type PageIdConverter() =
|
||||
inherit JsonConverter<PageId>()
|
||||
override _.WriteJson(writer: JsonWriter, value: PageId, _: JsonSerializer) =
|
||||
@ -72,6 +89,7 @@ module Json =
|
||||
override _.ReadJson(reader: JsonReader, _: Type, _: PageId, _: bool, _: JsonSerializer) =
|
||||
(string >> PageId) reader.Value
|
||||
|
||||
/// <summary>Converter for the <see cref="PodcastMedium" /> type</summary>
|
||||
type PodcastMediumConverter() =
|
||||
inherit JsonConverter<PodcastMedium>()
|
||||
override _.WriteJson(writer: JsonWriter, value: PodcastMedium, _: JsonSerializer) =
|
||||
@ -79,6 +97,7 @@ module Json =
|
||||
override _.ReadJson(reader: JsonReader, _: Type, _: PodcastMedium, _: bool, _: JsonSerializer) =
|
||||
(string >> PodcastMedium.Parse) reader.Value
|
||||
|
||||
/// <summary>Converter for the <see cref="PostId" /> type</summary>
|
||||
type PostIdConverter() =
|
||||
inherit JsonConverter<PostId>()
|
||||
override _.WriteJson(writer: JsonWriter, value: PostId, _: JsonSerializer) =
|
||||
@ -86,6 +105,7 @@ module Json =
|
||||
override _.ReadJson(reader: JsonReader, _: Type, _: PostId, _: bool, _: JsonSerializer) =
|
||||
(string >> PostId) reader.Value
|
||||
|
||||
/// <summary>Converter for the <see cref="TagMapId" /> type</summary>
|
||||
type TagMapIdConverter() =
|
||||
inherit JsonConverter<TagMapId>()
|
||||
override _.WriteJson(writer: JsonWriter, value: TagMapId, _: JsonSerializer) =
|
||||
@ -93,6 +113,7 @@ module Json =
|
||||
override _.ReadJson(reader: JsonReader, _: Type, _: TagMapId, _: bool, _: JsonSerializer) =
|
||||
(string >> TagMapId) reader.Value
|
||||
|
||||
/// <summary>Converter for the <see cref="ThemeAssetId" /> type</summary>
|
||||
type ThemeAssetIdConverter() =
|
||||
inherit JsonConverter<ThemeAssetId>()
|
||||
override _.WriteJson(writer: JsonWriter, value: ThemeAssetId, _: JsonSerializer) =
|
||||
@ -100,20 +121,23 @@ module Json =
|
||||
override _.ReadJson(reader: JsonReader, _: Type, _: ThemeAssetId, _: bool, _: JsonSerializer) =
|
||||
(string >> ThemeAssetId.Parse) reader.Value
|
||||
|
||||
/// <summary>Converter for the <see cref="ThemeId" /> type</summary>
|
||||
type ThemeIdConverter() =
|
||||
inherit JsonConverter<ThemeId>()
|
||||
override _.WriteJson(writer: JsonWriter, value: ThemeId, _: JsonSerializer) =
|
||||
writer.WriteValue(string value)
|
||||
override _.ReadJson(reader: JsonReader, _: Type, _: ThemeId, _: bool, _: JsonSerializer) =
|
||||
(string >> ThemeId) reader.Value
|
||||
|
||||
|
||||
/// <summary>Converter for the <see cref="UploadId" /> type</summary>
|
||||
type UploadIdConverter() =
|
||||
inherit JsonConverter<UploadId>()
|
||||
override _.WriteJson(writer: JsonWriter, value: UploadId, _: JsonSerializer) =
|
||||
writer.WriteValue(string value)
|
||||
override _.ReadJson(reader: JsonReader, _: Type, _: UploadId, _: bool, _: JsonSerializer) =
|
||||
(string >> UploadId) reader.Value
|
||||
|
||||
|
||||
/// <summary>Converter for the <see cref="WebLogId" /> type</summary>
|
||||
type WebLogIdConverter() =
|
||||
inherit JsonConverter<WebLogId>()
|
||||
override _.WriteJson(writer: JsonWriter, value: WebLogId, _: JsonSerializer) =
|
||||
@ -121,6 +145,7 @@ module Json =
|
||||
override _.ReadJson(reader: JsonReader, _: Type, _: WebLogId, _: bool, _: JsonSerializer) =
|
||||
(string >> WebLogId) reader.Value
|
||||
|
||||
/// <summary>Converter for the <see cref="WebLogUserId" /> type</summary>
|
||||
type WebLogUserIdConverter() =
|
||||
inherit JsonConverter<WebLogUserId> ()
|
||||
override _.WriteJson(writer: JsonWriter, value: WebLogUserId, _: JsonSerializer) =
|
||||
@ -131,9 +156,9 @@ module Json =
|
||||
open Microsoft.FSharpLu.Json
|
||||
open NodaTime
|
||||
open NodaTime.Serialization.JsonNet
|
||||
|
||||
/// Configure a serializer to use these converters
|
||||
let configure (ser : JsonSerializer) =
|
||||
|
||||
/// <summary>Configure a serializer to use these converters (and other settings)</summary>
|
||||
let configure (ser: JsonSerializer) =
|
||||
// Our converters
|
||||
[ CategoryIdConverter() :> JsonConverter
|
||||
CommentIdConverter()
|
||||
@ -142,6 +167,7 @@ module Json =
|
||||
CustomFeedSourceConverter()
|
||||
ExplicitRatingConverter()
|
||||
MarkupTextConverter()
|
||||
OpenGraphTypeConverter()
|
||||
PermalinkConverter()
|
||||
PageIdConverter()
|
||||
PodcastMediumConverter()
|
||||
@ -160,12 +186,14 @@ module Json =
|
||||
ser.NullValueHandling <- NullValueHandling.Ignore
|
||||
ser.MissingMemberHandling <- MissingMemberHandling.Ignore
|
||||
ser
|
||||
|
||||
|
||||
/// Serializer settings extracted from a JsonSerializer (a property sure would be nice...)
|
||||
let mutable private serializerSettings : JsonSerializerSettings option = None
|
||||
|
||||
/// Extract settings from the serializer to be used in JsonConvert calls
|
||||
let settings (ser : JsonSerializer) =
|
||||
|
||||
/// <summary>Extract settings from the serializer to be used in <c>JsonConvert</c> calls</summary>
|
||||
/// <param name="ser">The serializer from which settings will be extracted if required</param>
|
||||
/// <returns>The serializer settings to use for <c>JsonConvert</c> calls</returns>
|
||||
let settings (ser: JsonSerializer) =
|
||||
if Option.isNone serializerSettings then
|
||||
serializerSettings <- JsonSerializerSettings (
|
||||
ConstructorHandling = ser.ConstructorHandling,
|
||||
|
@ -5,17 +5,17 @@
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="BitBadger.Documents.Postgres" Version="3.1.0" />
|
||||
<PackageReference Include="BitBadger.Documents.Sqlite" Version="3.1.0" />
|
||||
<PackageReference Include="Microsoft.Data.Sqlite" Version="8.0.6" />
|
||||
<PackageReference Include="Microsoft.Extensions.Caching.Abstractions" Version="8.0.0" />
|
||||
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="8.0.0" />
|
||||
<PackageReference Include="BitBadger.Documents.Postgres" Version="4.1.0" />
|
||||
<PackageReference Include="BitBadger.Documents.Sqlite" Version="4.1.0" />
|
||||
<PackageReference Include="Microsoft.Data.Sqlite" Version="9.0.6" />
|
||||
<PackageReference Include="Microsoft.Extensions.Caching.Abstractions" Version="9.0.6" />
|
||||
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="9.0.6" />
|
||||
<PackageReference Include="Microsoft.FSharpLu.Json" Version="0.11.7" />
|
||||
<PackageReference Include="NodaTime.Serialization.JsonNet" Version="3.1.0" />
|
||||
<PackageReference Include="Npgsql.NodaTime" Version="8.0.3" />
|
||||
<PackageReference Include="NodaTime.Serialization.JsonNet" Version="3.2.0" />
|
||||
<PackageReference Include="Npgsql.NodaTime" Version="9.0.3" />
|
||||
<PackageReference Include="RethinkDb.Driver" Version="2.3.150" />
|
||||
<PackageReference Include="RethinkDb.Driver.FSharp" Version="0.9.0-beta-07" />
|
||||
<PackageReference Update="FSharp.Core" Version="8.0.300" />
|
||||
<PackageReference Update="FSharp.Core" Version="9.0.300" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
@ -19,38 +19,41 @@ type PostgresCategoryData(log: ILogger) =
|
||||
let countTopLevel webLogId =
|
||||
log.LogTrace "Category.countTopLevel"
|
||||
Custom.scalar
|
||||
$"""{Query.Count.byContains Table.Category}
|
||||
AND {Query.whereByField (Field.NEX (nameof Category.Empty.ParentId)) ""}"""
|
||||
$"""{Query.byContains (Query.count Table.Category)}
|
||||
AND {Query.whereByFields Any [ Field.NotExists (nameof Category.Empty.ParentId) ]}"""
|
||||
[ webLogContains webLogId ]
|
||||
toCount
|
||||
|
||||
/// Find all categories for the given web log
|
||||
let findByWebLog webLogId =
|
||||
log.LogTrace "Category.findByWebLog"
|
||||
Find.byContains<Category> Table.Category (webLogDoc webLogId)
|
||||
|
||||
/// Retrieve all categories for the given web log in a DotLiquid-friendly format
|
||||
let findAllForView webLogId = backgroundTask {
|
||||
log.LogTrace "Category.findAllForView"
|
||||
let! cats =
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Category} ORDER BY LOWER(data ->> '{nameof Category.Empty.Name}')"
|
||||
[ webLogContains webLogId ]
|
||||
fromData<Category>
|
||||
let ordered = Utils.orderByHierarchy cats None None []
|
||||
let counts =
|
||||
let! cats = findByWebLog webLogId
|
||||
let ordered = Utils.orderByHierarchy (cats |> List.sortBy _.Name.ToLowerInvariant()) None None []
|
||||
let counts =
|
||||
ordered
|
||||
|> Seq.map (fun it ->
|
||||
// Parent category post counts include posts in subcategories
|
||||
let catIdSql, catIdParams =
|
||||
let catIdField =
|
||||
ordered
|
||||
|> Seq.filter (fun cat -> cat.ParentNames |> Array.contains it.Name)
|
||||
|> Seq.map _.Id
|
||||
|> Seq.append (Seq.singleton it.Id)
|
||||
|> List.ofSeq
|
||||
|> arrayContains (nameof Post.Empty.CategoryIds) id
|
||||
|> Field.InArray (nameof Post.Empty.CategoryIds) Table.Post
|
||||
let query =
|
||||
(Query.statementWhere
|
||||
(Query.count Table.Post)
|
||||
$"""{Query.whereDataContains "@criteria"} AND {Query.whereByFields All [ catIdField ]}""")
|
||||
.Replace("(*)", $"(DISTINCT data->>'{nameof Post.Empty.Id}')")
|
||||
let postCount =
|
||||
Custom.scalar
|
||||
$"""SELECT COUNT(DISTINCT data ->> '{nameof Post.Empty.Id}') AS it
|
||||
FROM {Table.Post}
|
||||
WHERE {Query.whereDataContains "@criteria"}
|
||||
AND {catIdSql}"""
|
||||
[ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |}; catIdParams ]
|
||||
query
|
||||
(addFieldParams
|
||||
[ catIdField ] [ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |} ])
|
||||
toCount
|
||||
|> Async.AwaitTask
|
||||
|> Async.RunSynchronously
|
||||
@ -71,11 +74,6 @@ type PostgresCategoryData(log: ILogger) =
|
||||
log.LogTrace "Category.findById"
|
||||
Document.findByIdAndWebLog<CategoryId, Category> Table.Category catId webLogId
|
||||
|
||||
/// Find all categories for the given web log
|
||||
let findByWebLog webLogId =
|
||||
log.LogTrace "Category.findByWebLog"
|
||||
Document.findByWebLog<Category> Table.Category webLogId
|
||||
|
||||
/// Delete a category
|
||||
let delete catId webLogId = backgroundTask {
|
||||
log.LogTrace "Category.delete"
|
||||
@ -87,14 +85,14 @@ type PostgresCategoryData(log: ILogger) =
|
||||
if hasChildren then
|
||||
let childQuery, childParams =
|
||||
if cat.ParentId.IsSome then
|
||||
Query.Patch.byId Table.Category,
|
||||
Query.byId (Query.patch Table.Category) "",
|
||||
children
|
||||
|> List.map (fun child -> [ idParam child.Id; jsonParam "@data" {| ParentId = cat.ParentId |} ])
|
||||
else
|
||||
Query.RemoveFields.byId Table.Category,
|
||||
Query.byId (Query.removeFields Table.Category) "",
|
||||
children
|
||||
|> List.map (fun child ->
|
||||
[ idParam child.Id; fieldNameParam [ nameof Category.Empty.ParentId ] ])
|
||||
[ idParam child.Id; fieldNameParams [ nameof Category.Empty.ParentId ] ])
|
||||
let! _ =
|
||||
Configuration.dataSource ()
|
||||
|> Sql.fromDataSource
|
||||
@ -103,7 +101,7 @@ type PostgresCategoryData(log: ILogger) =
|
||||
// Delete the category off all posts where it is assigned
|
||||
let! posts =
|
||||
Custom.list
|
||||
$"SELECT data FROM {Table.Post} WHERE data -> '{nameof Post.Empty.CategoryIds}' @> @id"
|
||||
$"SELECT data FROM {Table.Post} WHERE data->'{nameof Post.Empty.CategoryIds}' @> @id"
|
||||
[ jsonParam "@id" [| string catId |] ]
|
||||
fromData<Post>
|
||||
if not (List.isEmpty posts) then
|
||||
@ -111,7 +109,7 @@ type PostgresCategoryData(log: ILogger) =
|
||||
Configuration.dataSource ()
|
||||
|> Sql.fromDataSource
|
||||
|> Sql.executeTransactionAsync
|
||||
[ Query.Patch.byId Table.Post,
|
||||
[ Query.byId (Query.patch Table.Post) "",
|
||||
posts
|
||||
|> List.map (fun post ->
|
||||
[ idParam post.Id
|
||||
|
@ -83,28 +83,7 @@ let webLogContains webLogId =
|
||||
|
||||
/// A SQL string to select data from a table with the given JSON document contains criteria
|
||||
let selectWithCriteria tableName =
|
||||
$"""{Query.selectFromTable tableName} WHERE {Query.whereDataContains "@criteria"}"""
|
||||
|
||||
/// Create the SQL and parameters for an IN clause
|
||||
let inClause<'T> colNameAndPrefix paramName (items: 'T list) =
|
||||
if List.isEmpty items then "", []
|
||||
else
|
||||
let mutable idx = 0
|
||||
items
|
||||
|> List.skip 1
|
||||
|> List.fold (fun (itemS, itemP) it ->
|
||||
idx <- idx + 1
|
||||
$"{itemS}, @%s{paramName}{idx}", ($"@%s{paramName}{idx}", Sql.string (string it)) :: itemP)
|
||||
(Seq.ofList items
|
||||
|> Seq.map (fun it ->
|
||||
$"%s{colNameAndPrefix} IN (@%s{paramName}0", [ $"@%s{paramName}0", Sql.string (string it) ])
|
||||
|> Seq.head)
|
||||
|> function sql, ps -> $"{sql})", ps
|
||||
|
||||
/// Create the SQL and parameters for match-any array query
|
||||
let arrayContains<'T> name (valueFunc: 'T -> string) (items: 'T list) =
|
||||
$"data['{name}'] ?| @{name}Values",
|
||||
($"@{name}Values", Sql.stringArray (items |> List.map valueFunc |> Array.ofList))
|
||||
Query.byContains (Query.find tableName)
|
||||
|
||||
/// Get the first result of the given query
|
||||
let tryHead<'T> (query: Task<'T list>) = backgroundTask {
|
||||
@ -162,13 +141,9 @@ module Document =
|
||||
/// Find a document by its ID for the given web log
|
||||
let findByIdAndWebLog<'TKey, 'TDoc> table (key: 'TKey) webLogId =
|
||||
Custom.single
|
||||
$"""{Query.selectFromTable table} WHERE {Query.whereById "@id"} AND {Query.whereDataContains "@criteria"}"""
|
||||
$"""{Query.find table} WHERE {Query.whereById "@id"} AND {Query.whereDataContains "@criteria"}"""
|
||||
[ "@id", Sql.string (string key); webLogContains webLogId ]
|
||||
fromData<'TDoc>
|
||||
|
||||
/// Find documents for the given web log
|
||||
let findByWebLog<'TDoc> table webLogId : Task<'TDoc list> =
|
||||
Find.byContains table (webLogDoc webLogId)
|
||||
|
||||
|
||||
/// Functions to support revisions
|
||||
@ -186,7 +161,7 @@ module Revisions =
|
||||
Custom.list
|
||||
$"""SELECT pr.*
|
||||
FROM %s{revTable} pr
|
||||
INNER JOIN %s{entityTable} p ON p.data ->> '{nameof Post.Empty.Id}' = pr.{entityTable}_id
|
||||
INNER JOIN %s{entityTable} p ON p.data->>'{nameof Post.Empty.Id}' = pr.{entityTable}_id
|
||||
WHERE p.{Query.whereDataContains "@criteria"}
|
||||
ORDER BY as_of DESC"""
|
||||
[ webLogContains webLogId ]
|
||||
|
@ -33,6 +33,10 @@ type PostgresPageData(log: ILogger) =
|
||||
log.LogTrace "Page.pageExists"
|
||||
Document.existsByWebLog Table.Page pageId webLogId
|
||||
|
||||
/// The query to get all pages ordered by title
|
||||
let sortedPages =
|
||||
selectWithCriteria Table.Page + Query.orderBy [ Field.Named $"i:{nameof Page.Empty.Title}" ] PostgreSQL
|
||||
|
||||
// IMPLEMENTATION FUNCTIONS
|
||||
|
||||
/// Add a page
|
||||
@ -47,7 +51,7 @@ type PostgresPageData(log: ILogger) =
|
||||
let all webLogId =
|
||||
log.LogTrace "Page.all"
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Page} ORDER BY LOWER(data ->> '{nameof Page.Empty.Title}')"
|
||||
sortedPages
|
||||
[ webLogContains webLogId ]
|
||||
(fun row -> { fromData<Page> row with Text = ""; Metadata = []; PriorPermalinks = [] })
|
||||
|
||||
@ -86,8 +90,8 @@ type PostgresPageData(log: ILogger) =
|
||||
match! pageExists pageId webLogId with
|
||||
| true ->
|
||||
do! Custom.nonQuery
|
||||
$"""DELETE FROM {Table.PageRevision} WHERE page_id = @id;
|
||||
DELETE FROM {Table.Page} WHERE {Query.whereById "@id"}"""
|
||||
$"""{Query.delete Table.PageRevision} WHERE page_id = @id;
|
||||
{Query.delete Table.Page} WHERE {Query.whereById "@id"}"""
|
||||
[ idParam pageId ]
|
||||
return true
|
||||
| false -> return false
|
||||
@ -107,21 +111,19 @@ type PostgresPageData(log: ILogger) =
|
||||
log.LogTrace "Page.findCurrentPermalink"
|
||||
if List.isEmpty permalinks then return None
|
||||
else
|
||||
let linkSql, linkParam = arrayContains (nameof Page.Empty.PriorPermalinks) string permalinks
|
||||
return!
|
||||
Custom.single
|
||||
$"""SELECT data ->> '{nameof Page.Empty.Permalink}' AS permalink
|
||||
FROM page
|
||||
WHERE {Query.whereDataContains "@criteria"}
|
||||
AND {linkSql}"""
|
||||
[ webLogContains webLogId; linkParam ]
|
||||
Map.toPermalink
|
||||
let linkField = Field.InArray (nameof Page.Empty.PriorPermalinks) Table.Page (List.map string permalinks)
|
||||
let query =
|
||||
(Query.statementWhere
|
||||
(Query.find Table.Page)
|
||||
$"""{Query.whereDataContains "@criteria"} AND {Query.whereByFields All [ linkField ]}""")
|
||||
.Replace("SELECT data", $"SELECT data->>'{nameof Page.Empty.Permalink}' AS permalink")
|
||||
return! Custom.single query (addFieldParams [ linkField ] [ webLogContains webLogId ]) Map.toPermalink
|
||||
}
|
||||
|
||||
/// Get all complete pages for the given web log
|
||||
let findFullByWebLog webLogId = backgroundTask {
|
||||
log.LogTrace "Page.findFullByWebLog"
|
||||
let! pages = Document.findByWebLog<Page> Table.Page webLogId
|
||||
let! pages = Find.byContains<Page> Table.Page (webLogDoc webLogId)
|
||||
let! revisions = Revisions.findByWebLog Table.PageRevision Table.Page PageId webLogId
|
||||
return
|
||||
pages
|
||||
@ -133,17 +135,13 @@ type PostgresPageData(log: ILogger) =
|
||||
let findListed webLogId =
|
||||
log.LogTrace "Page.findListed"
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Page} ORDER BY LOWER(data ->> '{nameof Page.Empty.Title}')"
|
||||
[ jsonParam "@criteria" {| webLogDoc webLogId with IsInPageList = true |} ]
|
||||
pageWithoutText
|
||||
sortedPages [ jsonParam "@criteria" {| webLogDoc webLogId with IsInPageList = true |} ] pageWithoutText
|
||||
|
||||
/// Get a page of pages for the given web log (without revisions)
|
||||
let findPageOfPages webLogId pageNbr =
|
||||
log.LogTrace "Page.findPageOfPages"
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Page}
|
||||
ORDER BY LOWER(data->>'{nameof Page.Empty.Title}')
|
||||
LIMIT @pageSize OFFSET @toSkip"
|
||||
$"{sortedPages} LIMIT @pageSize OFFSET @toSkip"
|
||||
[ webLogContains webLogId; "@pageSize", Sql.int 26; "@toSkip", Sql.int ((pageNbr - 1) * 25) ]
|
||||
(fun row -> { fromData<Page> row with Metadata = []; PriorPermalinks = [] })
|
||||
|
||||
|
@ -84,9 +84,9 @@ type PostgresPostData(log: ILogger) =
|
||||
match! postExists postId webLogId with
|
||||
| true ->
|
||||
do! Custom.nonQuery
|
||||
$"""DELETE FROM {Table.PostComment} WHERE {Query.whereDataContains "@criteria"};
|
||||
DELETE FROM {Table.PostRevision} WHERE post_id = @id;
|
||||
DELETE FROM {Table.Post} WHERE {Query.whereById "@id"}"""
|
||||
$"""{Query.delete Table.PostComment} WHERE {Query.whereDataContains "@criteria"};
|
||||
{Query.delete Table.PostRevision} WHERE post_id = @id;
|
||||
{Query.delete Table.Post} WHERE {Query.whereById "@id"}"""
|
||||
[ idParam postId; jsonParam "@criteria" {| PostId = postId |} ]
|
||||
return true
|
||||
| false -> return false
|
||||
@ -97,21 +97,19 @@ type PostgresPostData(log: ILogger) =
|
||||
log.LogTrace "Post.findCurrentPermalink"
|
||||
if List.isEmpty permalinks then return None
|
||||
else
|
||||
let linkSql, linkParam = arrayContains (nameof Post.Empty.PriorPermalinks) string permalinks
|
||||
return!
|
||||
Custom.single
|
||||
$"""SELECT data ->> '{nameof Post.Empty.Permalink}' AS permalink
|
||||
FROM {Table.Post}
|
||||
WHERE {Query.whereDataContains "@criteria"}
|
||||
AND {linkSql}"""
|
||||
[ webLogContains webLogId; linkParam ]
|
||||
Map.toPermalink
|
||||
let linkField = Field.InArray (nameof Post.Empty.PriorPermalinks) Table.Post (List.map string permalinks)
|
||||
let query =
|
||||
(Query.statementWhere
|
||||
(Query.find Table.Post)
|
||||
$"""{Query.whereDataContains "@criteria"} AND {Query.whereByFields All [ linkField ]}""")
|
||||
.Replace("SELECT data", $"SELECT data->>'{nameof Post.Empty.Permalink}' AS permalink")
|
||||
return! Custom.single query (addFieldParams [ linkField ] [ webLogContains webLogId ]) Map.toPermalink
|
||||
}
|
||||
|
||||
/// Get all complete posts for the given web log
|
||||
let findFullByWebLog webLogId = backgroundTask {
|
||||
log.LogTrace "Post.findFullByWebLog"
|
||||
let! posts = Document.findByWebLog<Post> Table.Post webLogId
|
||||
let! posts = Find.byContains<Post> Table.Post (webLogDoc webLogId)
|
||||
let! revisions = Revisions.findByWebLog Table.PostRevision Table.Post PostId webLogId
|
||||
return
|
||||
posts
|
||||
@ -122,23 +120,26 @@ type PostgresPostData(log: ILogger) =
|
||||
/// Get a page of categorized posts for the given web log (excludes revisions)
|
||||
let findPageOfCategorizedPosts webLogId (categoryIds: CategoryId list) pageNbr postsPerPage =
|
||||
log.LogTrace "Post.findPageOfCategorizedPosts"
|
||||
let catSql, catParam = arrayContains (nameof Post.Empty.CategoryIds) string categoryIds
|
||||
let catIdField = Field.InArray (nameof Post.Empty.CategoryIds) Table.Post (List.map string categoryIds)
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Post}
|
||||
AND {catSql}
|
||||
ORDER BY data ->> '{nameof Post.Empty.PublishedOn}' DESC
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
[ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |}; catParam ]
|
||||
$"""{selectWithCriteria Table.Post}
|
||||
AND {Query.whereByFields All [ catIdField ]}
|
||||
{Query.orderBy [ Field.Named $"{nameof Post.Empty.PublishedOn} DESC" ] PostgreSQL}
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"""
|
||||
(addFieldParams [ catIdField] [ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |} ])
|
||||
postWithoutLinks
|
||||
|
||||
/// Get a page of posts for the given web log (excludes text and revisions)
|
||||
let findPageOfPosts webLogId pageNbr postsPerPage =
|
||||
log.LogTrace "Post.findPageOfPosts"
|
||||
let order =
|
||||
Query.orderBy
|
||||
[ Field.Named $"{nameof Post.Empty.PublishedOn} DESC NULLS FIRST"
|
||||
Field.Named (nameof Post.Empty.UpdatedOn) ]
|
||||
PostgreSQL
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Post}
|
||||
ORDER BY data ->> '{nameof Post.Empty.PublishedOn}' DESC NULLS FIRST,
|
||||
data ->> '{nameof Post.Empty.UpdatedOn}'
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
$"{selectWithCriteria Table.Post}{order}
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
[ webLogContains webLogId ]
|
||||
postWithoutText
|
||||
|
||||
@ -146,9 +147,9 @@ type PostgresPostData(log: ILogger) =
|
||||
let findPageOfPublishedPosts webLogId pageNbr postsPerPage =
|
||||
log.LogTrace "Post.findPageOfPublishedPosts"
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Post}
|
||||
ORDER BY data ->> '{nameof Post.Empty.PublishedOn}' DESC
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
$"""{selectWithCriteria Table.Post}
|
||||
{Query.orderBy [ Field.Named $"{nameof Post.Empty.PublishedOn} DESC" ] PostgreSQL}
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"""
|
||||
[ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |} ]
|
||||
postWithoutLinks
|
||||
|
||||
@ -156,10 +157,10 @@ type PostgresPostData(log: ILogger) =
|
||||
let findPageOfTaggedPosts webLogId (tag: string) pageNbr postsPerPage =
|
||||
log.LogTrace "Post.findPageOfTaggedPosts"
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Post}
|
||||
AND data['{nameof Post.Empty.Tags}'] @> @tag
|
||||
ORDER BY data ->> '{nameof Post.Empty.PublishedOn}' DESC
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
$"""{selectWithCriteria Table.Post}
|
||||
AND data['{nameof Post.Empty.Tags}'] @> @tag
|
||||
{Query.orderBy [ Field.Named $"{nameof Post.Empty.PublishedOn} DESC" ] PostgreSQL}
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"""
|
||||
[ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |}; jsonParam "@tag" [| tag |] ]
|
||||
postWithoutLinks
|
||||
|
||||
@ -170,10 +171,10 @@ type PostgresPostData(log: ILogger) =
|
||||
[ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |}
|
||||
"@publishedOn", Sql.timestamptz (publishedOn.ToDateTimeOffset()) ]
|
||||
let query op direction =
|
||||
$"{selectWithCriteria Table.Post}
|
||||
AND (data ->> '{nameof Post.Empty.PublishedOn}')::timestamp with time zone %s{op} @publishedOn
|
||||
ORDER BY data ->> '{nameof Post.Empty.PublishedOn}' %s{direction}
|
||||
LIMIT 1"
|
||||
$"""{selectWithCriteria Table.Post}
|
||||
AND (data->>'{nameof Post.Empty.PublishedOn}')::timestamp with time zone %s{op} @publishedOn
|
||||
{Query.orderBy [ Field.Named $"{nameof Post.Empty.PublishedOn} %s{direction}" ] PostgreSQL}
|
||||
LIMIT 1"""
|
||||
let! older = Custom.list (query "<" "DESC") (queryParams ()) postWithoutLinks
|
||||
let! newer = Custom.list (query ">" "") (queryParams ()) postWithoutLinks
|
||||
return List.tryHead older, List.tryHead newer
|
||||
|
@ -33,18 +33,15 @@ type PostgresTagMapData(log: ILogger) =
|
||||
/// Get all tag mappings for the given web log
|
||||
let findByWebLog webLogId =
|
||||
log.LogTrace "TagMap.findByWebLog"
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.TagMap} ORDER BY data ->> 'tag'"
|
||||
[ webLogContains webLogId ]
|
||||
fromData<TagMap>
|
||||
Find.byContainsOrdered<TagMap> Table.TagMap (webLogDoc webLogId) [ Field.Named (nameof TagMap.Empty.Tag) ]
|
||||
|
||||
/// Find any tag mappings in a list of tags for the given web log
|
||||
let findMappingForTags tags webLogId =
|
||||
let findMappingForTags (tags: string list) webLogId =
|
||||
log.LogTrace "TagMap.findMappingForTags"
|
||||
let tagSql, tagParam = arrayContains (nameof TagMap.Empty.Tag) id tags
|
||||
let tagField = Field.InArray (nameof TagMap.Empty.Tag) Table.TagMap tags
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.TagMap} AND {tagSql}"
|
||||
[ webLogContains webLogId; tagParam ]
|
||||
$"{selectWithCriteria Table.TagMap} AND {Query.whereByFields All [ tagField ]}"
|
||||
(addFieldParams [ tagField ] [ webLogContains webLogId ])
|
||||
fromData<TagMap>
|
||||
|
||||
/// Save a tag mapping
|
||||
|
@ -17,11 +17,11 @@ type PostgresThemeData(log: ILogger) =
|
||||
/// Retrieve all themes (except 'admin'; excludes template text)
|
||||
let all () =
|
||||
log.LogTrace "Theme.all"
|
||||
let fields = [ Field.NotEqual (nameof Theme.Empty.Id) "admin" ]
|
||||
Custom.list
|
||||
$"{Query.selectFromTable Table.Theme}
|
||||
WHERE data ->> '{nameof Theme.Empty.Id}' <> 'admin'
|
||||
ORDER BY data ->> '{nameof Theme.Empty.Id}'"
|
||||
[]
|
||||
(Query.byFields (Query.find Table.Theme) Any fields
|
||||
+ Query.orderBy [ Field.Named (nameof Theme.Empty.Id) ] PostgreSQL)
|
||||
(addFieldParams fields [])
|
||||
withoutTemplateText
|
||||
|
||||
/// Does a given theme exist?
|
||||
@ -37,7 +37,7 @@ type PostgresThemeData(log: ILogger) =
|
||||
/// Find a theme by its ID (excludes the text of templates)
|
||||
let findByIdWithoutText (themeId: ThemeId) =
|
||||
log.LogTrace "Theme.findByIdWithoutText"
|
||||
Custom.single (Query.Find.byId Table.Theme) [ idParam themeId ] withoutTemplateText
|
||||
Custom.single (Query.byId (Query.find Table.Theme) (string themeId)) [ idParam themeId ] withoutTemplateText
|
||||
|
||||
/// Delete a theme by its ID
|
||||
let delete themeId = backgroundTask {
|
||||
@ -45,8 +45,8 @@ type PostgresThemeData(log: ILogger) =
|
||||
match! exists themeId with
|
||||
| true ->
|
||||
do! Custom.nonQuery
|
||||
$"""DELETE FROM {Table.ThemeAsset} WHERE theme_id = @id;
|
||||
DELETE FROM {Table.Theme} WHERE {Query.whereById "@id"}"""
|
||||
$"""{Query.delete Table.ThemeAsset} WHERE theme_id = @id;
|
||||
{Query.delete Table.Theme} WHERE {Query.whereById "@id"}"""
|
||||
[ idParam themeId ]
|
||||
return true
|
||||
| false -> return false
|
||||
@ -77,7 +77,7 @@ type PostgresThemeAssetData(log: ILogger) =
|
||||
/// Delete all assets for the given theme
|
||||
let deleteByTheme (themeId: ThemeId) =
|
||||
log.LogTrace "ThemeAsset.deleteByTheme"
|
||||
Custom.nonQuery $"DELETE FROM {Table.ThemeAsset} WHERE theme_id = @id" [ idParam themeId ]
|
||||
Custom.nonQuery $"{Query.delete Table.ThemeAsset} WHERE theme_id = @id" [ idParam themeId ]
|
||||
|
||||
/// Find a theme asset by its ID
|
||||
let findById assetId =
|
||||
|
@ -23,22 +23,22 @@ type PostgresWebLogData(log: ILogger) =
|
||||
let delete webLogId =
|
||||
log.LogTrace "WebLog.delete"
|
||||
Custom.nonQuery
|
||||
$"""DELETE FROM {Table.PostComment}
|
||||
WHERE data ->> '{nameof Comment.Empty.PostId}'
|
||||
IN (SELECT data ->> '{nameof Post.Empty.Id}'
|
||||
FROM {Table.Post}
|
||||
WHERE {Query.whereDataContains "@criteria"});
|
||||
DELETE FROM {Table.PostRevision}
|
||||
WHERE post_id IN (SELECT data ->> 'Id' FROM {Table.Post} WHERE {Query.whereDataContains "@criteria"});
|
||||
DELETE FROM {Table.PageRevision}
|
||||
WHERE page_id IN (SELECT data ->> 'Id' FROM {Table.Page} WHERE {Query.whereDataContains "@criteria"});
|
||||
{Query.Delete.byContains Table.Post};
|
||||
{Query.Delete.byContains Table.Page};
|
||||
{Query.Delete.byContains Table.Category};
|
||||
{Query.Delete.byContains Table.TagMap};
|
||||
{Query.Delete.byContains Table.WebLogUser};
|
||||
DELETE FROM {Table.Upload} WHERE web_log_id = @webLogId;
|
||||
DELETE FROM {Table.WebLog} WHERE {Query.whereById "@webLogId"}"""
|
||||
$"""{Query.delete Table.PostComment}
|
||||
WHERE data->>'{nameof Comment.Empty.PostId}'
|
||||
IN (SELECT data->>'{nameof Post.Empty.Id}'
|
||||
FROM {Table.Post}
|
||||
WHERE {Query.whereDataContains "@criteria"});
|
||||
{Query.delete Table.PostRevision}
|
||||
WHERE post_id IN (SELECT data->>'Id' FROM {Table.Post} WHERE {Query.whereDataContains "@criteria"});
|
||||
{Query.delete Table.PageRevision}
|
||||
WHERE page_id IN (SELECT data->>'Id' FROM {Table.Page} WHERE {Query.whereDataContains "@criteria"});
|
||||
{Query.byContains (Query.delete Table.Post)};
|
||||
{Query.byContains (Query.delete Table.Page)};
|
||||
{Query.byContains (Query.delete Table.Category)};
|
||||
{Query.byContains (Query.delete Table.TagMap)};
|
||||
{Query.byContains (Query.delete Table.WebLogUser)};
|
||||
{Query.delete Table.Upload} WHERE web_log_id = @webLogId;
|
||||
{Query.delete Table.WebLog} WHERE data->>'Id' = @webLogId"""
|
||||
[ webLogIdParam webLogId; webLogContains webLogId ]
|
||||
|
||||
/// Find a web log by its host (URL base)
|
||||
|
@ -49,19 +49,17 @@ type PostgresWebLogUserData(log: ILogger) =
|
||||
/// Get all users for the given web log
|
||||
let findByWebLog webLogId =
|
||||
log.LogTrace "WebLogUser.findByWebLog"
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.WebLogUser} ORDER BY LOWER(data ->> '{nameof WebLogUser.Empty.PreferredName}')"
|
||||
[ webLogContains webLogId ]
|
||||
fromData<WebLogUser>
|
||||
Find.byContainsOrdered<WebLogUser>
|
||||
Table.WebLogUser (webLogDoc webLogId) [ Field.Named $"i:{nameof WebLogUser.Empty.PreferredName}" ]
|
||||
|
||||
/// Find the names of users by their IDs for the given web log
|
||||
let findNames webLogId (userIds: WebLogUserId list) = backgroundTask {
|
||||
log.LogTrace "WebLogUser.findNames"
|
||||
let idSql, idParams = inClause $"AND data ->> '{nameof WebLogUser.Empty.Id}'" "id" userIds
|
||||
let idField = Field.In (nameof WebLogUser.Empty.Id) (List.map string userIds)
|
||||
let! users =
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.WebLogUser} {idSql}"
|
||||
(webLogContains webLogId :: idParams)
|
||||
$"{selectWithCriteria Table.WebLogUser} AND {Query.whereByFields All [ idField ]}"
|
||||
(addFieldParams [ idField ] [ webLogContains webLogId ])
|
||||
fromData<WebLogUser>
|
||||
return users |> List.map (fun u -> { Name = string u.Id; Value = u.DisplayName })
|
||||
}
|
||||
|
@ -10,22 +10,22 @@ open Npgsql.FSharp
|
||||
|
||||
/// Data implementation for PostgreSQL
|
||||
type PostgresData(log: ILogger<PostgresData>, ser: JsonSerializer) =
|
||||
|
||||
|
||||
/// Create any needed tables
|
||||
let ensureTables () = backgroundTask {
|
||||
// Set up the PostgreSQL document store
|
||||
Configuration.useSerializer (Utils.createDocumentSerializer ser)
|
||||
|
||||
|
||||
let! tables =
|
||||
Custom.list
|
||||
"SELECT tablename FROM pg_tables WHERE schemaname = 'public'" [] (fun row -> row.string "tablename")
|
||||
let needsTable table = not (List.contains table tables)
|
||||
|
||||
|
||||
let sql = seq {
|
||||
// Theme tables
|
||||
if needsTable Table.Theme then
|
||||
Query.Definition.ensureTable Table.Theme
|
||||
Query.Definition.ensureKey Table.Theme
|
||||
Query.Definition.ensureKey Table.Theme PostgreSQL
|
||||
if needsTable Table.ThemeAsset then
|
||||
$"CREATE TABLE {Table.ThemeAsset} (
|
||||
theme_id TEXT NOT NULL,
|
||||
@ -33,50 +33,51 @@ type PostgresData(log: ILogger<PostgresData>, ser: JsonSerializer) =
|
||||
updated_on TIMESTAMPTZ NOT NULL,
|
||||
data BYTEA NOT NULL,
|
||||
PRIMARY KEY (theme_id, path))"
|
||||
|
||||
|
||||
// Web log table
|
||||
if needsTable Table.WebLog then
|
||||
Query.Definition.ensureTable Table.WebLog
|
||||
Query.Definition.ensureKey Table.WebLog
|
||||
Query.Definition.ensureKey Table.WebLog PostgreSQL
|
||||
Query.Definition.ensureDocumentIndex Table.WebLog Optimized
|
||||
|
||||
|
||||
// Category table
|
||||
if needsTable Table.Category then
|
||||
Query.Definition.ensureTable Table.Category
|
||||
Query.Definition.ensureKey Table.Category
|
||||
Query.Definition.ensureKey Table.Category PostgreSQL
|
||||
Query.Definition.ensureDocumentIndex Table.Category Optimized
|
||||
|
||||
|
||||
// Web log user table
|
||||
if needsTable Table.WebLogUser then
|
||||
Query.Definition.ensureTable Table.WebLogUser
|
||||
Query.Definition.ensureKey Table.WebLogUser
|
||||
Query.Definition.ensureKey Table.WebLogUser PostgreSQL
|
||||
Query.Definition.ensureDocumentIndex Table.WebLogUser Optimized
|
||||
|
||||
|
||||
// Page tables
|
||||
if needsTable Table.Page then
|
||||
Query.Definition.ensureTable Table.Page
|
||||
Query.Definition.ensureKey Table.Page
|
||||
Query.Definition.ensureIndexOn Table.Page "author" [ nameof Page.Empty.AuthorId ]
|
||||
Query.Definition.ensureKey Table.Page PostgreSQL
|
||||
Query.Definition.ensureIndexOn Table.Page "author" [ nameof Page.Empty.AuthorId ] PostgreSQL
|
||||
Query.Definition.ensureIndexOn
|
||||
Table.Page "permalink" [ nameof Page.Empty.WebLogId; nameof Page.Empty.Permalink ]
|
||||
Table.Page "permalink" [ nameof Page.Empty.WebLogId; nameof Page.Empty.Permalink ] PostgreSQL
|
||||
if needsTable Table.PageRevision then
|
||||
$"CREATE TABLE {Table.PageRevision} (
|
||||
page_id TEXT NOT NULL,
|
||||
as_of TIMESTAMPTZ NOT NULL,
|
||||
revision_text TEXT NOT NULL,
|
||||
PRIMARY KEY (page_id, as_of))"
|
||||
|
||||
|
||||
// Post tables
|
||||
if needsTable Table.Post then
|
||||
Query.Definition.ensureTable Table.Post
|
||||
Query.Definition.ensureKey Table.Post
|
||||
Query.Definition.ensureIndexOn Table.Post "author" [ nameof Post.Empty.AuthorId ]
|
||||
Query.Definition.ensureKey Table.Post PostgreSQL
|
||||
Query.Definition.ensureIndexOn Table.Post "author" [ nameof Post.Empty.AuthorId ] PostgreSQL
|
||||
Query.Definition.ensureIndexOn
|
||||
Table.Post "permalink" [ nameof Post.Empty.WebLogId; nameof Post.Empty.Permalink ]
|
||||
Table.Post "permalink" [ nameof Post.Empty.WebLogId; nameof Post.Empty.Permalink ] PostgreSQL
|
||||
Query.Definition.ensureIndexOn
|
||||
Table.Post
|
||||
"status"
|
||||
[ nameof Post.Empty.WebLogId; nameof Post.Empty.Status; nameof Post.Empty.UpdatedOn ]
|
||||
PostgreSQL
|
||||
$"CREATE INDEX idx_post_category ON {Table.Post} USING GIN ((data['{nameof Post.Empty.CategoryIds}']))"
|
||||
$"CREATE INDEX idx_post_tag ON {Table.Post} USING GIN ((data['{nameof Post.Empty.Tags}']))"
|
||||
if needsTable Table.PostRevision then
|
||||
@ -87,15 +88,15 @@ type PostgresData(log: ILogger<PostgresData>, ser: JsonSerializer) =
|
||||
PRIMARY KEY (post_id, as_of))"
|
||||
if needsTable Table.PostComment then
|
||||
Query.Definition.ensureTable Table.PostComment
|
||||
Query.Definition.ensureKey Table.PostComment
|
||||
Query.Definition.ensureIndexOn Table.PostComment "post" [ nameof Comment.Empty.PostId ]
|
||||
|
||||
Query.Definition.ensureKey Table.PostComment PostgreSQL
|
||||
Query.Definition.ensureIndexOn Table.PostComment "post" [ nameof Comment.Empty.PostId ] PostgreSQL
|
||||
|
||||
// Tag map table
|
||||
if needsTable Table.TagMap then
|
||||
Query.Definition.ensureTable Table.TagMap
|
||||
Query.Definition.ensureKey Table.TagMap
|
||||
Query.Definition.ensureKey Table.TagMap PostgreSQL
|
||||
Query.Definition.ensureDocumentIndex Table.TagMap Optimized
|
||||
|
||||
|
||||
// Uploaded file table
|
||||
if needsTable Table.Upload then
|
||||
$"CREATE TABLE {Table.Upload} (
|
||||
@ -106,13 +107,13 @@ type PostgresData(log: ILogger<PostgresData>, ser: JsonSerializer) =
|
||||
data BYTEA NOT NULL)"
|
||||
$"CREATE INDEX idx_upload_web_log ON {Table.Upload} (web_log_id)"
|
||||
$"CREATE INDEX idx_upload_path ON {Table.Upload} (web_log_id, path)"
|
||||
|
||||
|
||||
// Database version table
|
||||
if needsTable Table.DbVersion then
|
||||
$"CREATE TABLE {Table.DbVersion} (id TEXT NOT NULL PRIMARY KEY)"
|
||||
$"INSERT INTO {Table.DbVersion} VALUES ('{Utils.Migration.currentDbVersion}')"
|
||||
}
|
||||
|
||||
|
||||
Configuration.dataSource ()
|
||||
|> Sql.fromDataSource
|
||||
|> Sql.executeTransactionAsync
|
||||
@ -127,13 +128,13 @@ type PostgresData(log: ILogger<PostgresData>, ser: JsonSerializer) =
|
||||
|> Async.RunSynchronously
|
||||
|> ignore
|
||||
}
|
||||
|
||||
|
||||
/// Set a specific database version
|
||||
let setDbVersion version = backgroundTask {
|
||||
do! Custom.nonQuery $"DELETE FROM db_version; INSERT INTO db_version VALUES ('%s{version}')" []
|
||||
return version
|
||||
}
|
||||
|
||||
|
||||
/// Migrate from v2-rc2 to v2 (manual migration required)
|
||||
let migrateV2Rc2ToV2 () = backgroundTask {
|
||||
let! webLogs =
|
||||
@ -151,10 +152,11 @@ type PostgresData(log: ILogger<PostgresData>, ser: JsonSerializer) =
|
||||
let tables =
|
||||
[ Table.Category; Table.Page; Table.Post; Table.PostComment; Table.TagMap; Table.Theme; Table.WebLog
|
||||
Table.WebLogUser ]
|
||||
|
||||
|
||||
Utils.Migration.logStep log migration "Adding unique indexes on ID fields"
|
||||
do! Custom.nonQuery (tables |> List.map Query.Definition.ensureKey |> String.concat "; ") []
|
||||
|
||||
do! Custom.nonQuery
|
||||
(tables |> List.map (fun it -> Query.Definition.ensureKey it PostgreSQL) |> String.concat "; ") []
|
||||
|
||||
Utils.Migration.logStep log migration "Removing constraints"
|
||||
let fkToDrop =
|
||||
[ "page_revision", "page_revision_page_id_fkey"
|
||||
@ -174,39 +176,40 @@ type PostgresData(log: ILogger<PostgresData>, ser: JsonSerializer) =
|
||||
|> List.map (fun (tbl, fk) -> $"ALTER TABLE {tbl} DROP CONSTRAINT {fk}")
|
||||
|> String.concat "; ")
|
||||
[]
|
||||
|
||||
|
||||
Utils.Migration.logStep log migration "Dropping old indexes"
|
||||
let toDrop =
|
||||
[ "idx_category"; "page_author_idx"; "page_permalink_idx"; "page_web_log_idx"; "post_author_idx"
|
||||
"post_category_idx"; "post_permalink_idx"; "post_status_idx"; "post_tag_idx"; "post_web_log_idx"
|
||||
"post_comment_post_idx"; "idx_tag_map"; "idx_web_log"; "idx_web_log_user" ]
|
||||
do! Custom.nonQuery (toDrop |> List.map (sprintf "DROP INDEX %s") |> String.concat "; ") []
|
||||
|
||||
|
||||
Utils.Migration.logStep log migration "Dropping old ID columns"
|
||||
do! Custom.nonQuery (tables |> List.map (sprintf "ALTER TABLE %s DROP COLUMN id") |> String.concat "; ") []
|
||||
|
||||
|
||||
Utils.Migration.logStep log migration "Adding new indexes"
|
||||
let newIdx =
|
||||
[ yield! tables |> List.map Query.Definition.ensureKey
|
||||
[ yield! tables |> List.map (fun it -> Query.Definition.ensureKey it PostgreSQL)
|
||||
Query.Definition.ensureDocumentIndex Table.Category Optimized
|
||||
Query.Definition.ensureDocumentIndex Table.TagMap Optimized
|
||||
Query.Definition.ensureDocumentIndex Table.WebLog Optimized
|
||||
Query.Definition.ensureDocumentIndex Table.WebLogUser Optimized
|
||||
Query.Definition.ensureIndexOn Table.Page "author" [ nameof Page.Empty.AuthorId ]
|
||||
Query.Definition.ensureIndexOn Table.Page "author" [ nameof Page.Empty.AuthorId ] PostgreSQL
|
||||
Query.Definition.ensureIndexOn
|
||||
Table.Page "permalink" [ nameof Page.Empty.WebLogId; nameof Page.Empty.Permalink ]
|
||||
Query.Definition.ensureIndexOn Table.Post "author" [ nameof Post.Empty.AuthorId ]
|
||||
Table.Page "permalink" [ nameof Page.Empty.WebLogId; nameof Page.Empty.Permalink ] PostgreSQL
|
||||
Query.Definition.ensureIndexOn Table.Post "author" [ nameof Post.Empty.AuthorId ] PostgreSQL
|
||||
Query.Definition.ensureIndexOn
|
||||
Table.Post "permalink" [ nameof Post.Empty.WebLogId; nameof Post.Empty.Permalink ]
|
||||
Table.Post "permalink" [ nameof Post.Empty.WebLogId; nameof Post.Empty.Permalink ] PostgreSQL
|
||||
Query.Definition.ensureIndexOn
|
||||
Table.Post
|
||||
"status"
|
||||
[ nameof Post.Empty.WebLogId; nameof Post.Empty.Status; nameof Post.Empty.UpdatedOn ]
|
||||
PostgreSQL
|
||||
$"CREATE INDEX idx_post_category ON {Table.Post} USING GIN ((data['{nameof Post.Empty.CategoryIds}']))"
|
||||
$"CREATE INDEX idx_post_tag ON {Table.Post} USING GIN ((data['{nameof Post.Empty.Tags}']))"
|
||||
Query.Definition.ensureIndexOn Table.PostComment "post" [ nameof Comment.Empty.PostId ] ]
|
||||
Query.Definition.ensureIndexOn Table.PostComment "post" [ nameof Comment.Empty.PostId ] PostgreSQL ]
|
||||
do! Custom.nonQuery (newIdx |> String.concat "; ") []
|
||||
|
||||
|
||||
Utils.Migration.logStep log migration "Setting database to version 2.1.1"
|
||||
return! setDbVersion "v2.1.1"
|
||||
}
|
||||
@ -221,33 +224,45 @@ type PostgresData(log: ILogger<PostgresData>, ser: JsonSerializer) =
|
||||
return! setDbVersion "v2.2"
|
||||
}
|
||||
|
||||
/// Migrate from v2.2 to v3
|
||||
let migrateV2point2ToV3 () = backgroundTask {
|
||||
Utils.Migration.logStep log "v2.2 to v3" "Adding auto-OpenGraph flag to all web logs"
|
||||
do! Patch.byFields Table.WebLog Any [ Field.Exists (nameof WebLog.Empty.Id) ] {| AutoOpenGraph = true |}
|
||||
Utils.Migration.logStep log "v2.2 to v3" "Setting database version to v3"
|
||||
return! setDbVersion "v3"
|
||||
}
|
||||
|
||||
/// Do required data migration between versions
|
||||
let migrate version = backgroundTask {
|
||||
let mutable v = defaultArg version ""
|
||||
|
||||
if v = "v2-rc2" then
|
||||
if v = "v2-rc2" then
|
||||
let! webLogs =
|
||||
Custom.list
|
||||
$"SELECT url_base, slug FROM {Table.WebLog}" []
|
||||
(fun row -> row.string "url_base", row.string "slug")
|
||||
Utils.Migration.backupAndRestoreRequired log "v2-rc2" "v2" webLogs
|
||||
|
||||
|
||||
if v = "v2" then
|
||||
let! ver = migrateV2ToV2point1point1 ()
|
||||
v <- ver
|
||||
|
||||
|
||||
if v = "v2.1.1" then
|
||||
let! ver = migrateV2point1point1ToV2point2 ()
|
||||
v <- ver
|
||||
|
||||
|
||||
if v = "v2.2" then
|
||||
let! ver = migrateV2point2ToV3 ()
|
||||
v <- ver
|
||||
|
||||
if v <> Utils.Migration.currentDbVersion then
|
||||
log.LogWarning $"Unknown database version; assuming {Utils.Migration.currentDbVersion}"
|
||||
let! _ = setDbVersion Utils.Migration.currentDbVersion
|
||||
()
|
||||
}
|
||||
|
||||
|
||||
interface IData with
|
||||
|
||||
|
||||
member _.Category = PostgresCategoryData log
|
||||
member _.Page = PostgresPageData log
|
||||
member _.Post = PostgresPostData log
|
||||
@ -257,13 +272,13 @@ type PostgresData(log: ILogger<PostgresData>, ser: JsonSerializer) =
|
||||
member _.Upload = PostgresUploadData log
|
||||
member _.WebLog = PostgresWebLogData log
|
||||
member _.WebLogUser = PostgresWebLogUserData log
|
||||
|
||||
|
||||
member _.Serializer = ser
|
||||
|
||||
|
||||
member _.StartUp () = backgroundTask {
|
||||
log.LogTrace "PostgresData.StartUp"
|
||||
do! ensureTables ()
|
||||
|
||||
|
||||
let! version = Custom.single "SELECT id FROM db_version" [] (fun row -> row.string "id")
|
||||
do! migrate version
|
||||
}
|
||||
|
@ -6,38 +6,38 @@ open RethinkDb.Driver
|
||||
|
||||
/// Functions to assist with retrieving data
|
||||
module private RethinkHelpers =
|
||||
|
||||
|
||||
/// Table names
|
||||
[<RequireQualifiedAccess>]
|
||||
module Table =
|
||||
|
||||
|
||||
/// The category table
|
||||
let Category = "Category"
|
||||
|
||||
/// The comment table
|
||||
let Comment = "Comment"
|
||||
|
||||
|
||||
/// The database version table
|
||||
let DbVersion = "DbVersion"
|
||||
|
||||
|
||||
/// The page table
|
||||
let Page = "Page"
|
||||
|
||||
|
||||
/// The post table
|
||||
let Post = "Post"
|
||||
|
||||
|
||||
/// The tag map table
|
||||
let TagMap = "TagMap"
|
||||
|
||||
|
||||
/// The theme table
|
||||
let Theme = "Theme"
|
||||
|
||||
|
||||
/// The theme asset table
|
||||
let ThemeAsset = "ThemeAsset"
|
||||
|
||||
|
||||
/// The uploaded file table
|
||||
let Upload = "Upload"
|
||||
|
||||
|
||||
/// The web log table
|
||||
let WebLog = "WebLog"
|
||||
|
||||
@ -47,24 +47,24 @@ module private RethinkHelpers =
|
||||
/// A list of all tables
|
||||
let all = [ Category; Comment; DbVersion; Page; Post; TagMap; Theme; ThemeAsset; Upload; WebLog; WebLogUser ]
|
||||
|
||||
|
||||
|
||||
/// Index names for indexes not on a data item's name
|
||||
[<RequireQualifiedAccess>]
|
||||
module Index =
|
||||
|
||||
|
||||
/// An index by web log ID and e-mail address
|
||||
let LogOn = "LogOn"
|
||||
|
||||
|
||||
/// An index by web log ID and uploaded file path
|
||||
let WebLogAndPath = "WebLogAndPath"
|
||||
|
||||
|
||||
/// An index by web log ID and mapped tag
|
||||
let WebLogAndTag = "WebLogAndTag"
|
||||
|
||||
|
||||
/// An index by web log ID and tag URL value
|
||||
let WebLogAndUrl = "WebLogAndUrl"
|
||||
|
||||
|
||||
|
||||
/// Shorthand for the ReQL starting point
|
||||
let r = RethinkDB.R
|
||||
|
||||
@ -73,14 +73,14 @@ module private RethinkHelpers =
|
||||
fun conn -> backgroundTask {
|
||||
match! f conn with Some it when (prop it) = webLogId -> return Some it | _ -> return None
|
||||
}
|
||||
|
||||
|
||||
/// Get the first item from a list, or None if the list is empty
|
||||
let tryFirst<'T> (f: Net.IConnection -> Task<'T list>) =
|
||||
fun conn -> backgroundTask {
|
||||
let! results = f conn
|
||||
return results |> List.tryHead
|
||||
}
|
||||
|
||||
|
||||
/// Cast a strongly-typed list to an object list
|
||||
let objList<'T> (objects: 'T list) = objects |> List.map (fun it -> it :> obj)
|
||||
|
||||
@ -93,16 +93,16 @@ open RethinkHelpers
|
||||
|
||||
/// RethinkDB implementation of data functions for myWebLog
|
||||
type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<RethinkDbData>) =
|
||||
|
||||
|
||||
/// Match theme asset IDs by their prefix (the theme ID)
|
||||
let matchAssetByThemeId themeId =
|
||||
let keyPrefix = $"^{themeId}/"
|
||||
fun (row: Ast.ReqlExpr) -> row[nameof ThemeAsset.Empty.Id].Match keyPrefix :> obj
|
||||
|
||||
|
||||
/// Function to exclude template text from themes
|
||||
let withoutTemplateText (row: Ast.ReqlExpr) : obj =
|
||||
{| Templates = row[nameof Theme.Empty.Templates].Merge(r.HashMap(nameof ThemeTemplate.Empty.Text, "")) |}
|
||||
|
||||
|
||||
/// Ensure field indexes exist, as well as special indexes for selected tables
|
||||
let ensureIndexes table fields = backgroundTask {
|
||||
let! indexes = rethink<string list> { withTable table; indexList; result; withRetryOnce conn }
|
||||
@ -180,13 +180,13 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
}
|
||||
do! rethink { withTable table; indexWait; result; withRetryDefault; ignoreResult conn }
|
||||
}
|
||||
|
||||
|
||||
/// The batch size for restoration methods
|
||||
let restoreBatchSize = 100
|
||||
|
||||
|
||||
/// A value to use when files need to be retrieved without their data
|
||||
let emptyFile = r.Binary(Array.Empty<byte>())
|
||||
|
||||
|
||||
/// Delete assets for the given theme ID
|
||||
let deleteAssetsByTheme themeId = rethink {
|
||||
withTable Table.ThemeAsset
|
||||
@ -194,7 +194,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
delete
|
||||
write; withRetryDefault; ignoreResult conn
|
||||
}
|
||||
|
||||
|
||||
/// Set a specific database version
|
||||
let setDbVersion (version: string) = backgroundTask {
|
||||
do! rethink {
|
||||
@ -208,7 +208,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
write; withRetryOnce; ignoreResult conn
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// Migrate from v2-rc1 to v2-rc2
|
||||
let migrateV2Rc1ToV2Rc2 () = backgroundTask {
|
||||
let logStep = Utils.Migration.logStep log "v2-rc1 to v2-rc2"
|
||||
@ -233,11 +233,11 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
update [ nameof WebLog.Empty.RedirectRules, [] :> obj ]
|
||||
write; withRetryOnce; ignoreResult conn
|
||||
}
|
||||
|
||||
|
||||
Utils.Migration.logStep log "v2 to v2.1" "Setting database version to v2.1"
|
||||
do! setDbVersion "v2.1"
|
||||
}
|
||||
|
||||
|
||||
/// Migrate from v2.1 to v2.1.1
|
||||
let migrateV2point1ToV2point1point1 () = backgroundTask {
|
||||
Utils.Migration.logStep log "v2.1 to v2.1.1" "Setting database version; no migration required"
|
||||
@ -256,10 +256,22 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
do! setDbVersion "v2.2"
|
||||
}

    /// Migrate from v2.2 to v3
    let migrateV2point2ToV3 () = backgroundTask {
        Utils.Migration.logStep log "v2.2 to v3" "Adding auto-OpenGraph flag to all web logs"
        do! rethink {
            withTable Table.WebLog
            update [ nameof WebLog.Empty.AutoOpenGraph, true :> obj ]
            write; withRetryOnce; ignoreResult conn
        }
        Utils.Migration.logStep log "v2.2 to v3" "Setting database version to v3"
        do! setDbVersion "v3"
    }

    /// Migrate data between versions
    let migrate version = backgroundTask {
        let mutable v = defaultArg version ""

        if v = "v2-rc1" then
            do! migrateV2Rc1ToV2Rc2 ()
            v <- "v2-rc2"
@ -267,38 +279,42 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
        if v = "v2-rc2" then
            do! migrateV2Rc2ToV2 ()
            v <- "v2"

        if v = "v2" then
            do! migrateV2ToV2point1 ()
            v <- "v2.1"

        if v = "v2.1" then
            do! migrateV2point1ToV2point1point1 ()
            v <- "v2.1.1"

        if v = "v2.1.1" then
            do! migrateV2point1point1ToV2point2 ()
            v <- "v2.2"

        if v = "v2.2" then
            do! migrateV2point2ToV3 ()
            v <- "v3"

        if v <> Utils.Migration.currentDbVersion then
            log.LogWarning $"Unknown database version; assuming {Utils.Migration.currentDbVersion}"
            do! setDbVersion Utils.Migration.currentDbVersion
    }

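Each upgrade follows the same shape: a step function that applies its changes and bumps the stored version, plus one more `if` link in the `migrate` chain. A hedged sketch of what a hypothetical future step might look like; the function name, log text, and "v3.1" version string are illustrative and are not part of this change:

```fsharp
// Hypothetical example only; "v3.1" does not exist in this diff.
let migrateV3ToV3point1 () = backgroundTask {
    Utils.Migration.logStep log "v3 to v3.1" "Setting database version; no migration required"
    do! setDbVersion "v3.1"
}

// ...and the matching link that would be appended to the migrate chain:
//     if v = "v3" then
//         do! migrateV3ToV3point1 ()
//         v <- "v3.1"
```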
/// The connection for this instance
|
||||
member _.Conn = conn
|
||||
|
||||
|
||||
interface IData with
|
||||
|
||||
|
||||
member _.Category = {
|
||||
new ICategoryData with
|
||||
|
||||
|
||||
member _.Add cat = rethink {
|
||||
withTable Table.Category
|
||||
insert cat
|
||||
write; withRetryDefault; ignoreResult conn
|
||||
}
|
||||
|
||||
|
||||
member _.CountAll webLogId = rethink<int> {
|
||||
withTable Table.Category
|
||||
getAll [ webLogId ] (nameof Category.Empty.WebLogId)
|
||||
@ -313,7 +329,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
count
|
||||
result; withRetryDefault conn
|
||||
}
|
||||
|
||||
|
||||
member _.FindAllForView webLogId = backgroundTask {
|
||||
let! cats = rethink<Category list> {
|
||||
withTable Table.Category
|
||||
@ -353,7 +369,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
|> Option.defaultValue 0 })
|
||||
|> Array.ofSeq
|
||||
}
|
||||
|
||||
|
||||
member _.FindById catId webLogId =
|
||||
rethink<Category> {
|
||||
withTable Table.Category
|
||||
@ -361,13 +377,13 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
resultOption; withRetryOptionDefault
|
||||
}
|
||||
|> verifyWebLog webLogId _.WebLogId <| conn
|
||||
|
||||
|
||||
member _.FindByWebLog webLogId = rethink<Category list> {
|
||||
withTable Table.Category
|
||||
getAll [ webLogId ] (nameof Category.Empty.WebLogId)
|
||||
result; withRetryDefault conn
|
||||
}
|
||||
|
||||
|
||||
member this.Delete catId webLogId = backgroundTask {
|
||||
match! this.FindById catId webLogId with
|
||||
| Some cat ->
|
||||
@ -394,7 +410,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
{| CategoryIds =
|
||||
row[nameof Post.Empty.CategoryIds].CoerceTo("array")
|
||||
.SetDifference(r.Array(catId)) |} :> obj)
|
||||
write; withRetryDefault; ignoreResult conn
|
||||
write; withRetryDefault; ignoreResult conn
|
||||
}
|
||||
// Delete the category itself
|
||||
do! rethink {
|
||||
@ -406,7 +422,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
return if children = 0 then CategoryDeleted else ReassignedChildCategories
|
||||
| None -> return CategoryNotFound
|
||||
}
|
||||
|
||||
|
||||
member _.Restore cats = backgroundTask {
|
||||
for batch in cats |> List.chunkBySize restoreBatchSize do
|
||||
do! rethink {
|
||||
@ -415,7 +431,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
write; withRetryOnce; ignoreResult conn
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
member _.Update cat = rethink {
|
||||
withTable Table.Category
|
||||
get cat.Id
|
||||
@ -427,10 +443,10 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
write; withRetryDefault; ignoreResult conn
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
member _.Page = {
|
||||
new IPageData with
|
||||
|
||||
|
||||
member _.Add page = rethink {
|
||||
withTable Table.Page
|
||||
insert page
|
||||
@ -447,7 +463,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
orderByFunc (fun row -> row[nameof Page.Empty.Title].Downcase() :> obj)
|
||||
result; withRetryDefault conn
|
||||
}
|
||||
|
||||
|
||||
member _.CountAll webLogId = rethink<int> {
|
||||
withTable Table.Page
|
||||
getAll [ webLogId ] (nameof Page.Empty.WebLogId)
|
||||
@ -473,7 +489,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
}
|
||||
return result.Deleted > 0UL
|
||||
}
|
||||
|
||||
|
||||
member _.FindById pageId webLogId =
|
||||
rethink<Page list> {
|
||||
withTable Table.Page
|
||||
@ -495,7 +511,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
result; withRetryDefault
|
||||
}
|
||||
|> tryFirst <| conn
|
||||
|
||||
|
||||
member _.FindCurrentPermalink permalinks webLogId = backgroundTask {
|
||||
let! result =
|
||||
(rethink<Page list> {
|
||||
@ -509,7 +525,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
|> tryFirst) conn
|
||||
return result |> Option.map _.Permalink
|
||||
}
|
||||
|
||||
|
||||
member _.FindFullById pageId webLogId =
|
||||
rethink<Page> {
|
||||
withTable Table.Page
|
||||
@ -517,13 +533,13 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
resultOption; withRetryOptionDefault
|
||||
}
|
||||
|> verifyWebLog webLogId _.WebLogId <| conn
|
||||
|
||||
|
||||
member _.FindFullByWebLog webLogId = rethink<Page> {
|
||||
withTable Table.Page
|
||||
getAll [ webLogId ] (nameof Page.Empty.WebLogId)
|
||||
resultCursor; withRetryCursorDefault; toList conn
|
||||
}
|
||||
|
||||
|
||||
member _.FindListed webLogId = rethink<Page list> {
|
||||
withTable Table.Page
|
||||
getAll [ webLogId ] (nameof Page.Empty.WebLogId)
|
||||
@ -546,7 +562,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
limit 25
|
||||
result; withRetryDefault conn
|
||||
}
|
||||
|
||||
|
||||
member _.Restore pages = backgroundTask {
|
||||
for batch in pages |> List.chunkBySize restoreBatchSize do
|
||||
do! rethink {
|
||||
@ -555,7 +571,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
write; withRetryOnce; ignoreResult conn
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
member _.Update page = rethink {
|
||||
withTable Table.Page
|
||||
get page.Id
|
||||
@ -572,7 +588,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
]
|
||||
write; withRetryDefault; ignoreResult conn
|
||||
}
|
||||
|
||||
|
||||
member this.UpdatePriorPermalinks pageId webLogId permalinks = backgroundTask {
|
||||
match! this.FindById pageId webLogId with
|
||||
| Some _ ->
|
||||
@ -586,16 +602,16 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
| None -> return false
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
member _.Post = {
|
||||
new IPostData with
|
||||
|
||||
|
||||
member _.Add post = rethink {
|
||||
withTable Table.Post
|
||||
insert post
|
||||
write; withRetryDefault; ignoreResult conn
|
||||
}
|
||||
|
||||
|
||||
member _.CountByStatus status webLogId = rethink<int> {
|
||||
withTable Table.Post
|
||||
getAll [ webLogId ] (nameof Post.Empty.WebLogId)
|
||||
@ -614,7 +630,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
}
|
||||
return result.Deleted > 0UL
|
||||
}
|
||||
|
||||
|
||||
member _.FindById postId webLogId =
|
||||
rethink<Post list> {
|
||||
withTable Table.Post
|
||||
@ -625,7 +641,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
result; withRetryDefault
|
||||
}
|
||||
|> tryFirst <| conn
|
||||
|
||||
|
||||
member _.FindByPermalink permalink webLogId =
|
||||
rethink<Post list> {
|
||||
withTable Table.Post
|
||||
@ -636,7 +652,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
result; withRetryDefault
|
||||
}
|
||||
|> tryFirst <| conn
|
||||
|
||||
|
||||
member _.FindFullById postId webLogId =
|
||||
rethink<Post> {
|
||||
withTable Table.Post
|
||||
@ -658,13 +674,13 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
|> tryFirst) conn
|
||||
return result |> Option.map _.Permalink
|
||||
}
|
||||
|
||||
|
||||
member _.FindFullByWebLog webLogId = rethink<Post> {
|
||||
withTable Table.Post
|
||||
getAll [ webLogId ] (nameof Post.Empty.WebLogId)
|
||||
resultCursor; withRetryCursorDefault; toList conn
|
||||
}
|
||||
|
||||
|
||||
member _.FindPageOfCategorizedPosts webLogId categoryIds pageNbr postsPerPage = rethink<Post list> {
|
||||
withTable Table.Post
|
||||
getAll (objList categoryIds) (nameof Post.Empty.CategoryIds)
|
||||
@ -678,7 +694,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
limit (postsPerPage + 1)
|
||||
result; withRetryDefault conn
|
||||
}
|
||||
|
||||
|
||||
member _.FindPageOfPosts webLogId pageNbr postsPerPage = rethink<Post list> {
|
||||
withTable Table.Post
|
||||
getAll [ webLogId ] (nameof Post.Empty.WebLogId)
|
||||
@ -703,7 +719,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
limit (postsPerPage + 1)
|
||||
result; withRetryDefault conn
|
||||
}
|
||||
|
||||
|
||||
member _.FindPageOfTaggedPosts webLogId tag pageNbr postsPerPage = rethink<Post list> {
|
||||
withTable Table.Post
|
||||
getAll [ tag ] (nameof Post.Empty.Tags)
|
||||
@ -716,7 +732,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
limit (postsPerPage + 1)
|
||||
result; withRetryDefault conn
|
||||
}
|
||||
|
||||
|
||||
member _.FindSurroundingPosts webLogId publishedOn = backgroundTask {
|
||||
let! older =
|
||||
rethink<Post list> {
|
||||
@ -744,7 +760,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
|> tryFirst <| conn
|
||||
return older, newer
|
||||
}
|
||||
|
||||
|
||||
member _.Restore pages = backgroundTask {
|
||||
for batch in pages |> List.chunkBySize restoreBatchSize do
|
||||
do! rethink {
|
||||
@ -753,7 +769,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
write; withRetryOnce; ignoreResult conn
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
member this.Update post = backgroundTask {
|
||||
match! this.FindById post.Id post.WebLogId with
|
||||
| Some _ ->
|
||||
@ -779,10 +795,10 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
| None -> return false
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
member _.TagMap = {
|
||||
new ITagMapData with
|
||||
|
||||
|
||||
member _.Delete tagMapId webLogId = backgroundTask {
|
||||
let! result = rethink<Model.Result> {
|
||||
withTable Table.TagMap
|
||||
@ -793,7 +809,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
}
|
||||
return result.Deleted > 0UL
|
||||
}
|
||||
|
||||
|
||||
member _.FindById tagMapId webLogId =
|
||||
rethink<TagMap> {
|
||||
withTable Table.TagMap
|
||||
@ -801,7 +817,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
resultOption; withRetryOptionDefault
|
||||
}
|
||||
|> verifyWebLog webLogId _.WebLogId <| conn
|
||||
|
||||
|
||||
member _.FindByUrlValue urlValue webLogId =
|
||||
rethink<TagMap list> {
|
||||
withTable Table.TagMap
|
||||
@ -810,7 +826,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
result; withRetryDefault
|
||||
}
|
||||
|> tryFirst <| conn
|
||||
|
||||
|
||||
member _.FindByWebLog webLogId = rethink<TagMap list> {
|
||||
withTable Table.TagMap
|
||||
between [| webLogId :> obj; r.Minval() |] [| webLogId :> obj; r.Maxval() |]
|
||||
@ -818,13 +834,13 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
orderBy (nameof TagMap.Empty.Tag)
|
||||
result; withRetryDefault conn
|
||||
}
|
||||
|
||||
|
||||
member _.FindMappingForTags tags webLogId = rethink<TagMap list> {
|
||||
withTable Table.TagMap
|
||||
getAll (tags |> List.map (fun tag -> [| webLogId :> obj; tag |] :> obj)) Index.WebLogAndTag
|
||||
result; withRetryDefault conn
|
||||
}
|
||||
|
||||
|
||||
member _.Restore tagMaps = backgroundTask {
|
||||
for batch in tagMaps |> List.chunkBySize restoreBatchSize do
|
||||
do! rethink {
|
||||
@ -833,7 +849,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
write; withRetryOnce; ignoreResult conn
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
member _.Save tagMap = rethink {
|
||||
withTable Table.TagMap
|
||||
get tagMap.Id
|
||||
@ -841,10 +857,10 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
write; withRetryDefault; ignoreResult conn
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
member _.Theme = {
|
||||
new IThemeData with
|
||||
|
||||
|
||||
member _.All () = rethink<Theme list> {
|
||||
withTable Table.Theme
|
||||
filter (fun row -> row[nameof Theme.Empty.Id].Ne "admin" :> obj)
|
||||
@ -852,7 +868,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
orderBy (nameof Theme.Empty.Id)
|
||||
result; withRetryDefault conn
|
||||
}
|
||||
|
||||
|
||||
member _.Exists themeId = backgroundTask {
|
||||
let! count = rethink<int> {
|
||||
withTable Table.Theme
|
||||
@ -862,13 +878,13 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
}
|
||||
return count > 0
|
||||
}
|
||||
|
||||
|
||||
member _.FindById themeId = rethink<Theme> {
|
||||
withTable Table.Theme
|
||||
get themeId
|
||||
resultOption; withRetryOptionDefault conn
|
||||
}
|
||||
|
||||
|
||||
member _.FindByIdWithoutText themeId =
|
||||
rethink<Theme list> {
|
||||
withTable Table.Theme
|
||||
@ -877,7 +893,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
result; withRetryDefault
|
||||
}
|
||||
|> tryFirst <| conn
|
||||
|
||||
|
||||
member this.Delete themeId = backgroundTask {
|
||||
match! this.FindByIdWithoutText themeId with
|
||||
| Some _ ->
|
||||
@ -891,7 +907,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
return true
|
||||
| None -> return false
|
||||
}
|
||||
|
||||
|
||||
member _.Save theme = rethink {
|
||||
withTable Table.Theme
|
||||
get theme.Id
|
||||
@ -899,37 +915,37 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
write; withRetryDefault; ignoreResult conn
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
member _.ThemeAsset = {
|
||||
new IThemeAssetData with
|
||||
|
||||
|
||||
member _.All () = rethink<ThemeAsset list> {
|
||||
withTable Table.ThemeAsset
|
||||
merge (r.HashMap(nameof ThemeAsset.Empty.Data, emptyFile))
|
||||
result; withRetryDefault conn
|
||||
}
|
||||
|
||||
|
||||
member _.DeleteByTheme themeId = deleteAssetsByTheme themeId
|
||||
|
||||
|
||||
member _.FindById assetId = rethink<ThemeAsset> {
|
||||
withTable Table.ThemeAsset
|
||||
get assetId
|
||||
resultOption; withRetryOptionDefault conn
|
||||
}
|
||||
|
||||
|
||||
member _.FindByTheme themeId = rethink<ThemeAsset list> {
|
||||
withTable Table.ThemeAsset
|
||||
filter (matchAssetByThemeId themeId)
|
||||
merge (r.HashMap(nameof ThemeAsset.Empty.Data, emptyFile))
|
||||
result; withRetryDefault conn
|
||||
}
|
||||
|
||||
|
||||
member _.FindByThemeWithData themeId = rethink<ThemeAsset> {
|
||||
withTable Table.ThemeAsset
|
||||
filter (matchAssetByThemeId themeId)
|
||||
resultCursor; withRetryCursorDefault; toList conn
|
||||
}
|
||||
|
||||
|
||||
member _.Save asset = rethink {
|
||||
withTable Table.ThemeAsset
|
||||
get asset.Id
|
||||
@ -937,16 +953,16 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
write; withRetryDefault; ignoreResult conn
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
member _.Upload = {
|
||||
new IUploadData with
|
||||
|
||||
|
||||
member _.Add upload = rethink {
|
||||
withTable Table.Upload
|
||||
insert upload
|
||||
write; withRetryDefault; ignoreResult conn
|
||||
}
|
||||
|
||||
|
||||
member _.Delete uploadId webLogId = backgroundTask {
|
||||
let! upload =
|
||||
rethink<Upload> {
|
||||
@ -966,7 +982,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
return Ok (string up.Path)
|
||||
| None -> return Result.Error $"Upload ID {uploadId} not found"
|
||||
}
|
||||
|
||||
|
||||
member _.FindByPath path webLogId =
|
||||
rethink<Upload> {
|
||||
withTable Table.Upload
|
||||
@ -974,7 +990,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
resultCursor; withRetryCursorDefault; toList
|
||||
}
|
||||
|> tryFirst <| conn
|
||||
|
||||
|
||||
member _.FindByWebLog webLogId = rethink<Upload> {
|
||||
withTable Table.Upload
|
||||
between [| webLogId :> obj; r.Minval() |] [| webLogId :> obj; r.Maxval() |]
|
||||
@ -982,14 +998,14 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
merge (r.HashMap(nameof Upload.Empty.Data, emptyFile))
|
||||
resultCursor; withRetryCursorDefault; toList conn
|
||||
}
|
||||
|
||||
|
||||
member _.FindByWebLogWithData webLogId = rethink<Upload> {
|
||||
withTable Table.Upload
|
||||
between [| webLogId :> obj; r.Minval() |] [| webLogId :> obj; r.Maxval() |]
|
||||
[ Index Index.WebLogAndPath ]
|
||||
resultCursor; withRetryCursorDefault; toList conn
|
||||
}
|
||||
|
||||
|
||||
member _.Restore uploads = backgroundTask {
|
||||
// Files can be large; we'll do 5 at a time
|
||||
for batch in uploads |> List.chunkBySize 5 do
|
||||
@ -1000,21 +1016,21 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
member _.WebLog = {
|
||||
new IWebLogData with
|
||||
|
||||
|
||||
member _.Add webLog = rethink {
|
||||
withTable Table.WebLog
|
||||
insert webLog
|
||||
write; withRetryOnce; ignoreResult conn
|
||||
}
|
||||
|
||||
|
||||
member _.All () = rethink<WebLog list> {
|
||||
withTable Table.WebLog
|
||||
result; withRetryDefault conn
|
||||
}
|
||||
|
||||
|
||||
member _.Delete webLogId = backgroundTask {
|
||||
// Comments should be deleted by post IDs
|
||||
let! thePostIds = rethink<{| Id: string |} list> {
|
||||
@ -1061,7 +1077,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
write; withRetryOnce; ignoreResult conn
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
member _.FindByHost url =
|
||||
rethink<WebLog list> {
|
||||
withTable Table.WebLog
|
||||
@ -1076,21 +1092,21 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
get webLogId
|
||||
resultOption; withRetryOptionDefault conn
|
||||
}
|
||||
|
||||
|
||||
member _.UpdateRedirectRules webLog = rethink {
|
||||
withTable Table.WebLog
|
||||
get webLog.Id
|
||||
update [ nameof WebLog.Empty.RedirectRules, webLog.RedirectRules :> obj ]
|
||||
write; withRetryDefault; ignoreResult conn
|
||||
}
|
||||
|
||||
|
||||
member _.UpdateRssOptions webLog = rethink {
|
||||
withTable Table.WebLog
|
||||
get webLog.Id
|
||||
update [ nameof WebLog.Empty.Rss, webLog.Rss :> obj ]
|
||||
write; withRetryDefault; ignoreResult conn
|
||||
}
|
||||
|
||||
|
||||
member _.UpdateSettings webLog = rethink {
|
||||
withTable Table.WebLog
|
||||
get webLog.Id
|
||||
@ -1108,16 +1124,16 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
write; withRetryDefault; ignoreResult conn
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
member _.WebLogUser = {
|
||||
new IWebLogUserData with
|
||||
|
||||
|
||||
member _.Add user = rethink {
|
||||
withTable Table.WebLogUser
|
||||
insert user
|
||||
write; withRetryDefault; ignoreResult conn
|
||||
}
|
||||
|
||||
|
||||
member _.FindById userId webLogId =
|
||||
rethink<WebLogUser> {
|
||||
withTable Table.WebLogUser
|
||||
@ -1125,7 +1141,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
resultOption; withRetryOptionDefault
|
||||
}
|
||||
|> verifyWebLog webLogId _.WebLogId <| conn
|
||||
|
||||
|
||||
member this.Delete userId webLogId = backgroundTask {
|
||||
match! this.FindById userId webLogId with
|
||||
| Some _ ->
|
||||
@ -1155,7 +1171,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
return Ok true
|
||||
| None -> return Result.Error "User does not exist"
|
||||
}
|
||||
|
||||
|
||||
member _.FindByEmail email webLogId =
|
||||
rethink<WebLogUser list> {
|
||||
withTable Table.WebLogUser
|
||||
@ -1164,14 +1180,14 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
result; withRetryDefault
|
||||
}
|
||||
|> tryFirst <| conn
|
||||
|
||||
|
||||
member _.FindByWebLog webLogId = rethink<WebLogUser list> {
|
||||
withTable Table.WebLogUser
|
||||
getAll [ webLogId ] (nameof WebLogUser.Empty.WebLogId)
|
||||
orderByFunc (fun row -> row[nameof WebLogUser.Empty.PreferredName].Downcase())
|
||||
result; withRetryDefault conn
|
||||
}
|
||||
|
||||
|
||||
member _.FindNames webLogId userIds = backgroundTask {
|
||||
let! users = rethink<WebLogUser list> {
|
||||
withTable Table.WebLogUser
|
||||
@ -1181,7 +1197,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
}
|
||||
return users |> List.map (fun u -> { Name = string u.Id; Value = u.DisplayName })
|
||||
}
|
||||
|
||||
|
||||
member _.Restore users = backgroundTask {
|
||||
for batch in users |> List.chunkBySize restoreBatchSize do
|
||||
do! rethink {
|
||||
@ -1190,7 +1206,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
write; withRetryOnce; ignoreResult conn
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
member this.SetLastSeen userId webLogId = backgroundTask {
|
||||
match! this.FindById userId webLogId with
|
||||
| Some _ ->
|
||||
@ -1202,7 +1218,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
}
|
||||
| None -> ()
|
||||
}
|
||||
|
||||
|
||||
member _.Update user = rethink {
|
||||
withTable Table.WebLogUser
|
||||
get user.Id
|
||||
@ -1218,30 +1234,37 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
write; withRetryDefault; ignoreResult conn
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
member _.Serializer =
|
||||
Net.Converter.Serializer
|
||||
|
||||
|
||||
    member _.StartUp () = backgroundTask {
        let! dbs = rethink<string list> { dbList; result; withRetryOnce conn }
        if not (dbs |> List.contains config.Database) then
            log.LogInformation $"Creating database {config.Database}..."
            do! rethink { dbCreate config.Database; write; withRetryOnce; ignoreResult conn }

        let! tables = rethink<string list> { tableList; result; withRetryOnce conn }
        for tbl in Table.all do
            if not (tables |> List.contains tbl) then
                log.LogInformation $"Creating table {tbl}..."
                do! rethink { tableCreate tbl [ PrimaryKey "Id" ]; write; withRetryOnce; ignoreResult conn }

        if not (List.contains Table.DbVersion tables) then
            // Version table added in v2-rc2; this will flag that migration to be run
        if List.isEmpty tables then
            // New install; set version to current version
            do! rethink {
                withTable Table.DbVersion
                insert {| Id = Utils.Migration.currentDbVersion |}
                write; withRetryOnce; ignoreResult conn
            }
        elif not (List.contains Table.DbVersion tables) then
            // Other tables, but not version, added in v2-rc2; this will flag that migration to be run
            do! rethink {
                withTable Table.DbVersion
                insert {| Id = "v2-rc1" |}
                write; withRetryOnce; ignoreResult conn
            }

do! ensureIndexes Table.Category [ nameof Category.Empty.WebLogId ]
|
||||
do! ensureIndexes Table.Comment [ nameof Comment.Empty.PostId ]
|
||||
do! ensureIndexes Table.Page [ nameof Page.Empty.WebLogId; nameof Page.Empty.AuthorId ]
|
||||
@ -1250,7 +1273,7 @@ type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<Rethi
|
||||
do! ensureIndexes Table.Upload []
|
||||
do! ensureIndexes Table.WebLog [ nameof WebLog.Empty.UrlBase ]
|
||||
do! ensureIndexes Table.WebLogUser [ nameof WebLogUser.Empty.WebLogId ]
|
||||
|
||||
|
||||
let! version = rethink<{| Id: string |} list> {
|
||||
withTable Table.DbVersion
|
||||
limit 1
|
||||
|
@ -16,22 +16,23 @@ type SQLiteCategoryData(conn: SqliteConnection, ser: JsonSerializer, log: ILogge
|
||||
let parentIdField = nameof Category.Empty.ParentId
|
||||
|
||||
/// Count all categories for the given web log
|
||||
let countAll webLogId =
|
||||
let countAll webLogId = backgroundTask {
|
||||
log.LogTrace "Category.countAll"
|
||||
Document.countByWebLog Table.Category webLogId conn
|
||||
let! count = conn.countByFields Table.Category Any [ webLogField webLogId ]
|
||||
return int count
|
||||
}
|
||||
|
||||
/// Count all top-level categories for the given web log
|
||||
let countTopLevel webLogId =
|
||||
let countTopLevel webLogId = backgroundTask {
|
||||
log.LogTrace "Category.countTopLevel"
|
||||
conn.customScalar
|
||||
$"{Document.Query.countByWebLog Table.Category} AND data ->> '{parentIdField}' IS NULL"
|
||||
[ webLogParam webLogId ]
|
||||
(toCount >> int)
|
||||
let! count = conn.countByFields Table.Category All [ webLogField webLogId; Field.NotExists parentIdField ]
|
||||
return int count
|
||||
}
|
||||
|
||||
/// Find all categories for the given web log
|
||||
let findByWebLog webLogId =
|
||||
log.LogTrace "Category.findByWebLog"
|
||||
Document.findByWebLog<Category> Table.Category webLogId conn
|
||||
conn.findByFields<Category> Table.Category Any [ webLogField webLogId ]
|
||||
|
||||
/// Retrieve all categories for the given web log in a DotLiquid-friendly format
|
||||
let findAllForView webLogId = backgroundTask {
|
||||
@ -42,20 +43,18 @@ type SQLiteCategoryData(conn: SqliteConnection, ser: JsonSerializer, log: ILogge
|
||||
ordered
|
||||
|> Seq.map (fun it -> backgroundTask {
|
||||
// Parent category post counts include posts in subcategories
|
||||
let catSql, catParams =
|
||||
let childField =
|
||||
ordered
|
||||
|> Seq.filter (fun cat -> cat.ParentNames |> Array.contains it.Name)
|
||||
|> Seq.map _.Id
|
||||
|> Seq.append (Seq.singleton it.Id)
|
||||
|> List.ofSeq
|
||||
|> inJsonArray Table.Post (nameof Post.Empty.CategoryIds) "catId"
|
||||
let query = $"""
|
||||
SELECT COUNT(DISTINCT data ->> '{nameof Post.Empty.Id}')
|
||||
FROM {Table.Post}
|
||||
WHERE {Document.Query.whereByWebLog}
|
||||
AND {Query.whereByField (Field.EQ (nameof Post.Empty.Status) "") $"'{string Published}'"}
|
||||
AND {catSql}"""
|
||||
let! postCount = conn.customScalar query (webLogParam webLogId :: catParams) toCount
|
||||
|> Field.InArray (nameof Post.Empty.CategoryIds) Table.Post
|
||||
let fields =
|
||||
[ webLogField webLogId; Field.Equal (nameof Post.Empty.Status) (string Published); childField ]
|
||||
let query =
|
||||
(Query.statementWhere (Query.count Table.Post) (Query.whereByFields All fields))
|
||||
.Replace("(*)", $"(DISTINCT data->>'{nameof Post.Empty.Id}')")
|
||||
let! postCount = conn.customScalar query (addFieldParams fields []) toCount
|
||||
return it.Id, int postCount
|
||||
})
|
||||
|> Task.WhenAll
|
||||
@ -69,9 +68,9 @@ type SQLiteCategoryData(conn: SqliteConnection, ser: JsonSerializer, log: ILogge
|
||||
}
|
||||
|
||||
/// Find a category by its ID for the given web log
|
||||
let findById catId webLogId =
|
||||
let findById (catId: CategoryId) webLogId =
|
||||
log.LogTrace "Category.findById"
|
||||
Document.findByIdAndWebLog<CategoryId, Category> Table.Category catId webLogId conn
|
||||
conn.findFirstByFields<Category> Table.Category All [ idField catId; webLogField webLogId ]
|
||||
|
||||
/// Delete a category
|
||||
let delete catId webLogId = backgroundTask {
|
||||
@ -79,24 +78,22 @@ type SQLiteCategoryData(conn: SqliteConnection, ser: JsonSerializer, log: ILogge
|
||||
match! findById catId webLogId with
|
||||
| Some cat ->
|
||||
// Reassign any children to the category's parent category
|
||||
let! children = conn.countByField Table.Category (Field.EQ parentIdField (string catId))
|
||||
let! children = conn.countByFields Table.Category Any [ Field.Equal parentIdField (string catId) ]
|
||||
if children > 0L then
|
||||
let parent = Field.EQ parentIdField (string catId)
|
||||
let parent = [ Field.Equal parentIdField (string catId) ]
|
||||
match cat.ParentId with
|
||||
| Some _ -> do! conn.patchByField Table.Category parent {| ParentId = cat.ParentId |}
|
||||
| None -> do! conn.removeFieldsByField Table.Category parent [ parentIdField ]
|
||||
| Some _ -> do! conn.patchByFields Table.Category Any parent {| ParentId = cat.ParentId |}
|
||||
| None -> do! conn.removeFieldsByFields Table.Category Any parent [ parentIdField ]
|
||||
// Delete the category off all posts where it is assigned, and the category itself
|
||||
let catIdField = nameof Post.Empty.CategoryIds
|
||||
let fields = [ webLogField webLogId; Field.InArray catIdField Table.Post [ string catId ] ]
|
||||
let query =
|
||||
(Query.statementWhere (Query.find Table.Post) (Query.whereByFields All fields))
|
||||
.Replace("SELECT data", $"SELECT data->>'{nameof Post.Empty.Id}', data->'{catIdField}'")
|
||||
let! posts =
|
||||
conn.customList
|
||||
$"SELECT data ->> '{nameof Post.Empty.Id}', data -> '{catIdField}'
|
||||
FROM {Table.Post}
|
||||
WHERE {Document.Query.whereByWebLog}
|
||||
AND EXISTS
|
||||
(SELECT 1
|
||||
FROM json_each({Table.Post}.data -> '{catIdField}')
|
||||
WHERE json_each.value = @id)"
|
||||
[ idParam catId; webLogParam webLogId ]
|
||||
query
|
||||
(addFieldParams fields [])
|
||||
(fun rdr -> rdr.GetString 0, Utils.deserialize<string list> ser (rdr.GetString 1))
|
||||
for postId, cats in posts do
|
||||
do! conn.patchById
|
||||
|
@ -5,55 +5,55 @@ module MyWebLog.Data.SQLite.SQLiteHelpers
|
||||
/// The table names used in the SQLite implementation
|
||||
[<RequireQualifiedAccess>]
|
||||
module Table =
|
||||
|
||||
|
||||
/// Categories
|
||||
[<Literal>]
|
||||
let Category = "category"
|
||||
|
||||
|
||||
/// Database Version
|
||||
[<Literal>]
|
||||
let DbVersion = "db_version"
|
||||
|
||||
|
||||
/// Pages
|
||||
[<Literal>]
|
||||
let Page = "page"
|
||||
|
||||
|
||||
/// Page Revisions
|
||||
[<Literal>]
|
||||
let PageRevision = "page_revision"
|
||||
|
||||
|
||||
/// Posts
|
||||
[<Literal>]
|
||||
let Post = "post"
|
||||
|
||||
|
||||
/// Post Comments
|
||||
[<Literal>]
|
||||
let PostComment = "post_comment"
|
||||
|
||||
|
||||
/// Post Revisions
|
||||
[<Literal>]
|
||||
let PostRevision = "post_revision"
|
||||
|
||||
|
||||
/// Tag/URL Mappings
|
||||
[<Literal>]
|
||||
let TagMap = "tag_map"
|
||||
|
||||
|
||||
/// Themes
|
||||
[<Literal>]
|
||||
let Theme = "theme"
|
||||
|
||||
|
||||
/// Theme Assets
|
||||
[<Literal>]
|
||||
let ThemeAsset = "theme_asset"
|
||||
|
||||
|
||||
/// Uploads
|
||||
[<Literal>]
|
||||
let Upload = "upload"
|
||||
|
||||
|
||||
/// Web Logs
|
||||
[<Literal>]
|
||||
let WebLog = "web_log"
|
||||
|
||||
|
||||
/// Users
|
||||
[<Literal>]
|
||||
let WebLogUser = "web_log_user"
|
||||
@ -82,111 +82,78 @@ let instantParam =
|
||||
let maybeInstant =
|
||||
Option.map instantParam >> maybe
|
||||
|
||||
/// Create the SQL and parameters for an EXISTS applied to a JSON array
|
||||
let inJsonArray<'T> table jsonField paramName (items: 'T list) =
|
||||
if List.isEmpty items then "", []
|
||||
else
|
||||
let mutable idx = 0
|
||||
items
|
||||
|> List.skip 1
|
||||
|> List.fold (fun (itemS, itemP) it ->
|
||||
idx <- idx + 1
|
||||
$"{itemS}, @%s{paramName}{idx}", (SqliteParameter($"@%s{paramName}{idx}", string it) :: itemP))
|
||||
(Seq.ofList items
|
||||
|> Seq.map (fun it -> $"(@%s{paramName}0", [ SqliteParameter($"@%s{paramName}0", string it) ])
|
||||
|> Seq.head)
|
||||
|> function
|
||||
sql, ps ->
|
||||
$"EXISTS (SELECT 1 FROM json_each(%s{table}.data, '$.%s{jsonField}') WHERE value IN {sql}))", ps
|
||||
|
||||
/// Create the SQL and parameters for an IN clause
|
||||
let inClause<'T> colNameAndPrefix paramName (valueFunc: 'T -> string) (items: 'T list) =
|
||||
if List.isEmpty items then "", []
|
||||
else
|
||||
let mutable idx = 0
|
||||
items
|
||||
|> List.skip 1
|
||||
|> List.fold (fun (itemS, itemP) it ->
|
||||
idx <- idx + 1
|
||||
$"{itemS}, @%s{paramName}{idx}", (SqliteParameter ($"@%s{paramName}{idx}", valueFunc it) :: itemP))
|
||||
(Seq.ofList items
|
||||
|> Seq.map (fun it ->
|
||||
$"%s{colNameAndPrefix} IN (@%s{paramName}0", [ SqliteParameter ($"@%s{paramName}0", valueFunc it) ])
|
||||
|> Seq.head)
|
||||
|> function sql, ps -> $"{sql})", ps
|
||||
|
||||
|
||||
/// Functions to map domain items from a data reader
|
||||
module Map =
|
||||
|
||||
|
||||
open System.IO
|
||||
|
||||
|
||||
/// Get a boolean value from a data reader
|
||||
let getBoolean col (rdr: SqliteDataReader) = rdr.GetBoolean(rdr.GetOrdinal col)
|
||||
|
||||
|
||||
/// Get a date/time value from a data reader
|
||||
let getDateTime col (rdr: SqliteDataReader) = rdr.GetDateTime(rdr.GetOrdinal col)
|
||||
|
||||
|
||||
/// Get a Guid value from a data reader
|
||||
let getGuid col (rdr: SqliteDataReader) = rdr.GetGuid(rdr.GetOrdinal col)
|
||||
|
||||
|
||||
/// Get an int value from a data reader
|
||||
let getInt col (rdr: SqliteDataReader) = rdr.GetInt32(rdr.GetOrdinal col)
|
||||
|
||||
|
||||
/// Get a long (64-bit int) value from a data reader
|
||||
let getLong col (rdr: SqliteDataReader) = rdr.GetInt64(rdr.GetOrdinal col)
|
||||
|
||||
|
||||
/// Get a BLOB stream value from a data reader
|
||||
let getStream col (rdr: SqliteDataReader) = rdr.GetStream(rdr.GetOrdinal col)
|
||||
|
||||
|
||||
/// Get a string value from a data reader
|
||||
let getString col (rdr: SqliteDataReader) = rdr.GetString(rdr.GetOrdinal col)
|
||||
|
||||
|
||||
/// Parse an Instant from the given value
|
||||
let parseInstant value =
|
||||
match InstantPattern.General.Parse value with
|
||||
| it when it.Success -> it.Value
|
||||
| it -> raise it.Exception
|
||||
|
||||
|
||||
/// Get an Instant value from a data reader
|
||||
let getInstant col rdr =
|
||||
getString col rdr |> parseInstant
|
||||
|
||||
|
||||
/// Get a timespan value from a data reader
|
||||
let getTimeSpan col (rdr: SqliteDataReader) = rdr.GetTimeSpan(rdr.GetOrdinal col)
|
||||
|
||||
|
||||
/// Get a possibly null boolean value from a data reader
|
||||
let tryBoolean col (rdr: SqliteDataReader) =
|
||||
if rdr.IsDBNull(rdr.GetOrdinal col) then None else Some (getBoolean col rdr)
|
||||
|
||||
|
||||
/// Get a possibly null date/time value from a data reader
|
||||
let tryDateTime col (rdr: SqliteDataReader) =
|
||||
if rdr.IsDBNull(rdr.GetOrdinal col) then None else Some (getDateTime col rdr)
|
||||
|
||||
|
||||
/// Get a possibly null Guid value from a data reader
|
||||
let tryGuid col (rdr: SqliteDataReader) =
|
||||
if rdr.IsDBNull(rdr.GetOrdinal col) then None else Some (getGuid col rdr)
|
||||
|
||||
|
||||
/// Get a possibly null int value from a data reader
|
||||
let tryInt col (rdr: SqliteDataReader) =
|
||||
if rdr.IsDBNull(rdr.GetOrdinal col) then None else Some (getInt col rdr)
|
||||
|
||||
|
||||
/// Get a possibly null string value from a data reader
|
||||
let tryString col (rdr: SqliteDataReader) =
|
||||
if rdr.IsDBNull(rdr.GetOrdinal col) then None else Some (getString col rdr)
|
||||
|
||||
|
||||
/// Get a possibly null timespan value from a data reader
|
||||
let tryTimeSpan col (rdr: SqliteDataReader) =
|
||||
if rdr.IsDBNull(rdr.GetOrdinal col) then None else Some (getTimeSpan col rdr)
|
||||
|
||||
|
||||
/// Create a permalink from the current row in the given data reader
|
||||
let toPermalink rdr = getString "permalink" rdr |> Permalink
|
||||
|
||||
|
||||
/// Create a revision from the current row in the given data reader
|
||||
let toRevision rdr : Revision =
|
||||
{ AsOf = getInstant "as_of" rdr
|
||||
Text = getString "revision_text" rdr |> MarkupText.Parse }
|
||||
|
||||
|
||||
/// Create a theme asset from the current row in the given data reader
|
||||
let toThemeAsset includeData rdr : ThemeAsset =
|
||||
let assetData =
|
||||
@ -200,7 +167,7 @@ module Map =
|
||||
{ Id = ThemeAssetId (ThemeId (getString "theme_id" rdr), getString "path" rdr)
|
||||
UpdatedOn = getInstant "updated_on" rdr
|
||||
Data = assetData }
|
||||
|
||||
|
||||
/// Create an uploaded file from the current row in the given data reader
|
||||
let toUpload includeData rdr : Upload =
|
||||
let data =
|
||||
@ -218,90 +185,57 @@ module Map =
|
||||
Data = data }

open BitBadger.Documents

/// Create a named parameter
let sqlParam name (value: obj) =
    SqliteParameter(name, value)

/// Create a web log ID parameter
let webLogParam (webLogId: WebLogId) =
    sqlParam "@webLogId" (string webLogId)

/// Create a field for an ID value
let idField<'T> (idValue: 'T) =
    { Field.Equal "Id" (string idValue) with ParameterName = Some "@id" }

/// Create a web log field
let webLogField (webLogId: WebLogId) =
    { Field.Equal "WebLogId" (string webLogId) with ParameterName = Some "@webLogId" }

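These two field helpers are the building blocks for most of the SQLite queries elsewhere in this diff. A minimal sketch of how they combine with the `findFirstByFields` and `countByFields` calls used below, with the connection and ID values assumed to be in scope and the wrapper names purely illustrative:

```fsharp
// Sketch only: look up one page scoped to its web log, and count a web log's posts.
// The calls mirror SQLitePageData.findById and the count functions in this diff;
// All requires every listed field to match, Any at least one.
let findPage (conn: SqliteConnection) (pageId: PageId) (webLogId: WebLogId) =
    conn.findFirstByFields<Page> Table.Page All [ idField pageId; webLogField webLogId ]

let countPosts (conn: SqliteConnection) (webLogId: WebLogId) = backgroundTask {
    let! count = conn.countByFields Table.Post Any [ webLogField webLogId ]
    return int count
}
```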
open BitBadger.Documents
|
||||
open BitBadger.Documents.Sqlite
|
||||
open BitBadger.Documents.Sqlite.WithConn
|
||||
|
||||
/// Functions for manipulating documents
|
||||
module Document =
|
||||
|
||||
/// Queries to assist with document manipulation
|
||||
module Query =
|
||||
|
||||
/// Fragment to add a web log ID condition to a WHERE clause (parameter @webLogId)
|
||||
let whereByWebLog =
|
||||
Query.whereByField (Field.EQ "WebLogId" "") "@webLogId"
|
||||
|
||||
/// A SELECT query to count documents for a given web log ID
|
||||
let countByWebLog table =
|
||||
$"{Query.Count.all table} WHERE {whereByWebLog}"
|
||||
|
||||
/// A query to select from a table by the document's ID and its web log ID
|
||||
let selectByIdAndWebLog table =
|
||||
$"{Query.Find.byId table} AND {whereByWebLog}"
|
||||
|
||||
/// A query to select from a table by its web log ID
|
||||
let selectByWebLog table =
|
||||
$"{Query.selectFromTable table} WHERE {whereByWebLog}"
|
||||
|
||||
/// Count documents for the given web log ID
|
||||
let countByWebLog table (webLogId: WebLogId) conn = backgroundTask {
|
||||
let! count = Count.byField table (Field.EQ "WebLogId" (string webLogId)) conn
|
||||
return int count
|
||||
}
|
||||
|
||||
/// Find a document by its ID and web log ID
|
||||
let findByIdAndWebLog<'TKey, 'TDoc> table (key: 'TKey) webLogId conn =
|
||||
Custom.single (Query.selectByIdAndWebLog table) [ idParam key; webLogParam webLogId ] fromData<'TDoc> conn
|
||||
|
||||
/// Find documents for the given web log
|
||||
let findByWebLog<'TDoc> table (webLogId: WebLogId) conn =
|
||||
Find.byField<'TDoc> table (Field.EQ "WebLogId" (string webLogId)) conn
|
||||
|
||||
|
||||
/// Functions to support revisions
|
||||
module Revisions =
|
||||
|
||||
|
||||
/// Find all revisions for the given entity
|
||||
let findByEntityId<'TKey> revTable entityTable (key: 'TKey) conn =
|
||||
Custom.list
|
||||
let findByEntityId<'TKey> revTable entityTable (key: 'TKey) (conn: SqliteConnection) =
|
||||
conn.customList
|
||||
$"SELECT as_of, revision_text FROM %s{revTable} WHERE %s{entityTable}_id = @id ORDER BY as_of DESC"
|
||||
[ idParam key ]
|
||||
Map.toRevision
|
||||
conn
|
||||
|
||||
|
||||
/// Find all revisions for all posts for the given web log
|
||||
let findByWebLog<'TKey> revTable entityTable (keyFunc: string -> 'TKey) webLogId conn =
|
||||
Custom.list
|
||||
let findByWebLog<'TKey> revTable entityTable (keyFunc: string -> 'TKey) webLogId (conn: SqliteConnection) =
|
||||
conn.customList
|
||||
$"SELECT pr.*
|
||||
FROM %s{revTable} pr
|
||||
INNER JOIN %s{entityTable} p ON p.data ->> 'Id' = pr.{entityTable}_id
|
||||
WHERE p.{Document.Query.whereByWebLog}
|
||||
INNER JOIN %s{entityTable} p ON p.data->>'Id' = pr.{entityTable}_id
|
||||
WHERE p.{Query.whereByFields Any [ webLogField webLogId ]}
|
||||
ORDER BY as_of DESC"
|
||||
[ webLogParam webLogId ]
|
||||
(fun rdr -> keyFunc (Map.getString $"{entityTable}_id" rdr), Map.toRevision rdr)
|
||||
conn

    /// Update a page or post's revisions
    let update<'TKey> revTable entityTable (key: 'TKey) oldRevs newRevs conn = backgroundTask {
    let update<'TKey> revTable entityTable (key: 'TKey) oldRevs newRevs (conn: SqliteConnection) = backgroundTask {
        let toDelete, toAdd = Utils.diffRevisions oldRevs newRevs
        for delRev in toDelete do
            do! Custom.nonQuery
            do! conn.customNonQuery
                $"DELETE FROM %s{revTable} WHERE %s{entityTable}_id = @id AND as_of = @asOf"
                [ idParam key; sqlParam "@asOf" (instantParam delRev.AsOf) ]
                conn
        for addRev in toAdd do
            do! Custom.nonQuery
            do! conn.customNonQuery
                $"INSERT INTO {revTable} VALUES (@id, @asOf, @text)"
                [ idParam key; sqlParam "asOf" (instantParam addRev.AsOf); sqlParam "@text" (string addRev.Text) ]
                conn
    }

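The delete/insert pattern above is driven by `Utils.diffRevisions`, which compares the stored and incoming revision lists and hands back only the rows that changed. A rough sketch of that contract as this code relies on it; this is an illustration, not the real `Utils.diffRevisions` implementation:

```fsharp
// Illustration only: split revisions by their AsOf instants so the caller can issue
// one DELETE per dropped revision and one INSERT per added revision.
let diffRevisionsSketch (oldRevs: Revision list) (newRevs: Revision list) =
    let oldAsOf = oldRevs |> List.map _.AsOf |> Set.ofList
    let newAsOf = newRevs |> List.map _.AsOf |> Set.ofList
    let toDelete = oldRevs |> List.filter (fun rev -> not (Set.contains rev.AsOf newAsOf))
    let toAdd    = newRevs |> List.filter (fun rev -> not (Set.contains rev.AsOf oldAsOf))
    toDelete, toAdd
```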
@ -17,8 +17,10 @@ type SQLitePageData(conn: SqliteConnection, log: ILogger) =

    /// The JSON field name for the "is in page list" flag
    let pgListName = nameof Page.Empty.IsInPageList

    /// The JSON field for the title of the page
    let titleField = $"data ->> '{nameof Page.Empty.Title}'"
    /// Query to return pages sorted by title
    let sortedPages fields =
        Query.byFields (Query.find Table.Page) All fields
        + Query.orderBy [ Field.Named $"i:{nameof Page.Empty.Title}" ] SQLite

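A quick sketch of how `sortedPages` is consumed by the listing members below; the paging values mirror `findPageOfPages` (25 per page plus one extra row to detect a following page), and the wrapper name is an assumption for illustration:

```fsharp
// Sketch: first page of pages for a web log, sorted by title; the "i:" prefix on the
// ordering field appears to request case-insensitive ordering, matching the
// ORDER BY LOWER(...) it replaces.
let firstPageOfPages (conn: SqliteConnection) (webLogId: WebLogId) =
    let field = [ webLogField webLogId ]
    conn.customList
        $"{sortedPages field} LIMIT @pageSize OFFSET @toSkip"
        (addFieldParams field [ sqlParam "@pageSize" 26; sqlParam "@toSkip" 0 ])
        fromData<Page>
```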
// SUPPORT FUNCTIONS
@ -50,36 +52,38 @@ type SQLitePageData(conn: SqliteConnection, log: ILogger) =
|
||||
/// Get all pages for a web log (without text, metadata, revisions, or prior permalinks)
|
||||
let all webLogId =
|
||||
log.LogTrace "Page.all"
|
||||
let field = [ webLogField webLogId ]
|
||||
conn.customList
|
||||
$"{Query.selectFromTable Table.Page} WHERE {Document.Query.whereByWebLog} ORDER BY LOWER({titleField})"
|
||||
[ webLogParam webLogId ]
|
||||
(sortedPages field)
|
||||
(addFieldParams field [])
|
||||
(fun rdr -> { fromData<Page> rdr with Text = ""; Metadata = []; PriorPermalinks = [] })
|
||||
|
||||
/// Count all pages for the given web log
|
||||
let countAll webLogId =
|
||||
let countAll webLogId = backgroundTask {
|
||||
log.LogTrace "Page.countAll"
|
||||
Document.countByWebLog Table.Page webLogId conn
|
||||
let! count = conn.countByFields Table.Page Any [ webLogField webLogId ]
|
||||
return int count
|
||||
}
|
||||
|
||||
/// Count all pages shown in the page list for the given web log
|
||||
let countListed webLogId =
|
||||
let countListed webLogId = backgroundTask {
|
||||
log.LogTrace "Page.countListed"
|
||||
conn.customScalar
|
||||
$"""{Document.Query.countByWebLog Table.Page} AND {Query.whereByField (Field.EQ pgListName "") "true"}"""
|
||||
[ webLogParam webLogId ]
|
||||
(toCount >> int)
|
||||
let! count = conn.countByFields Table.Page All [ webLogField webLogId; Field.Equal pgListName true ]
|
||||
return int count
|
||||
}
|
||||
|
||||
/// Find a page by its ID (without revisions and prior permalinks)
|
||||
let findById pageId webLogId = backgroundTask {
|
||||
let findById (pageId: PageId) webLogId = backgroundTask {
|
||||
log.LogTrace "Page.findById"
|
||||
match! Document.findByIdAndWebLog<PageId, Page> Table.Page pageId webLogId conn with
|
||||
match! conn.findFirstByFields<Page> Table.Page All [ idField pageId; webLogField webLogId ] with
|
||||
| Some page -> return Some { page with PriorPermalinks = [] }
|
||||
| None -> return None
|
||||
}
|
||||
|
||||
/// Find a complete page by its ID
|
||||
let findFullById pageId webLogId = backgroundTask {
|
||||
let findFullById (pageId: PageId) webLogId = backgroundTask {
|
||||
log.LogTrace "Page.findFullById"
|
||||
match! Document.findByIdAndWebLog<PageId, Page> Table.Page pageId webLogId conn with
|
||||
match! conn.findFirstByFields<Page> Table.Page All [ idField pageId; webLogField webLogId ] with
|
||||
| Some page ->
|
||||
let! page = appendPageRevisions page
|
||||
return Some page
|
||||
@ -93,7 +97,8 @@ type SQLitePageData(conn: SqliteConnection, log: ILogger) =
|
||||
match! findById pageId webLogId with
|
||||
| Some _ ->
|
||||
do! conn.customNonQuery
|
||||
$"DELETE FROM {Table.PageRevision} WHERE page_id = @id; {Query.Delete.byId Table.Page}"
|
||||
$"{Query.delete Table.PageRevision} WHERE page_id = @id;
|
||||
{Query.byId (Query.delete Table.Page) (string pageId)}"
|
||||
[ idParam pageId ]
|
||||
return true
|
||||
| None -> return false
|
||||
@ -102,27 +107,25 @@ type SQLitePageData(conn: SqliteConnection, log: ILogger) =
|
||||
/// Find a page by its permalink for the given web log
|
||||
let findByPermalink (permalink: Permalink) webLogId =
|
||||
log.LogTrace "Page.findByPermalink"
|
||||
let linkParam = Field.EQ linkName (string permalink)
|
||||
let fields = [ webLogField webLogId; Field.Equal linkName (string permalink) ]
|
||||
conn.customSingle
|
||||
$"""{Document.Query.selectByWebLog Table.Page} AND {Query.whereByField linkParam "@link"}"""
|
||||
(addFieldParam "@link" linkParam [ webLogParam webLogId ])
|
||||
pageWithoutLinks
|
||||
(Query.byFields (Query.find Table.Page) All fields) (addFieldParams fields []) pageWithoutLinks
|
||||
|
||||
/// Find the current permalink within a set of potential prior permalinks for the given web log
|
||||
let findCurrentPermalink (permalinks: Permalink list) webLogId =
|
||||
log.LogTrace "Page.findCurrentPermalink"
|
||||
let linkSql, linkParams = inJsonArray Table.Page (nameof Page.Empty.PriorPermalinks) "link" permalinks
|
||||
conn.customSingle
|
||||
$"SELECT data ->> '{linkName}' AS permalink
|
||||
FROM {Table.Page}
|
||||
WHERE {Document.Query.whereByWebLog} AND {linkSql}"
|
||||
(webLogParam webLogId :: linkParams)
|
||||
Map.toPermalink
|
||||
let fields =
|
||||
[ webLogField webLogId
|
||||
Field.InArray (nameof Page.Empty.PriorPermalinks) Table.Page (List.map string permalinks) ]
|
||||
let query =
|
||||
(Query.statementWhere (Query.find Table.Page) (Query.whereByFields All fields))
|
||||
.Replace("SELECT data", $"SELECT data->>'{linkName}' AS permalink")
|
||||
conn.customSingle query (addFieldParams fields []) Map.toPermalink
|
||||
|
||||
/// Get all complete pages for the given web log
|
||||
let findFullByWebLog webLogId = backgroundTask {
|
||||
log.LogTrace "Page.findFullByWebLog"
|
||||
let! pages = Document.findByWebLog<Page> Table.Page webLogId conn
|
||||
let! pages = conn.findByFields<Page> Table.Page Any [ webLogField webLogId ]
|
||||
let! withRevs = pages |> List.map appendPageRevisions |> Task.WhenAll
|
||||
return List.ofArray withRevs
|
||||
}
|
||||
@ -130,18 +133,17 @@ type SQLitePageData(conn: SqliteConnection, log: ILogger) =
|
||||
/// Get all listed pages for the given web log (without revisions or text)
|
||||
let findListed webLogId =
|
||||
log.LogTrace "Page.findListed"
|
||||
let fields = [ webLogField webLogId; Field.Equal pgListName true ]
|
||||
conn.customList
|
||||
$"""{Document.Query.selectByWebLog Table.Page} AND {Query.whereByField (Field.EQ pgListName "") "true"}
|
||||
ORDER BY LOWER({titleField})"""
|
||||
[ webLogParam webLogId ]
|
||||
(fun rdr -> { fromData<Page> rdr with Text = "" })
|
||||
(sortedPages fields) (addFieldParams fields []) (fun rdr -> { fromData<Page> rdr with Text = "" })
|
||||
|
||||
/// Get a page of pages for the given web log (without revisions)
|
||||
let findPageOfPages webLogId pageNbr =
|
||||
log.LogTrace "Page.findPageOfPages"
|
||||
let field = [ webLogField webLogId ]
|
||||
conn.customList
|
||||
$"{Document.Query.selectByWebLog Table.Page} ORDER BY LOWER({titleField}) LIMIT @pageSize OFFSET @toSkip"
|
||||
[ webLogParam webLogId; SqliteParameter("@pageSize", 26); SqliteParameter("@toSkip", (pageNbr - 1) * 25) ]
|
||||
$"{sortedPages field} LIMIT @pageSize OFFSET @toSkip"
|
||||
(addFieldParams field [ sqlParam "@pageSize" 26; sqlParam "@toSkip" ((pageNbr - 1) * 25) ])
|
||||
(fun rdr -> { pageWithoutLinks rdr with Metadata = [] })
|
||||
|
||||
/// Update a page
|
||||
|
@ -16,7 +16,7 @@ type SQLitePostData(conn: SqliteConnection, log: ILogger) =
|
||||
let linkName = nameof Post.Empty.Permalink
|
||||
|
||||
/// The JSON field for when the post was published
|
||||
let publishField = $"data ->> '{nameof Post.Empty.PublishedOn}'"
|
||||
let publishName = nameof Post.Empty.PublishedOn
|
||||
|
||||
/// The name of the JSON field for the post's status
|
||||
let statName = nameof Post.Empty.Status
|
||||
@ -31,7 +31,10 @@ type SQLitePostData(conn: SqliteConnection, log: ILogger) =
|
||||
}
|
||||
|
||||
/// The SELECT statement to retrieve posts with a web log ID parameter
|
||||
let postByWebLog = Document.Query.selectByWebLog Table.Post
|
||||
let postByWebLog =
|
||||
Query.statementWhere
|
||||
(Query.find Table.Post)
|
||||
(Query.whereByFields Any [ { Field.Equal "WebLogId" "" with ParameterName = Some "@webLogId" } ])
|
||||
|
||||
/// Return a post with no revisions or prior permalinks
|
||||
let postWithoutLinks rdr =
|
||||
@ -43,7 +46,7 @@ type SQLitePostData(conn: SqliteConnection, log: ILogger) =
|
||||
|
||||
/// The SELECT statement to retrieve published posts with a web log ID parameter
|
||||
let publishedPostByWebLog =
|
||||
$"""{postByWebLog} AND {Query.whereByField (Field.EQ statName "") $"'{string Published}'"}"""
|
||||
$"{postByWebLog} AND data->>'{statName}' = '{string Published}'"
|
||||
|
||||
/// Update a post's revisions
|
||||
let updatePostRevisions (postId: PostId) oldRevs newRevs =
|
||||
@ -60,18 +63,16 @@ type SQLitePostData(conn: SqliteConnection, log: ILogger) =
|
||||
}
|
||||
|
||||
/// Count posts in a status for the given web log
|
||||
let countByStatus (status: PostStatus) webLogId =
|
||||
let countByStatus (status: PostStatus) webLogId = backgroundTask {
|
||||
log.LogTrace "Post.countByStatus"
|
||||
let statParam = Field.EQ statName (string status)
|
||||
conn.customScalar
|
||||
$"""{Document.Query.countByWebLog Table.Post} AND {Query.whereByField statParam "@status"}"""
|
||||
(addFieldParam "@status" statParam [ webLogParam webLogId ])
|
||||
(toCount >> int)
|
||||
let! count = conn.countByFields Table.Post All [ webLogField webLogId; Field.Equal statName (string status) ]
|
||||
return int count
|
||||
}
|
||||
|
||||
/// Find a post by its ID for the given web log (excluding revisions)
|
||||
let findById postId webLogId = backgroundTask {
|
||||
let findById (postId: PostId) webLogId = backgroundTask {
|
||||
log.LogTrace "Post.findById"
|
||||
match! Document.findByIdAndWebLog<PostId, Post> Table.Post postId webLogId conn with
|
||||
match! conn.findFirstByFields<Post> Table.Post All [ idField postId; webLogField webLogId ] with
|
||||
| Some post -> return Some { post with PriorPermalinks = [] }
|
||||
| None -> return None
|
||||
}
|
||||
@ -79,16 +80,14 @@ type SQLitePostData(conn: SqliteConnection, log: ILogger) =
|
||||
/// Find a post by its permalink for the given web log (excluding revisions)
|
||||
let findByPermalink (permalink: Permalink) webLogId =
|
||||
log.LogTrace "Post.findByPermalink"
|
||||
let linkParam = Field.EQ linkName (string permalink)
|
||||
let fields = [ webLogField webLogId; Field.Equal linkName (string permalink) ]
|
||||
conn.customSingle
|
||||
$"""{Document.Query.selectByWebLog Table.Post} AND {Query.whereByField linkParam "@link"}"""
|
||||
(addFieldParam "@link" linkParam [ webLogParam webLogId ])
|
||||
postWithoutLinks
|
||||
(Query.byFields (Query.find Table.Post) All fields) (addFieldParams fields []) postWithoutLinks
|
||||
|
||||
/// Find a complete post by its ID for the given web log
|
||||
let findFullById postId webLogId = backgroundTask {
|
||||
log.LogTrace "Post.findFullById"
|
||||
match! Document.findByIdAndWebLog<PostId, Post> Table.Post postId webLogId conn with
|
||||
match! conn.findFirstByFields<Post> Table.Post All [ idField postId; webLogField webLogId ] with
|
||||
| Some post ->
|
||||
let! post = appendPostRevisions post
|
||||
return Some post
|
||||
@ -101,10 +100,12 @@ type SQLitePostData(conn: SqliteConnection, log: ILogger) =
|
||||
match! findById postId webLogId with
|
||||
| Some _ ->
|
||||
do! conn.customNonQuery
|
||||
$"""DELETE FROM {Table.PostRevision} WHERE post_id = @id;
|
||||
DELETE FROM {Table.PostComment}
|
||||
WHERE {Query.whereByField (Field.EQ (nameof Comment.Empty.PostId) "") "@id"};
{Query.Delete.byId Table.Post}"""
$"""{Query.delete Table.PostRevision} WHERE post_id = @id;
{Query.byFields
(Query.delete Table.PostComment)
Any
[ { Field.EQ (nameof Comment.Empty.PostId) postId with ParameterName = Some "@id" }]};
{Query.byId (Query.delete Table.Post) (string postId)}"""
[ idParam postId ]
return true
| None -> return false
@@ -113,18 +114,18 @@ type SQLitePostData(conn: SqliteConnection, log: ILogger) =

/// Find the current permalink from a list of potential prior permalinks for the given web log
let findCurrentPermalink (permalinks: Permalink list) webLogId =
log.LogTrace "Post.findCurrentPermalink"
let linkSql, linkParams = inJsonArray Table.Post (nameof Post.Empty.PriorPermalinks) "link" permalinks
conn.customSingle
$"SELECT data ->> '{linkName}' AS permalink
FROM {Table.Post}
WHERE {Document.Query.whereByWebLog} AND {linkSql}"
(webLogParam webLogId :: linkParams)
Map.toPermalink
let fields =
[ webLogField webLogId
Field.InArray (nameof Post.Empty.PriorPermalinks) Table.Post (List.map string permalinks) ]
let query =
(Query.statementWhere (Query.find Table.Post) (Query.whereByFields All fields))
.Replace("SELECT data", $"SELECT data->>'{linkName}' AS permalink")
conn.customSingle query (addFieldParams fields []) Map.toPermalink

/// Get all complete posts for the given web log
let findFullByWebLog webLogId = backgroundTask {
log.LogTrace "Post.findFullByWebLog"
let! posts = Document.findByWebLog<Post> Table.Post webLogId conn
let! posts = conn.findByFields<Post> Table.Post Any [ webLogField webLogId ]
let! withRevs = posts |> List.map appendPostRevisions |> Task.WhenAll
return List.ofArray withRevs
}
@@ -132,21 +133,22 @@ type SQLitePostData(conn: SqliteConnection, log: ILogger) =

/// Get a page of categorized posts for the given web log (excludes revisions)
let findPageOfCategorizedPosts webLogId (categoryIds: CategoryId list) pageNbr postsPerPage =
log.LogTrace "Post.findPageOfCategorizedPosts"
let catSql, catParams = inJsonArray Table.Post (nameof Post.Empty.CategoryIds) "catId" categoryIds
let catIdField = Field.InArray (nameof Post.Empty.CategoryIds) Table.Post (List.map string categoryIds)
conn.customList
$"{publishedPostByWebLog} AND {catSql}
ORDER BY {publishField} DESC
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
(webLogParam webLogId :: catParams)
$"""{publishedPostByWebLog} AND {Query.whereByFields Any [ catIdField ]}
{Query.orderBy [ Field.Named $"{publishName} DESC" ] SQLite}
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"""
(addFieldParams [ webLogField webLogId; catIdField ] [])
postWithoutLinks

/// Get a page of posts for the given web log (excludes text and revisions)
let findPageOfPosts webLogId pageNbr postsPerPage =
log.LogTrace "Post.findPageOfPosts"
let order =
Query.orderBy
[ Field.Named $"{publishName} DESC NULLS FIRST"; Field.Named (nameof Post.Empty.UpdatedOn) ] SQLite
conn.customList
$"{postByWebLog}
ORDER BY {publishField} DESC NULLS FIRST, data ->> '{nameof Post.Empty.UpdatedOn}'
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
$"{postByWebLog}{order} LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
[ webLogParam webLogId ]
postWithoutText

@@ -154,36 +156,39 @@ type SQLitePostData(conn: SqliteConnection, log: ILogger) =
let findPageOfPublishedPosts webLogId pageNbr postsPerPage =
log.LogTrace "Post.findPageOfPublishedPosts"
conn.customList
$"{publishedPostByWebLog}
ORDER BY {publishField} DESC
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
$"""{publishedPostByWebLog}
{Query.orderBy [ Field.Named $"{publishName} DESC" ] SQLite}
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"""
[ webLogParam webLogId ]
postWithoutLinks

/// Get a page of tagged posts for the given web log (excludes revisions)
let findPageOfTaggedPosts webLogId (tag : string) pageNbr postsPerPage =
log.LogTrace "Post.findPageOfTaggedPosts"
let tagSql, tagParams = inJsonArray Table.Post (nameof Post.Empty.Tags) "tag" [ tag ]
let tagField = Field.InArray (nameof Post.Empty.Tags) Table.Post [ tag ]
conn.customList
$"{publishedPostByWebLog} AND {tagSql}
ORDER BY {publishField} DESC
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
(webLogParam webLogId :: tagParams)
$"""{publishedPostByWebLog} AND {Query.whereByFields Any [ tagField ]}
{Query.orderBy [ Field.Named $"{publishName} DESC" ] SQLite}
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"""
(addFieldParams [ webLogField webLogId; tagField ] [])
postWithoutLinks

/// Find the next newest and oldest post from a publish date for the given web log
let findSurroundingPosts webLogId (publishedOn : Instant) = backgroundTask {
log.LogTrace "Post.findSurroundingPosts"
let! older =
let adjacent op order =
let fields = [
webLogField webLogId
Field.Equal (nameof Post.Empty.Status) (string Published)
(if op = "<" then Field.Less else Field.Greater) publishName (instantParam publishedOn)
]
conn.customSingle
$"{publishedPostByWebLog} AND {publishField} < @publishedOn ORDER BY {publishField} DESC LIMIT 1"
[ webLogParam webLogId; SqliteParameter("@publishedOn", instantParam publishedOn) ]
postWithoutLinks
let! newer =
conn.customSingle
$"{publishedPostByWebLog} AND {publishField} > @publishedOn ORDER BY {publishField} LIMIT 1"
[ webLogParam webLogId; SqliteParameter("@publishedOn", instantParam publishedOn) ]
(Query.byFields (Query.find Table.Post) All fields
+ Query.orderBy [ Field.Named (publishName + order) ] SQLite + " LIMIT 1")
(addFieldParams fields [])
postWithoutLinks
let! older = adjacent "<" " DESC"
let! newer = adjacent ">" ""
return older, newer
}

@@ -11,9 +11,9 @@ open MyWebLog.Data
type SQLiteTagMapData(conn: SqliteConnection, log: ILogger) =

/// Find a tag mapping by its ID for the given web log
let findById tagMapId webLogId =
let findById (tagMapId: TagMapId) webLogId =
log.LogTrace "TagMap.findById"
Document.findByIdAndWebLog<TagMapId, TagMap> Table.TagMap tagMapId webLogId conn
conn.findFirstByFields<TagMap> Table.TagMap All [ idField tagMapId; webLogField webLogId ]

/// Delete a tag mapping for the given web log
let delete tagMapId webLogId = backgroundTask {
@@ -28,25 +28,18 @@ type SQLiteTagMapData(conn: SqliteConnection, log: ILogger) =
/// Find a tag mapping by its URL value for the given web log
let findByUrlValue (urlValue: string) webLogId =
log.LogTrace "TagMap.findByUrlValue"
let urlParam = Field.EQ (nameof TagMap.Empty.UrlValue) urlValue
conn.customSingle
$"""{Document.Query.selectByWebLog Table.TagMap} AND {Query.whereByField urlParam "@urlValue"}"""
(addFieldParam "@urlValue" urlParam [ webLogParam webLogId ])
fromData<TagMap>
conn.findFirstByFields<TagMap>
Table.TagMap All [ webLogField webLogId; Field.Equal (nameof TagMap.Empty.UrlValue) urlValue ]

/// Get all tag mappings for the given web log
let findByWebLog webLogId =
log.LogTrace "TagMap.findByWebLog"
Document.findByWebLog<TagMap> Table.TagMap webLogId conn
conn.findByFields<TagMap> Table.TagMap Any [ webLogField webLogId ]

/// Find any tag mappings in a list of tags for the given web log
let findMappingForTags (tags: string list) webLogId =
log.LogTrace "TagMap.findMappingForTags"
let mapSql, mapParams = inClause $"AND data ->> '{nameof TagMap.Empty.Tag}'" "tag" id tags
conn.customList
$"{Document.Query.selectByWebLog Table.TagMap} {mapSql}"
(webLogParam webLogId :: mapParams)
fromData<TagMap>
conn.findByFields<TagMap> Table.TagMap All [ webLogField webLogId; Field.In (nameof TagMap.Empty.Tag) tags ]

/// Save a tag mapping
let save (tagMap: TagMap) =

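For illustration only (not part of any commit above): a minimal sketch of the field-based query style the TagMap changes adopt. It composes only calls that already appear in this diff (findByFields, Field.In, All); conn, Table.TagMap, and webLogField are the repository's existing helpers, and the function name is hypothetical.

    // Sketch: find tag mappings for any of the given tags within one web log.
    // All = every listed field condition must hold; Field.In matches any value in the list.
    let tagMappingsFor (tags: string list) webLogId = backgroundTask {
        let! mappings =
            conn.findByFields<TagMap> Table.TagMap All
                [ webLogField webLogId; Field.In (nameof TagMap.Empty.Tag) tags ]
        // pair each mapped tag with the URL value it should use in links
        return mappings |> List.map (fun tagMap -> tagMap.Tag, tagMap.UrlValue)
    }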
@@ -10,8 +10,8 @@ open MyWebLog.Data
/// SQLite myWebLog theme data implementation
type SQLiteThemeData(conn : SqliteConnection, log: ILogger) =

/// The JSON field for the theme ID
let idField = $"data ->> '{nameof Theme.Empty.Id}'"
/// The name of the theme ID field
let idName = nameof Theme.Empty.Id

/// Convert a document to a theme with no template text
let withoutTemplateText (rdr: SqliteDataReader) =
@@ -25,9 +25,10 @@ type SQLiteThemeData(conn : SqliteConnection, log: ILogger) =
/// Retrieve all themes (except 'admin'; excludes template text)
let all () =
log.LogTrace "Theme.all"
let fields = [ Field.NE idName "admin" ]
conn.customList
$"{Query.selectFromTable Table.Theme} WHERE {idField} <> 'admin' ORDER BY {idField}"
[]
(Query.byFields (Query.find Table.Theme) Any fields + Query.orderBy [ Field.Named idName ] SQLite)
(addFieldParams fields [])
withoutTemplateText

/// Does a given theme exist?
@@ -43,7 +44,7 @@ type SQLiteThemeData(conn : SqliteConnection, log: ILogger) =
/// Find a theme by its ID (excludes the text of templates)
let findByIdWithoutText (themeId: ThemeId) =
log.LogTrace "Theme.findByIdWithoutText"
conn.customSingle (Query.Find.byId Table.Theme) [ idParam themeId ] withoutTemplateText
conn.customSingle (Query.byId (Query.find Table.Theme) (string themeId)) [ idParam themeId ] withoutTemplateText

/// Delete a theme by its ID
let delete themeId = backgroundTask {
@@ -51,7 +52,8 @@ type SQLiteThemeData(conn : SqliteConnection, log: ILogger) =
match! findByIdWithoutText themeId with
| Some _ ->
do! conn.customNonQuery
$"DELETE FROM {Table.ThemeAsset} WHERE theme_id = @id; {Query.Delete.byId Table.Theme}"
$"{Query.delete Table.ThemeAsset} WHERE theme_id = @id;
{Query.byId (Query.delete Table.Theme) (string themeId)}"
[ idParam themeId ]
return true
| None -> return false
@@ -89,7 +91,7 @@ type SQLiteThemeAssetData(conn : SqliteConnection, log: ILogger) =
/// Delete all assets for the given theme
let deleteByTheme (themeId: ThemeId) =
log.LogTrace "ThemeAsset.deleteByTheme"
conn.customNonQuery $"DELETE FROM {Table.ThemeAsset} WHERE theme_id = @id" [ idParam themeId ]
conn.customNonQuery $"{Query.delete Table.ThemeAsset} WHERE theme_id = @id" [ idParam themeId ]

/// Find a theme asset by its ID
let findById assetId =

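A hedged sketch of the filter-plus-order pattern used in the Theme.all change above, built only from calls visible in this diff (Query.byFields, Query.find, Query.orderBy with the SQLite dialect, addFieldParams, customList, fromData). The function name is hypothetical, and unlike the change above it maps full Theme documents rather than stripping template text.

    // Sketch: list every theme except "admin", ordered by ID.
    let allThemesExceptAdmin () =
        let fields = [ Field.NE (nameof Theme.Empty.Id) "admin" ]
        conn.customList
            (Query.byFields (Query.find Table.Theme) Any fields
             + Query.orderBy [ Field.Named (nameof Theme.Empty.Id) ] SQLite)
            (addFieldParams fields [])
            fromData<Theme>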
@ -9,45 +9,46 @@ open MyWebLog.Data
|
||||
|
||||
/// SQLite myWebLog web log data implementation
|
||||
type SQLiteWebLogData(conn: SqliteConnection, log: ILogger) =
|
||||
|
||||
|
||||
/// Add a web log
|
||||
let add webLog =
|
||||
log.LogTrace "WebLog.add"
|
||||
conn.insert<WebLog> Table.WebLog webLog
|
||||
|
||||
|
||||
/// Retrieve all web logs
|
||||
let all () =
|
||||
log.LogTrace "WebLog.all"
|
||||
conn.findAll<WebLog> Table.WebLog
|
||||
|
||||
|
||||
/// Delete a web log by its ID
|
||||
let delete webLogId =
|
||||
log.LogTrace "WebLog.delete"
|
||||
let webLogMatches = Query.whereByField (Field.EQ "WebLogId" "") "@webLogId"
|
||||
let subQuery table = $"(SELECT data ->> 'Id' FROM {table} WHERE {webLogMatches})"
|
||||
let webLogMatches =
|
||||
Query.whereByFields Any [ { Field.Equal "WebLogId" "" with ParameterName = Some "@webLogId" } ]
|
||||
let subQuery table = $"(SELECT data->>'Id' FROM {table} WHERE {webLogMatches})"
|
||||
Custom.nonQuery
|
||||
$"""DELETE FROM {Table.PostComment} WHERE data ->> 'PostId' IN {subQuery Table.Post};
|
||||
DELETE FROM {Table.PostRevision} WHERE post_id IN {subQuery Table.Post};
|
||||
DELETE FROM {Table.PageRevision} WHERE page_id IN {subQuery Table.Page};
|
||||
DELETE FROM {Table.Post} WHERE {webLogMatches};
|
||||
DELETE FROM {Table.Page} WHERE {webLogMatches};
|
||||
DELETE FROM {Table.Category} WHERE {webLogMatches};
|
||||
DELETE FROM {Table.TagMap} WHERE {webLogMatches};
|
||||
DELETE FROM {Table.Upload} WHERE web_log_id = @webLogId;
|
||||
DELETE FROM {Table.WebLogUser} WHERE {webLogMatches};
|
||||
DELETE FROM {Table.WebLog} WHERE {Query.whereById "@webLogId"}"""
|
||||
$"""{Query.delete Table.PostComment} WHERE data ->> 'PostId' IN {subQuery Table.Post};
|
||||
{Query.delete Table.PostRevision} WHERE post_id IN {subQuery Table.Post};
|
||||
{Query.delete Table.PageRevision} WHERE page_id IN {subQuery Table.Page};
|
||||
{Query.delete Table.Post} WHERE {webLogMatches};
|
||||
{Query.delete Table.Page} WHERE {webLogMatches};
|
||||
{Query.delete Table.Category} WHERE {webLogMatches};
|
||||
{Query.delete Table.TagMap} WHERE {webLogMatches};
|
||||
{Query.delete Table.WebLogUser} WHERE {webLogMatches};
|
||||
{Query.delete Table.Upload} WHERE web_log_id = @webLogId;
|
||||
{Query.delete Table.WebLog} WHERE data->>'Id' = @webLogId"""
|
||||
[ webLogParam webLogId ]
|
||||
|
||||
|
||||
/// Find a web log by its host (URL base)
|
||||
let findByHost (url: string) =
|
||||
log.LogTrace "WebLog.findByHost"
|
||||
conn.findFirstByField<WebLog> Table.WebLog (Field.EQ (nameof WebLog.Empty.UrlBase) url)
|
||||
|
||||
conn.findFirstByFields<WebLog> Table.WebLog Any [ Field.Equal (nameof WebLog.Empty.UrlBase) url ]
|
||||
|
||||
/// Find a web log by its ID
|
||||
let findById webLogId =
|
||||
log.LogTrace "WebLog.findById"
|
||||
conn.findById<WebLogId, WebLog> Table.WebLog webLogId
|
||||
|
||||
|
||||
/// Update redirect rules for a web log
|
||||
let updateRedirectRules (webLog: WebLog) =
|
||||
log.LogTrace "WebLog.updateRedirectRules"
|
||||
@@ -57,12 +58,12 @@ type SQLiteWebLogData(conn: SqliteConnection, log: ILogger) =
|
||||
let updateRssOptions (webLog: WebLog) =
|
||||
log.LogTrace "WebLog.updateRssOptions"
|
||||
conn.patchById Table.WebLog webLog.Id {| Rss = webLog.Rss |}
|
||||
|
||||
|
||||
/// Update settings for a web log
|
||||
let updateSettings (webLog: WebLog) =
|
||||
log.LogTrace "WebLog.updateSettings"
|
||||
conn.updateById Table.WebLog webLog.Id webLog
|
||||
|
||||
|
||||
interface IWebLogData with
|
||||
member _.Add webLog = add webLog
|
||||
member _.All () = all ()
|
||||
|
@@ -16,17 +16,18 @@ type SQLiteWebLogUserData(conn: SqliteConnection, log: ILogger) =
conn.insert<WebLogUser> Table.WebLogUser user

/// Find a user by their ID for the given web log
let findById userId webLogId =
let findById (userId: WebLogUserId) webLogId =
log.LogTrace "WebLogUser.findById"
Document.findByIdAndWebLog<WebLogUserId, WebLogUser> Table.WebLogUser userId webLogId conn
conn.findFirstByFields<WebLogUser> Table.WebLogUser All [ idField userId; webLogField webLogId ]

/// Delete a user if they have no posts or pages
let delete userId webLogId = backgroundTask {
log.LogTrace "WebLogUser.delete"
match! findById userId webLogId with
| Some _ ->
let! pageCount = conn.countByField Table.Page (Field.EQ (nameof Page.Empty.AuthorId) (string userId))
let! postCount = conn.countByField Table.Post (Field.EQ (nameof Post.Empty.AuthorId) (string userId))
let author = [ Field.Equal (nameof Page.Empty.AuthorId) (string userId) ]
let! pageCount = conn.countByFields Table.Page Any author
let! postCount = conn.countByFields Table.Post Any author
if pageCount + postCount > 0 then
return Error "User has pages or posts; cannot delete"
else
@@ -38,27 +39,24 @@ type SQLiteWebLogUserData(conn: SqliteConnection, log: ILogger) =
/// Find a user by their e-mail address for the given web log
let findByEmail (email: string) webLogId =
log.LogTrace "WebLogUser.findByEmail"
let emailParam = Field.EQ (nameof WebLogUser.Empty.Email) email
conn.customSingle
$"""{Document.Query.selectByWebLog Table.WebLogUser}
AND {Query.whereByField emailParam "@email"}"""
(addFieldParam "@email" emailParam [ webLogParam webLogId ])
fromData<WebLogUser>
conn.findFirstByFields
Table.WebLogUser All [ webLogField webLogId; Field.Equal (nameof WebLogUser.Empty.Email) email ]

/// Get all users for the given web log
let findByWebLog webLogId = backgroundTask {
log.LogTrace "WebLogUser.findByWebLog"
let! users = Document.findByWebLog<WebLogUser> Table.WebLogUser webLogId conn
let! users = conn.findByFields<WebLogUser> Table.WebLogUser Any [ webLogField webLogId ]
return users |> List.sortBy _.PreferredName.ToLowerInvariant()
}

/// Find the names of users by their IDs for the given web log
let findNames webLogId (userIds: WebLogUserId list) =
log.LogTrace "WebLogUser.findNames"
let nameSql, nameParams = inClause $"AND data ->> '{nameof WebLogUser.Empty.Id}'" "id" string userIds
let fields = [ webLogField webLogId; Field.In (nameof WebLogUser.Empty.Id) (List.map string userIds) ]
let query = Query.statementWhere (Query.find Table.WebLogUser) (Query.whereByFields All fields)
conn.customList
$"{Document.Query.selectByWebLog Table.WebLogUser} {nameSql}"
(webLogParam webLogId :: nameParams)
query
(addFieldParams fields [])
(fun rdr ->
let user = fromData<WebLogUser> rdr
{ Name = string user.Id; Value = user.DisplayName })

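A hedged sketch of the countByFields guard introduced above: one author field list is reused for both page and post counts before a delete is allowed. Only calls already present in this diff are used; the function name is hypothetical.

    // Sketch: a user may be deleted only when they have authored no pages and no posts.
    let canDeleteAuthor (userId: WebLogUserId) = backgroundTask {
        let author = [ Field.Equal (nameof Page.Empty.AuthorId) (string userId) ]
        let! pageCount = conn.countByFields Table.Page Any author
        let! postCount = conn.countByFields Table.Post Any author
        return pageCount + postCount = 0
    }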
@@ -1,7 +1,6 @@
|
||||
namespace MyWebLog.Data
|
||||
|
||||
open System
|
||||
open System.Threading.Tasks
|
||||
open BitBadger.Documents
|
||||
open BitBadger.Documents.Sqlite
|
||||
open Microsoft.Data.Sqlite
|
||||
@@ -13,115 +12,124 @@ open NodaTime
|
||||
|
||||
/// SQLite myWebLog data implementation
|
||||
type SQLiteData(conn: SqliteConnection, log: ILogger<SQLiteData>, ser: JsonSerializer) =
|
||||
|
||||
|
||||
/// Create tables (and their associated indexes) if they do not exist
|
||||
let ensureTables () = backgroundTask {
|
||||
|
||||
|
||||
Configuration.useSerializer (Utils.createDocumentSerializer ser)
|
||||
|
||||
|
||||
let! tables = conn.customList "SELECT name FROM sqlite_master WHERE type = 'table'" [] _.GetString(0)
|
||||
|
||||
|
||||
let needsTable table =
|
||||
not (List.contains table tables)
|
||||
|
||||
let jsonTable table =
|
||||
$"{Query.Definition.ensureTable table}; {Query.Definition.ensureKey table}"
|
||||
|
||||
let tasks =
|
||||
seq {
|
||||
// Theme tables
|
||||
if needsTable Table.Theme then jsonTable Table.Theme
|
||||
if needsTable Table.ThemeAsset then
|
||||
|
||||
let creatingTable = "Creating {Table} table..."
|
||||
|
||||
// Theme tables
|
||||
if needsTable Table.Theme then
|
||||
log.LogInformation(creatingTable, Table.Theme)
|
||||
do! conn.ensureTable Table.Theme
|
||||
|
||||
if needsTable Table.ThemeAsset then
|
||||
log.LogInformation(creatingTable, Table.ThemeAsset)
|
||||
do! conn.customNonQuery
|
||||
$"CREATE TABLE {Table.ThemeAsset} (
|
||||
theme_id TEXT NOT NULL,
|
||||
path TEXT NOT NULL,
|
||||
updated_on TEXT NOT NULL,
|
||||
data BLOB NOT NULL,
|
||||
PRIMARY KEY (theme_id, path))"
|
||||
|
||||
// Web log table
|
||||
if needsTable Table.WebLog then jsonTable Table.WebLog
|
||||
|
||||
// Category table
|
||||
if needsTable Table.Category then
|
||||
$"""{jsonTable Table.Category};
|
||||
{Query.Definition.ensureIndexOn Table.Category "web_log" [ nameof Category.Empty.WebLogId ]}"""
|
||||
|
||||
// Web log user table
|
||||
if needsTable Table.WebLogUser then
|
||||
$"""{jsonTable Table.WebLogUser};
|
||||
{Query.Definition.ensureIndexOn
|
||||
Table.WebLogUser
|
||||
"email"
|
||||
[ nameof WebLogUser.Empty.WebLogId; nameof WebLogUser.Empty.Email ]}"""
|
||||
|
||||
// Page tables
|
||||
if needsTable Table.Page then
|
||||
$"""{jsonTable Table.Page};
|
||||
{Query.Definition.ensureIndexOn Table.Page "author" [ nameof Page.Empty.AuthorId ]};
|
||||
{Query.Definition.ensureIndexOn
|
||||
Table.Page "permalink" [ nameof Page.Empty.WebLogId; nameof Page.Empty.Permalink ]}"""
|
||||
if needsTable Table.PageRevision then
|
||||
PRIMARY KEY (theme_id, path))" []
|
||||
|
||||
// Web log table
|
||||
if needsTable Table.WebLog then
|
||||
log.LogInformation(creatingTable, Table.WebLog)
|
||||
do! conn.ensureTable Table.WebLog
|
||||
|
||||
// Category table
|
||||
if needsTable Table.Category then
|
||||
log.LogInformation(creatingTable, Table.Category)
|
||||
do! conn.ensureTable Table.Category
|
||||
do! conn.ensureFieldIndex Table.Category "web_log" [ nameof Category.Empty.WebLogId ]
|
||||
|
||||
// Web log user table
|
||||
if needsTable Table.WebLogUser then
|
||||
log.LogInformation(creatingTable, Table.WebLogUser)
|
||||
do! conn.ensureTable Table.WebLogUser
|
||||
do! conn.ensureFieldIndex
|
||||
Table.WebLogUser "email" [ nameof WebLogUser.Empty.WebLogId; nameof WebLogUser.Empty.Email ]
|
||||
|
||||
// Page tables
|
||||
if needsTable Table.Page then
|
||||
log.LogInformation(creatingTable, Table.Page)
|
||||
do! conn.ensureTable Table.Page
|
||||
do! conn.ensureFieldIndex Table.Page "author" [ nameof Page.Empty.AuthorId ]
|
||||
do! conn.ensureFieldIndex Table.Page "permalink" [ nameof Page.Empty.WebLogId; nameof Page.Empty.Permalink ]
|
||||
|
||||
if needsTable Table.PageRevision then
|
||||
log.LogInformation(creatingTable, Table.PageRevision)
|
||||
do! conn.customNonQuery
|
||||
$"CREATE TABLE {Table.PageRevision} (
|
||||
page_id TEXT NOT NULL,
|
||||
as_of TEXT NOT NULL,
|
||||
revision_text TEXT NOT NULL,
|
||||
PRIMARY KEY (page_id, as_of))"
|
||||
|
||||
// Post tables
|
||||
if needsTable Table.Post then
|
||||
$"""{jsonTable Table.Post};
|
||||
{Query.Definition.ensureIndexOn Table.Post "author" [ nameof Post.Empty.AuthorId ]};
|
||||
{Query.Definition.ensureIndexOn
|
||||
Table.Post "permalink" [ nameof Post.Empty.WebLogId; nameof Post.Empty.Permalink ]};
|
||||
{Query.Definition.ensureIndexOn
|
||||
Table.Post
|
||||
"status"
|
||||
[ nameof Post.Empty.WebLogId; nameof Post.Empty.Status; nameof Post.Empty.UpdatedOn ]}"""
|
||||
// TODO: index categories by post?
|
||||
if needsTable Table.PostRevision then
|
||||
PRIMARY KEY (page_id, as_of))" []
|
||||
|
||||
// Post tables
|
||||
if needsTable Table.Post then
|
||||
log.LogInformation(creatingTable, Table.Post)
|
||||
do! conn.ensureTable Table.Post
|
||||
do! conn.ensureFieldIndex Table.Post "author" [ nameof Post.Empty.AuthorId ]
|
||||
do! conn.ensureFieldIndex Table.Post "permalink" [ nameof Post.Empty.WebLogId; nameof Post.Empty.Permalink ]
|
||||
do! conn.ensureFieldIndex
|
||||
Table.Post
|
||||
"status"
|
||||
[ nameof Post.Empty.WebLogId; nameof Post.Empty.Status; nameof Post.Empty.UpdatedOn ]
|
||||
// TODO: index categories by post?
|
||||
|
||||
if needsTable Table.PostRevision then
|
||||
log.LogInformation(creatingTable, Table.PostRevision)
|
||||
do! conn.customNonQuery
|
||||
$"CREATE TABLE {Table.PostRevision} (
|
||||
post_id TEXT NOT NULL,
|
||||
as_of TEXT NOT NULL,
|
||||
revision_text TEXT NOT NULL,
|
||||
PRIMARY KEY (post_id, as_of))"
|
||||
if needsTable Table.PostComment then
|
||||
$"""{jsonTable Table.PostComment};
|
||||
{Query.Definition.ensureIndexOn Table.PostComment "post" [ nameof Comment.Empty.PostId ]}"""
|
||||
|
||||
// Tag map table
|
||||
if needsTable Table.TagMap then
|
||||
$"""{jsonTable Table.TagMap};
|
||||
{Query.Definition.ensureIndexOn
|
||||
Table.TagMap "url" [ nameof TagMap.Empty.WebLogId; nameof TagMap.Empty.UrlValue ]}"""
|
||||
|
||||
// Uploaded file table
|
||||
if needsTable Table.Upload then
|
||||
PRIMARY KEY (post_id, as_of))" []
|
||||
|
||||
if needsTable Table.PostComment then
|
||||
log.LogInformation(creatingTable, Table.PostComment)
|
||||
do! conn.ensureTable Table.PostComment
|
||||
do! conn.ensureFieldIndex Table.PostComment "post" [ nameof Comment.Empty.PostId ]
|
||||
|
||||
// Tag map table
|
||||
if needsTable Table.TagMap then
|
||||
log.LogInformation(creatingTable, Table.TagMap)
|
||||
do! conn.ensureTable Table.TagMap
|
||||
do! conn.ensureFieldIndex Table.TagMap "url" [ nameof TagMap.Empty.WebLogId; nameof TagMap.Empty.UrlValue ]
|
||||
|
||||
// Uploaded file table
|
||||
if needsTable Table.Upload then
|
||||
log.LogInformation(creatingTable, Table.Upload)
|
||||
do! conn.customNonQuery
|
||||
$"CREATE TABLE {Table.Upload} (
|
||||
id TEXT PRIMARY KEY,
|
||||
web_log_id TEXT NOT NULL,
|
||||
path TEXT NOT NULL,
|
||||
updated_on TEXT NOT NULL,
|
||||
data BLOB NOT NULL);
|
||||
CREATE INDEX idx_{Table.Upload}_path ON {Table.Upload} (web_log_id, path)"
|
||||
|
||||
// Database version table
|
||||
if needsTable Table.DbVersion then
|
||||
CREATE INDEX idx_{Table.Upload}_path ON {Table.Upload} (web_log_id, path)" []
|
||||
|
||||
// Database version table
|
||||
if needsTable Table.DbVersion then
|
||||
log.LogInformation(creatingTable, Table.DbVersion)
|
||||
do! conn.customNonQuery
|
||||
$"CREATE TABLE {Table.DbVersion} (id TEXT PRIMARY KEY);
|
||||
INSERT INTO {Table.DbVersion} VALUES ('{Utils.Migration.currentDbVersion}')"
|
||||
}
|
||||
|> Seq.map (fun sql ->
|
||||
log.LogInformation $"""Creating {(sql.Replace("IF NOT EXISTS ", "").Split ' ')[2]} table..."""
|
||||
conn.customNonQuery sql [])
|
||||
|
||||
let! _ = Task.WhenAll tasks
|
||||
()
|
||||
INSERT INTO {Table.DbVersion} VALUES ('{Utils.Migration.currentDbVersion}')" []
|
||||
}
|
||||
|
||||
|
||||
/// Set the database version to the specified version
|
||||
let setDbVersion version =
|
||||
conn.customNonQuery $"DELETE FROM {Table.DbVersion}; INSERT INTO {Table.DbVersion} VALUES ('%s{version}')" []
|
||||
|
||||
|
||||
/// Implement the changes between v2-rc1 and v2-rc2
|
||||
let migrateV2Rc1ToV2Rc2 () = backgroundTask {
|
||||
let logStep = Utils.Migration.logStep log "v2-rc1 to v2-rc2"
|
||||
@@ -215,7 +223,7 @@ type SQLiteData(conn: SqliteConnection, log: ILogger<SQLiteData>, ser: JsonSeria
|
||||
|> Option.map (Utils.deserialize<Chapter list> ser)
|
||||
ChapterFile = Map.tryString "chapter_file" epRdr
|
||||
ChapterType = Map.tryString "chapter_type" epRdr
|
||||
ChapterWaypoints = None
|
||||
ChapterWaypoints = None
|
||||
TranscriptUrl = Map.tryString "transcript_url" epRdr
|
||||
TranscriptType = Map.tryString "transcript_type" epRdr
|
||||
TranscriptLang = Map.tryString "transcript_lang" epRdr
|
||||
@@ -233,7 +241,7 @@ type SQLiteData(conn: SqliteConnection, log: ILogger<SQLiteData>, ser: JsonSeria
|
||||
cmd.Parameters.AddWithValue("@id", string postId) ] |> ignore
|
||||
let _ = cmd.ExecuteNonQuery()
|
||||
cmd.Parameters.Clear())
|
||||
|
||||
|
||||
logStep "Migrating dates/times"
|
||||
let inst (dt: DateTime) =
|
||||
DateTime(dt.Ticks, DateTimeKind.Utc)
|
||||
@@ -400,10 +408,10 @@ type SQLiteData(conn: SqliteConnection, log: ILogger<SQLiteData>, ser: JsonSeria
|
||||
let _ = cmd.ExecuteNonQuery()
|
||||
())
|
||||
cmd.Parameters.Clear()
|
||||
|
||||
|
||||
conn.Close()
|
||||
conn.Open()
|
||||
|
||||
|
||||
logStep "Dropping old tables and columns"
|
||||
cmd.CommandText <-
|
||||
"ALTER TABLE web_log_user DROP COLUMN salt;
|
||||
@@ -412,11 +420,11 @@ type SQLiteData(conn: SqliteConnection, log: ILogger<SQLiteData>, ser: JsonSeria
|
||||
DROP TABLE page_meta;
|
||||
DROP TABLE web_log_feed_podcast"
|
||||
do! write cmd
|
||||
|
||||
|
||||
logStep "Setting database version to v2-rc2"
|
||||
do! setDbVersion "v2-rc2"
|
||||
}
|
||||
|
||||
|
||||
/// Migrate from v2-rc2 to v2
|
||||
let migrateV2Rc2ToV2 () = backgroundTask {
|
||||
Utils.Migration.logStep log "v2-rc2 to v2" "Setting database version; no migration required"
|
||||
@@ -435,7 +443,7 @@ type SQLiteData(conn: SqliteConnection, log: ILogger<SQLiteData>, ser: JsonSeria
|
||||
Utils.Migration.logStep log "v2.1 to v2.1.1" "Setting database version; no migration required"
|
||||
do! setDbVersion "v2.1.1"
|
||||
}
|
||||
|
||||
|
||||
/// Migrate from v2.1.1 to v2.2
|
||||
let migrateV2point1point1ToV2point2 () = backgroundTask {
|
||||
Utils.Migration.logStep log "v2.1.1 to v2.2" "Setting e-mail to lowercase"
|
||||
@@ -444,6 +452,14 @@ type SQLiteData(conn: SqliteConnection, log: ILogger<SQLiteData>, ser: JsonSeria
|
||||
do! setDbVersion "v2.2"
|
||||
}
|
||||
|
||||
/// Migrate from v2.2 to v3
|
||||
let migrateV2point2ToV3 () = backgroundTask {
|
||||
Utils.Migration.logStep log "v2.2 to v3" "Adding auto-OpenGraph flag to all web logs"
|
||||
do! Patch.byFields Table.WebLog Any [ Field.Exists (nameof WebLog.Empty.Id) ] {| AutoOpenGraph = true |}
|
||||
Utils.Migration.logStep log "v2.2 to v3" "Setting database version to v3"
|
||||
do! setDbVersion "v3"
|
||||
}
|
||||
|
||||
/// Migrate data among versions (up only)
|
||||
let migrate version = backgroundTask {
|
||||
let mutable v = defaultArg version ""
|
||||
@@ -451,33 +467,37 @@ type SQLiteData(conn: SqliteConnection, log: ILogger<SQLiteData>, ser: JsonSeria
|
||||
if v = "v2-rc1" then
|
||||
do! migrateV2Rc1ToV2Rc2 ()
|
||||
v <- "v2-rc2"
|
||||
|
||||
|
||||
if v = "v2-rc2" then
|
||||
do! migrateV2Rc2ToV2 ()
|
||||
v <- "v2"
|
||||
|
||||
|
||||
if v = "v2" then
|
||||
do! migrateV2ToV2point1 ()
|
||||
v <- "v2.1"
|
||||
|
||||
|
||||
if v = "v2.1" then
|
||||
do! migrateV2point1ToV2point1point1 ()
|
||||
v <- "v2.1.1"
|
||||
|
||||
|
||||
if v = "v2.1.1" then
|
||||
do! migrateV2point1point1ToV2point2 ()
|
||||
v <- "v2.2"
|
||||
|
||||
|
||||
if v = "v2.2" then
|
||||
do! migrateV2point2ToV3 ()
|
||||
v <- "v3"
|
||||
|
||||
if v <> Utils.Migration.currentDbVersion then
|
||||
log.LogWarning $"Unknown database version; assuming {Utils.Migration.currentDbVersion}"
|
||||
do! setDbVersion Utils.Migration.currentDbVersion
|
||||
}
|
||||
|
||||
|
||||
/// The connection for this instance
|
||||
member _.Conn = conn
|
||||
|
||||
|
||||
interface IData with
|
||||
|
||||
|
||||
member _.Category = SQLiteCategoryData (conn, ser, log)
|
||||
member _.Page = SQLitePageData (conn, log)
|
||||
member _.Post = SQLitePostData (conn, log)
|
||||
@@ -487,9 +507,9 @@ type SQLiteData(conn: SqliteConnection, log: ILogger<SQLiteData>, ser: JsonSeria
|
||||
member _.Upload = SQLiteUploadData (conn, log)
|
||||
member _.WebLog = SQLiteWebLogData (conn, log)
|
||||
member _.WebLogUser = SQLiteWebLogUserData (conn, log)
|
||||
|
||||
|
||||
member _.Serializer = ser
|
||||
|
||||
|
||||
member _.StartUp () = backgroundTask {
|
||||
do! ensureTables ()
|
||||
let! version = conn.customSingle<string> $"SELECT id FROM {Table.DbVersion}" [] _.GetString(0)
|
||||
|
@@ -1,11 +1,16 @@
/// Utility functions for manipulating data
/// <summary>Utility functions for manipulating data</summary>
[<RequireQualifiedAccess>]
module internal MyWebLog.Data.Utils

open MyWebLog
open MyWebLog.ViewModels

/// Create a category hierarchy from the given list of categories
/// <summary>Create a category hierarchy from the given list of categories</summary>
/// <param name="cats">The categories from which the list should be generated</param>
/// <param name="parentId">The ID of the parent category for this list</param>
/// <param name="slugBase">The base URL to use in slugs for categories at this level</param>
/// <param name="parentNames">The names of parent categories for this level</param>
/// <returns>An array of <c>DisplayCategory</c> instances sorted alphabetically by parent category</returns>
let rec orderByHierarchy (cats: Category list) parentId slugBase parentNames = seq {
for cat in cats |> List.filter (fun c -> c.ParentId = parentId) do
let fullSlug = (match slugBase with Some it -> $"{it}/" | None -> "") + cat.Slug
@@ -19,48 +24,75 @@ let rec orderByHierarchy (cats: Category list) parentId slugBase parentNames = s
yield! orderByHierarchy cats (Some cat.Id) (Some fullSlug) ([ cat.Name ] |> List.append parentNames)
}

/// Get lists of items removed from and added to the given lists
/// <summary>Get lists of items removed from and added to the given lists</summary>
/// <typeparam name="T">The type of items in the list</typeparam>
/// <typeparam name="U">The return type of the comparision function</typeparam>
/// <param name="oldItems">The prior list</param>
/// <param name="newItems">The current list</param>
/// <param name="f">The function to use when comparing items in the list</param>
/// <returns>A tuple with <c>fst</c> being added items and <c>snd</c> being removed items</returns>
let diffLists<'T, 'U when 'U: equality> oldItems newItems (f: 'T -> 'U) =
let diff compList = fun item -> not (compList |> List.exists (fun other -> f item = f other))
List.filter (diff newItems) oldItems, List.filter (diff oldItems) newItems

/// Find the revisions added and removed
/// <summary>Find the revisions added and removed</summary>
/// <param name="oldRevs">The previous revisions</param>
/// <param name="newRevs">The current revisions</param>
/// <returns>A tuple with <c>fst</c> being added revisions and <c>snd</c> being removed revisions</returns>
let diffRevisions (oldRevs: Revision list) newRevs =
diffLists oldRevs newRevs (fun rev -> $"{rev.AsOf.ToUnixTimeTicks()}|{rev.Text}")

open MyWebLog.Converters
open Newtonsoft.Json

/// Serialize an object to JSON
/// <summary>Serialize an object to JSON</summary>
/// <typeparam name="T">The type of the item being serialized</typeparam>
/// <param name="ser">The JSON serializer whose settings should be used</param>
/// <param name="item">The item to be serialized</param>
/// <returns>A string with the given object serialized to JSON</returns>
let serialize<'T> ser (item: 'T) =
JsonConvert.SerializeObject(item, Json.settings ser)

/// Deserialize a JSON string
/// <summary>Deserialize a JSON string</summary>
/// <typeparam name="T">The type of the item being deserialized</typeparam>
/// <param name="ser">The JSON serializer whose settings should be used</param>
/// <param name="value">The string with the JSON representation of the item</param>
/// <returns>The item deserialized from JSON</returns>
let deserialize<'T> (ser: JsonSerializer) value =
JsonConvert.DeserializeObject<'T>(value, Json.settings ser)

open BitBadger.Documents

/// Create a document serializer using the given JsonSerializer
/// <summary>Create a document serializer using the given JsonSerializer</summary>
/// <param name="ser">The JSON.NET serializer on which the document serializer should be based</param>
/// <returns>A document serializer instance</returns>
let createDocumentSerializer ser =
{ new IDocumentSerializer with
member _.Serialize<'T>(it: 'T) : string = serialize ser it
member _.Deserialize<'T>(it: string) : 'T = deserialize ser it
}

/// Data migration utilities
/// <summary>Data migration utilities</summary>
module Migration =

open Microsoft.Extensions.Logging

/// The current database version
let currentDbVersion = "v2.2"
/// <summary>The current database version</summary>
let currentDbVersion = "v3"

/// Log a migration step
/// <summary>Log a migration step</summary>
/// <param name="log">The logger to which the message should be logged</param>
/// <param name="migration">The migration being run</param>
/// <param name="message">The log message</param>
let logStep<'T> (log: ILogger<'T>) migration message =
log.LogInformation $"Migrating %s{migration}: %s{message}"

/// Notify the user that a backup/restore
/// <summary>Notify the user that a backup/restore is required to migrate</summary>
/// <param name="log">The logger to which the message should be logged</param>
/// <param name="oldVersion">The old (current) version of the database</param>
/// <param name="newVersion">The new (application) version required</param>
/// <param name="webLogs">All web logs contained in the database</param>
let backupAndRestoreRequired log oldVersion newVersion webLogs =
logStep log $"%s{oldVersion} to %s{newVersion}" "Requires Using Action"

@@ -74,7 +106,6 @@ module Migration =
yield! webLogs |> List.map (fun (url, slug) -> $"./myWebLog backup %s{url} {oldVersion}.%s{slug}.json") ]
|> String.concat "\n"
|> log.LogWarning

log.LogCritical "myWebLog will now exit"
exit 1 |> ignore

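A hedged usage sketch for the diffLists helper shown above (not part of any commit). Per its implementation, the first element of the returned tuple holds items present only in the first (old) list and the second holds items present only in the second (new) list; diffRevisions applies the same comparison keyed on each revision's AsOf and Text.

    // Sketch: comparing two simple lists with the identity function as the key.
    let onlyInOld, onlyInNew = Utils.diffLists [ 1; 2; 3 ] [ 2; 3; 4 ] id
    // onlyInOld = [ 1 ]  -- present in the first list but not the second
    // onlyInNew = [ 4 ]  -- present in the second list but not the first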
@@ -3,29 +3,29 @@
|
||||
open MyWebLog
|
||||
open NodaTime
|
||||
|
||||
/// A category under which a post may be identified
|
||||
/// <summary>A category under which a post may be identified</summary>
|
||||
[<CLIMutable; NoComparison; NoEquality>]
|
||||
type Category = {
|
||||
/// The ID of the category
|
||||
/// <summary>The ID of the category</summary>
|
||||
Id: CategoryId
|
||||
|
||||
/// The ID of the web log to which the category belongs
|
||||
/// <summary>The ID of the web log to which the category belongs</summary>
|
||||
WebLogId: WebLogId
|
||||
|
||||
/// The displayed name
|
||||
/// <summary>The displayed name</summary>
|
||||
Name: string
|
||||
|
||||
/// The slug (used in category URLs)
|
||||
/// <summary>The slug (used in category URLs)</summary>
|
||||
Slug: string
|
||||
|
||||
/// A longer description of the category
|
||||
/// <summary>A longer description of the category</summary>
|
||||
Description: string option
|
||||
|
||||
/// The parent ID of this category (if a subcategory)
|
||||
/// <summary>The parent ID of this category (if a subcategory)</summary>
|
||||
ParentId: CategoryId option
|
||||
} with
|
||||
|
||||
/// An empty category
|
||||
|
||||
/// <summary>An empty category</summary>
|
||||
static member Empty =
|
||||
{ Id = CategoryId.Empty
|
||||
WebLogId = WebLogId.Empty
|
||||
@@ -35,38 +35,38 @@ type Category = {
|
||||
ParentId = None }
|
||||
|
||||
|
||||
/// A comment on a post
|
||||
/// <summary>A comment on a post</summary>
|
||||
[<CLIMutable; NoComparison; NoEquality>]
|
||||
type Comment = {
|
||||
/// The ID of the comment
|
||||
/// <summary>The ID of the comment</summary>
|
||||
Id: CommentId
|
||||
|
||||
/// The ID of the post to which this comment applies
|
||||
/// <summary>The ID of the post to which this comment applies</summary>
|
||||
PostId: PostId
|
||||
|
||||
/// The ID of the comment to which this comment is a reply
|
||||
/// <summary>The ID of the comment to which this comment is a reply</summary>
|
||||
InReplyToId: CommentId option
|
||||
|
||||
/// The name of the commentor
|
||||
/// <summary>The name of the commentor</summary>
|
||||
Name: string
|
||||
|
||||
/// The e-mail address of the commentor
|
||||
/// <summary>The e-mail address of the commentor</summary>
|
||||
Email: string
|
||||
|
||||
/// The URL of the commentor's personal website
|
||||
/// <summary>The URL of the commentor's personal website</summary>
|
||||
Url: string option
|
||||
|
||||
/// The status of the comment
|
||||
/// <summary>The status of the comment</summary>
|
||||
Status: CommentStatus
|
||||
|
||||
/// When the comment was posted
|
||||
/// <summary>When the comment was posted</summary>
|
||||
PostedOn: Instant
|
||||
|
||||
/// The text of the comment
|
||||
/// <summary>The text of the comment</summary>
|
||||
Text: string
|
||||
} with
|
||||
|
||||
/// An empty comment
|
||||
|
||||
/// <summary>An empty comment</summary>
|
||||
static member Empty =
|
||||
{ Id = CommentId.Empty
|
||||
PostId = PostId.Empty
|
||||
@@ -79,50 +79,53 @@ type Comment = {
|
||||
Text = "" }
|
||||
|
||||
|
||||
/// A page (text not associated with a date/time)
|
||||
/// <summary>A page (text not associated with a date/time)</summary>
|
||||
[<CLIMutable; NoComparison; NoEquality>]
|
||||
type Page = {
|
||||
/// The ID of this page
|
||||
/// <summary>The ID of this page</summary>
|
||||
Id: PageId
|
||||
|
||||
/// The ID of the web log to which this page belongs
|
||||
/// <summary>The ID of the web log to which this page belongs</summary>
|
||||
WebLogId: WebLogId
|
||||
|
||||
/// The ID of the author of this page
|
||||
/// <summary>The ID of the author of this page</summary>
|
||||
AuthorId: WebLogUserId
|
||||
|
||||
/// The title of the page
|
||||
/// <summary>The title of the page</summary>
|
||||
Title: string
|
||||
|
||||
/// The link at which this page is displayed
|
||||
/// <summary>The link at which this page is displayed</summary>
|
||||
Permalink: Permalink
|
||||
|
||||
/// When this page was published
|
||||
/// <summary>When this page was published</summary>
|
||||
PublishedOn: Instant
|
||||
|
||||
/// When this page was last updated
|
||||
/// <summary>When this page was last updated</summary>
|
||||
UpdatedOn: Instant
|
||||
|
||||
/// Whether this page shows as part of the web log's navigation
|
||||
/// <summary>Whether this page shows as part of the web log's navigation</summary>
|
||||
IsInPageList: bool
|
||||
|
||||
/// The template to use when rendering this page
|
||||
/// <summary>The template to use when rendering this page</summary>
|
||||
Template: string option
|
||||
|
||||
/// The current text of the page
|
||||
/// <summary>The current text of the page</summary>
|
||||
Text: string
|
||||
|
||||
/// Metadata for this page
|
||||
/// <summary>Metadata for this page</summary>
|
||||
Metadata: MetaItem list
|
||||
|
||||
/// Permalinks at which this page may have been previously served (useful for migrated content)
|
||||
|
||||
/// <summary>Permalinks at which this page may have been previously served (useful for migrated content)</summary>
|
||||
PriorPermalinks: Permalink list
|
||||
|
||||
/// Revisions of this page
|
||||
/// <summary>Revisions of this page</summary>
|
||||
Revisions: Revision list
|
||||
|
||||
/// <summary>Common OpenGraph information for this post</summary>
|
||||
OpenGraph: OpenGraphProperties option
|
||||
} with
|
||||
|
||||
/// An empty page
|
||||
|
||||
/// <summary>An empty page</summary>
|
||||
static member Empty =
|
||||
{ Id = PageId.Empty
|
||||
WebLogId = WebLogId.Empty
|
||||
@@ -136,62 +139,66 @@ type Page = {
|
||||
Text = ""
|
||||
Metadata = []
|
||||
PriorPermalinks = []
|
||||
Revisions = [] }
|
||||
Revisions = []
|
||||
OpenGraph = None }
|
||||
|
||||
|
||||
/// A web log post
|
||||
/// <summary>A web log post</summary>
|
||||
[<CLIMutable; NoComparison; NoEquality>]
|
||||
type Post = {
|
||||
/// The ID of this post
|
||||
/// <summary>The ID of this post</summary>
|
||||
Id: PostId
|
||||
|
||||
/// The ID of the web log to which this post belongs
|
||||
/// <summary>The ID of the web log to which this post belongs</summary>
|
||||
WebLogId: WebLogId
|
||||
|
||||
/// The ID of the author of this post
|
||||
/// <summary>The ID of the author of this post</summary>
|
||||
AuthorId: WebLogUserId
|
||||
|
||||
/// The status
|
||||
/// <summary>The status</summary>
|
||||
Status: PostStatus
|
||||
|
||||
/// The title
|
||||
/// <summary>The title</summary>
|
||||
Title: string
|
||||
|
||||
/// The link at which the post resides
|
||||
/// <summary>The link at which the post resides</summary>
|
||||
Permalink: Permalink
|
||||
|
||||
/// The instant on which the post was originally published
|
||||
/// <summary>The instant on which the post was originally published</summary>
|
||||
PublishedOn: Instant option
|
||||
|
||||
/// The instant on which the post was last updated
|
||||
/// <summary>The instant on which the post was last updated</summary>
|
||||
UpdatedOn: Instant
|
||||
|
||||
/// The template to use in displaying the post
|
||||
/// <summary>The template to use in displaying the post</summary>
|
||||
Template: string option
|
||||
|
||||
/// The text of the post in HTML (ready to display) format
|
||||
|
||||
/// <summary>The text of the post in HTML (ready to display) format</summary>
|
||||
Text: string
|
||||
|
||||
/// The Ids of the categories to which this is assigned
|
||||
/// <summary>The Ids of the categories to which this is assigned</summary>
|
||||
CategoryIds: CategoryId list
|
||||
|
||||
/// The tags for the post
|
||||
/// <summary>The tags for the post</summary>
|
||||
Tags: string list
|
||||
|
||||
/// Podcast episode information for this post
|
||||
/// <summary>Podcast episode information for this post</summary>
|
||||
Episode: Episode option
|
||||
|
||||
/// Metadata for the post
|
||||
|
||||
/// <summary>Metadata for the post</summary>
|
||||
Metadata: MetaItem list
|
||||
|
||||
/// Permalinks at which this post may have been previously served (useful for migrated content)
|
||||
|
||||
/// <summary>Permalinks at which this post may have been previously served (useful for migrated content)</summary>
|
||||
PriorPermalinks: Permalink list
|
||||
|
||||
/// The revisions for this post
|
||||
/// <summary>The revisions for this post</summary>
|
||||
Revisions: Revision list
|
||||
|
||||
/// <summary>OpenGraph information for this post</summary>
|
||||
OpenGraph: OpenGraphProperties option
|
||||
} with
|
||||
|
||||
/// An empty post
|
||||
|
||||
/// <summary>An empty post</summary>
|
||||
static member Empty =
|
||||
{ Id = PostId.Empty
|
||||
WebLogId = WebLogId.Empty
|
||||
@@ -208,139 +215,145 @@ type Post = {
|
||||
Episode = None
|
||||
Metadata = []
|
||||
PriorPermalinks = []
|
||||
Revisions = [] }
|
||||
Revisions = []
|
||||
OpenGraph = None }
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// A mapping between a tag and its URL value, used to translate restricted characters (ex. "#1" -> "number-1")
|
||||
/// </summary>
|
||||
[<CLIMutable; NoComparison; NoEquality>]
|
||||
type TagMap = {
|
||||
/// The ID of this tag mapping
|
||||
/// <summary>The ID of this tag mapping</summary>
|
||||
Id: TagMapId
|
||||
|
||||
/// The ID of the web log to which this tag mapping belongs
|
||||
|
||||
/// <summary>The ID of the web log to which this tag mapping belongs</summary>
|
||||
WebLogId: WebLogId
|
||||
|
||||
/// The tag which should be mapped to a different value in links
|
||||
|
||||
/// <summary>The tag which should be mapped to a different value in links</summary>
|
||||
Tag: string
|
||||
|
||||
/// The value by which the tag should be linked
|
||||
|
||||
/// <summary>The value by which the tag should be linked</summary>
|
||||
UrlValue: string
|
||||
} with
|
||||
|
||||
/// An empty tag mapping
|
||||
|
||||
/// <summary>An empty tag mapping</summary>
|
||||
static member Empty =
|
||||
{ Id = TagMapId.Empty; WebLogId = WebLogId.Empty; Tag = ""; UrlValue = "" }
|
||||
|
||||
|
||||
/// A theme
|
||||
/// <summary>A theme</summary>
|
||||
[<CLIMutable; NoComparison; NoEquality>]
|
||||
type Theme = {
|
||||
/// The ID / path of the theme
|
||||
/// <summary>The ID / path of the theme</summary>
|
||||
Id: ThemeId
|
||||
|
||||
/// A long name of the theme
|
||||
|
||||
/// <summary>A long name of the theme</summary>
|
||||
Name: string
|
||||
|
||||
/// The version of the theme
|
||||
|
||||
/// <summary>The version of the theme</summary>
|
||||
Version: string
|
||||
|
||||
/// The templates for this theme
|
||||
|
||||
/// <summary>The templates for this theme</summary>
|
||||
Templates: ThemeTemplate list
|
||||
} with
|
||||
|
||||
/// An empty theme
|
||||
|
||||
/// <summary>An empty theme</summary>
|
||||
static member Empty =
|
||||
{ Id = ThemeId.Empty; Name = ""; Version = ""; Templates = [] }
|
||||
|
||||
|
||||
/// A theme asset (a file served as part of a theme, at /themes/[theme]/[asset-path])
|
||||
/// <summary>A theme asset (a file served as part of a theme, at /themes/[theme]/[asset-path])</summary>
|
||||
[<CLIMutable; NoComparison; NoEquality>]
|
||||
type ThemeAsset = {
|
||||
/// The ID of the asset (consists of theme and path)
|
||||
/// <summary>The ID of the asset (consists of theme and path)</summary>
|
||||
Id: ThemeAssetId
|
||||
|
||||
/// The updated date (set from the file date from the ZIP archive)
|
||||
|
||||
/// <summary>The updated date (set from the file date from the ZIP archive)</summary>
|
||||
UpdatedOn: Instant
|
||||
|
||||
/// The data for the asset
|
||||
|
||||
/// <summary>The data for the asset</summary>
|
||||
Data: byte array
|
||||
} with
|
||||
|
||||
/// An empty theme asset
|
||||
|
||||
/// <summary>An empty theme asset</summary>
|
||||
static member Empty =
|
||||
{ Id = ThemeAssetId.Empty; UpdatedOn = Noda.epoch; Data = [||] }
|
||||
|
||||
|
||||
/// An uploaded file
|
||||
/// <summary>An uploaded file</summary>
|
||||
[<CLIMutable; NoComparison; NoEquality>]
|
||||
type Upload = {
|
||||
/// The ID of the upload
|
||||
/// <summary>The ID of the upload</summary>
|
||||
Id: UploadId
|
||||
|
||||
/// The ID of the web log to which this upload belongs
|
||||
|
||||
/// <summary>The ID of the web log to which this upload belongs</summary>
|
||||
WebLogId: WebLogId
|
||||
|
||||
/// The link at which this upload is served
|
||||
|
||||
/// <summary>The link at which this upload is served</summary>
|
||||
Path: Permalink
|
||||
|
||||
/// The updated date/time for this upload
|
||||
|
||||
/// <summary>The updated date/time for this upload</summary>
|
||||
UpdatedOn: Instant
|
||||
|
||||
/// The data for the upload
|
||||
|
||||
/// <summary>The data for the upload</summary>
|
||||
Data: byte array
|
||||
} with
|
||||
|
||||
/// An empty upload
|
||||
|
||||
/// <summary>An empty upload</summary>
|
||||
static member Empty =
|
||||
{ Id = UploadId.Empty; WebLogId = WebLogId.Empty; Path = Permalink.Empty; UpdatedOn = Noda.epoch; Data = [||] }
|
||||
|
||||
|
||||
open Newtonsoft.Json
|
||||
|
||||
/// A web log
|
||||
/// <summary>A web log</summary>
|
||||
[<CLIMutable; NoComparison; NoEquality>]
|
||||
type WebLog = {
|
||||
/// The ID of the web log
|
||||
/// <summary>The ID of the web log</summary>
|
||||
Id: WebLogId
|
||||
|
||||
/// The name of the web log
|
||||
/// <summary>The name of the web log</summary>
|
||||
Name: string
|
||||
|
||||
/// The slug of the web log
|
||||
/// <summary>The slug of the web log</summary>
|
||||
Slug: string
|
||||
|
||||
/// A subtitle for the web log
|
||||
|
||||
/// <summary>A subtitle for the web log</summary>
|
||||
Subtitle: string option
|
||||
|
||||
/// The default page ("posts" or a page Id)
|
||||
/// <summary>The default page ("posts" or a page Id)</summary>
|
||||
DefaultPage: string
|
||||
|
||||
/// The number of posts to display on pages of posts
|
||||
/// <summary>The number of posts to display on pages of posts</summary>
|
||||
PostsPerPage: int
|
||||
|
||||
/// The ID of the theme (also the path within /themes)
|
||||
/// <summary>The ID of the theme (also the path within /themes)</summary>
|
||||
ThemeId: ThemeId
|
||||
|
||||
/// The URL base
|
||||
/// <summary>The URL base</summary>
|
||||
UrlBase: string
|
||||
|
||||
/// The time zone in which dates/times should be displayed
|
||||
/// <summary>The time zone in which dates/times should be displayed</summary>
|
||||
TimeZone: string
|
||||
|
||||
/// The RSS options for this web log
|
||||
|
||||
/// <summary>The RSS options for this web log</summary>
|
||||
Rss: RssOptions
|
||||
|
||||
/// Whether to automatically load htmx
|
||||
|
||||
/// <summary>Whether to automatically load htmx</summary>
|
||||
AutoHtmx: bool
|
||||
|
||||
/// Where uploads are placed
|
||||
|
||||
/// <summary>Where uploads are placed</summary>
|
||||
Uploads: UploadDestination
|
||||
|
||||
/// Redirect rules for this weblog
|
||||
/// <summary>Redirect rules for this weblog</summary>
|
||||
RedirectRules: RedirectRule list
|
||||
|
||||
/// <summary>Whether to automatically apply OpenGraph properties to all pages / posts</summary>
|
||||
AutoOpenGraph: bool
|
||||
} with
|
||||
|
||||
/// An empty web log
|
||||
|
||||
/// <summary>An empty web log</summary>
|
||||
static member Empty =
|
||||
{ Id = WebLogId.Empty
|
||||
Name = ""
|
||||
@@ -354,9 +367,12 @@ type WebLog = {
|
||||
Rss = RssOptions.Empty
|
||||
AutoHtmx = false
|
||||
Uploads = Database
|
||||
RedirectRules = [] }
|
||||
|
||||
RedirectRules = []
|
||||
AutoOpenGraph = true }
|
||||
|
||||
/// <summary>
|
||||
/// Any extra path where this web log is hosted (blank if web log is hosted at the root of the domain)
|
||||
/// </summary>
|
||||
[<JsonIgnore>]
|
||||
member this.ExtraPath =
|
||||
let pathParts = this.UrlBase.Split "://"
|
||||
@@ -365,16 +381,28 @@ type WebLog = {
|
||||
else
|
||||
let path = pathParts[1].Split "/"
|
||||
if path.Length > 1 then $"""/{path |> Array.skip 1 |> String.concat "/"}""" else ""
|
||||
|
||||
/// Generate an absolute URL for the given link
|
||||
|
||||
/// <summary>Generate an absolute URL for the given link</summary>
|
||||
/// <param name="permalink">The permalink for which an absolute URL should be generated</param>
|
||||
/// <returns>An absolute URL for the given link</returns>
|
||||
member this.AbsoluteUrl(permalink: Permalink) =
|
||||
$"{this.UrlBase}/{permalink}"
|
||||
|
||||
/// Generate a relative URL for the given link
|
||||
|
||||
/// <summary>Convert a string URL to an absolute URL for this web log if required</summary>
|
||||
/// <param name="url">The URL which may be translated to an absolute one</param>
|
||||
/// <returns>The given URL if it was already absolute, or a corresponding absolute URL if not</returns>
|
||||
member this.UrlToAbsolute(url: string) =
|
||||
if url.StartsWith "http" then url else this.AbsoluteUrl(Permalink url)
|
||||
|
||||
/// <summary>Generate a relative URL for the given link</summary>
|
||||
/// <param name="permalink">The permalink for which a relative URL should be generated</param>
|
||||
/// <returns>A relative URL for the given link</returns>
|
||||
member this.RelativeUrl(permalink: Permalink) =
|
||||
$"{this.ExtraPath}/{permalink}"
|
||||
|
||||
/// Convert an Instant (UTC reference) to the web log's local date/time
|
||||
|
||||
/// <summary>Convert an Instant (UTC reference) to the web log's local date/time</summary>
|
||||
/// <param name="date">The UTC <c>Instant</c> to be converted</param>
|
||||
/// <returns>The local date/time for this web log</returns>
|
||||
member this.LocalTime(date: Instant) =
|
||||
DateTimeZoneProviders.Tzdb.GetZoneOrNull this.TimeZone
|
||||
|> Option.ofObj
|
||||
@@ -382,44 +410,44 @@ type WebLog = {
|
||||
|> Option.defaultValue (date.ToDateTimeUtc())
|
||||
|
||||
|
||||
/// A user of the web log
|
||||
/// <summary>A user of the web log</summary>
|
||||
[<CLIMutable; NoComparison; NoEquality>]
|
||||
type WebLogUser = {
|
||||
/// The ID of the user
|
||||
/// <summary>The ID of the user</summary>
|
||||
Id: WebLogUserId
|
||||
|
||||
/// The ID of the web log to which this user belongs
|
||||
/// <summary>The ID of the web log to which this user belongs</summary>
|
||||
WebLogId: WebLogId
|
||||
|
||||
/// The user name (e-mail address)
|
||||
/// <summary>The user name (e-mail address)</summary>
|
||||
Email: string
|
||||
|
||||
/// The user's first name
|
||||
/// <summary>The user's first name</summary>
|
||||
FirstName: string
|
||||
|
||||
/// The user's last name
|
||||
/// <summary>The user's last name</summary>
|
||||
LastName: string
|
||||
|
||||
/// The user's preferred name
|
||||
/// <summary>The user's preferred name</summary>
|
||||
PreferredName: string
|
||||
|
||||
/// The hash of the user's password
|
||||
/// <summary>The hash of the user's password</summary>
|
||||
PasswordHash: string
|
||||
|
||||
/// The URL of the user's personal site
|
||||
/// <summary>The URL of the user's personal site</summary>
|
||||
Url: string option
|
||||
|
||||
/// The user's access level
|
||||
/// <summary>The user's access level</summary>
|
||||
AccessLevel: AccessLevel
|
||||
|
||||
/// When the user was created
|
||||
|
||||
/// <summary>When the user was created</summary>
|
||||
CreatedOn: Instant
|
||||
|
||||
/// When the user last logged on
|
||||
|
||||
/// <summary>When the user last logged on</summary>
|
||||
LastSeenOn: Instant option
|
||||
} with
|
||||
|
||||
/// An empty web log user
|
||||
|
||||
/// <summary>An empty web log user</summary>
|
||||
static member Empty =
|
||||
{ Id = WebLogUserId.Empty
|
||||
WebLogId = WebLogId.Empty
|
||||
@@ -432,8 +460,8 @@ type WebLogUser = {
|
||||
AccessLevel = Author
|
||||
CreatedOn = Noda.epoch
|
||||
LastSeenOn = None }
|
||||
|
||||
/// Get the user's displayed name
|
||||
|
||||
/// <summary>Get the user's displayed name</summary>
|
||||
[<JsonIgnore>]
|
||||
member this.DisplayName =
|
||||
(seq { (match this.PreferredName with "" -> this.FirstName | n -> n); " "; this.LastName }
|
||||
|
@@ -7,11 +7,11 @@
</ItemGroup>

<ItemGroup>
<PackageReference Include="Markdig" Version="0.37.0" />
<PackageReference Include="Markdown.ColorCode" Version="2.2.2" />
<PackageReference Include="Markdig" Version="0.41.3" />
<PackageReference Include="Markdown.ColorCode" Version="3.0.0" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="NodaTime" Version="3.1.11" />
<PackageReference Update="FSharp.Core" Version="8.0.300" />
<PackageReference Include="NodaTime" Version="3.2.2" />
<PackageReference Update="FSharp.Core" Version="9.0.300" />
</ItemGroup>

</Project>
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -90,7 +90,7 @@ let explicitRatingConverterTests = testList "ExplicitRatingConverter" [
}
]

/// Unit tests for the MarkupText type
/// Unit tests for the MarkupTextConverter type
let markupTextConverterTests = testList "MarkupTextConverter" [
let opts = JsonSerializerSettings()
opts.Converters.Add(MarkupTextConverter())
@@ -104,6 +104,20 @@ let markupTextConverterTests = testList "MarkupTextConverter" [
}
]

/// Unit tests for the OpenGraphTypeConverter type
let openGraphTypeConverterTests = testList "OpenGraphTypeConverter" [
let opts = JsonSerializerSettings()
opts.Converters.Add(OpenGraphTypeConverter())
test "succeeds when serializing" {
let after = JsonConvert.SerializeObject(VideoTvShow, opts)
Expect.equal after "\"video.tv_show\"" "OpenGraph type serialized incorrectly"
}
test "succeeds when deserializing" {
let after = JsonConvert.DeserializeObject<OpenGraphType>("\"book\"", opts)
Expect.equal after Book "OpenGraph type deserialized incorrectly"
}
]

/// Unit tests for the PermalinkConverter type
let permalinkConverterTests = testList "PermalinkConverter" [
let opts = JsonSerializerSettings()
@@ -257,6 +271,7 @@ let configureTests = test "Json.configure succeeds" {
Expect.hasCountOf ser.Converters 1u (has typeof<CustomFeedSourceConverter>) "Custom feed source converter not found"
Expect.hasCountOf ser.Converters 1u (has typeof<ExplicitRatingConverter>) "Explicit rating converter not found"
Expect.hasCountOf ser.Converters 1u (has typeof<MarkupTextConverter>) "Markup text converter not found"
Expect.hasCountOf ser.Converters 1u (has typeof<OpenGraphTypeConverter>) "OpenGraph type converter not found"
Expect.hasCountOf ser.Converters 1u (has typeof<PermalinkConverter>) "Permalink converter not found"
Expect.hasCountOf ser.Converters 1u (has typeof<PageIdConverter>) "Page ID converter not found"
Expect.hasCountOf ser.Converters 1u (has typeof<PodcastMediumConverter>) "Podcast medium converter not found"
@@ -282,6 +297,7 @@ let all = testList "Converters" [
customFeedSourceConverterTests
explicitRatingConverterTests
markupTextConverterTests
openGraphTypeConverterTests
permalinkConverterTests
pageIdConverterTests
podcastMediumConverterTests

@ -1,6 +1,6 @@
|
||||
/// <summary>
|
||||
/// Integration tests for <see cref="IPageData" /> implementations
|
||||
/// </summary>
|
||||
/// </summary>
|
||||
module PageDataTests
|
||||
|
||||
open System
|
||||
@ -35,8 +35,9 @@ let ``Add succeeds`` (data: IData) = task {
|
||||
Text = "<h1>A new page</h1>"
|
||||
Metadata = [ { Name = "Meta Item"; Value = "Meta Value" } ]
|
||||
PriorPermalinks = [ Permalink "2024/the-new-page.htm" ]
|
||||
Revisions = [ { AsOf = Noda.epoch + Duration.FromDays 3; Text = Html "<h1>A new page</h1>" } ] }
|
||||
do! data.Page.Add page
|
||||
Revisions = [ { AsOf = Noda.epoch + Duration.FromDays 3; Text = Html "<h1>A new page</h1>" } ]
|
||||
OpenGraph = Some { OpenGraphProperties.Empty with Type = Book } }
|
||||
do! data.Page.Add page
|
||||
let! stored = data.Page.FindFullById (PageId "added-page") (WebLogId "test")
|
||||
Expect.isSome stored "The page should have been added"
|
||||
let pg = stored.Value
|
||||
@ -53,6 +54,7 @@ let ``Add succeeds`` (data: IData) = task {
|
||||
Expect.equal pg.Metadata page.Metadata "Metadata not saved properly"
|
||||
Expect.equal pg.PriorPermalinks page.PriorPermalinks "Prior permalinks not saved properly"
|
||||
Expect.equal pg.Revisions page.Revisions "Revisions not saved properly"
|
||||
Expect.equal pg.OpenGraph page.OpenGraph "OpenGraph properties not saved properly"
|
||||
}
|
||||
|
||||
let ``All succeeds`` (data: IData) = task {
|
||||
|
@ -1,6 +1,6 @@
|
||||
/// <summary>
|
||||
/// Integration tests for <see cref="IPostData" /> implementations
|
||||
/// </summary>
|
||||
/// </summary>
|
||||
module PostDataTests
|
||||
|
||||
open System
|
||||
@ -54,7 +54,7 @@ let ``Add succeeds`` (data: IData) = task {
|
||||
{ Id = PostId "a-new-post"
|
||||
WebLogId = WebLogId "test"
|
||||
AuthorId = WebLogUserId "test-author"
|
||||
Status = Published
|
||||
Status = Published
|
||||
Title = "A New Test Post"
|
||||
Permalink = Permalink "2020/test-post.html"
|
||||
PublishedOn = Some (Noda.epoch + Duration.FromMinutes 1L)
|
||||
@ -66,7 +66,8 @@ let ``Add succeeds`` (data: IData) = task {
|
||||
Episode = Some { Episode.Empty with Media = "test-ep.mp3" }
|
||||
Metadata = [ { Name = "Meta"; Value = "Data" } ]
|
||||
PriorPermalinks = [ Permalink "2020/test-post-a.html" ]
|
||||
Revisions = [ { AsOf = Noda.epoch + Duration.FromMinutes 1L; Text = Html "<p>Test text here" } ] }
|
||||
Revisions = [ { AsOf = Noda.epoch + Duration.FromMinutes 1L; Text = Html "<p>Test text here" } ]
|
||||
OpenGraph = Some { OpenGraphProperties.Empty with Type = VideoMovie } }
|
||||
do! data.Post.Add post
|
||||
let! stored = data.Post.FindFullById post.Id post.WebLogId
|
||||
Expect.isSome stored "The added post should have been retrieved"
|
||||
@ -87,6 +88,7 @@ let ``Add succeeds`` (data: IData) = task {
|
||||
Expect.equal it.Metadata post.Metadata "Metadata items not saved properly"
|
||||
Expect.equal it.PriorPermalinks post.PriorPermalinks "Prior permalinks not saved properly"
|
||||
Expect.equal it.Revisions post.Revisions "Revisions not saved properly"
|
||||
Expect.equal it.OpenGraph post.OpenGraph "OpenGraph properties not saved correctly"
|
||||
}
|
||||
|
||||
let ``CountByStatus succeeds`` (data: IData) = task {
|
||||
|
@ -1,6 +1,6 @@
|
||||
/// <summary>
|
||||
/// Integration tests for <see cref="IWebLogData" /> implementations
|
||||
/// </summary>
|
||||
/// </summary>
|
||||
module WebLogDataTests
|
||||
|
||||
open System
|
||||
@ -25,14 +25,15 @@ let ``Add succeeds`` (data: IData) = task {
|
||||
Rss =
|
||||
{ IsFeedEnabled = true
|
||||
FeedName = "my-feed.xml"
|
||||
ItemsInFeed = None
|
||||
ItemsInFeed = None
|
||||
IsCategoryEnabled = false
|
||||
IsTagEnabled = false
|
||||
Copyright = Some "go for it"
|
||||
CustomFeeds = [] }
|
||||
AutoHtmx = true
|
||||
Uploads = Disk
|
||||
RedirectRules = [ { From = "/here"; To = "/there"; IsRegex = false } ] }
|
||||
RedirectRules = [ { From = "/here"; To = "/there"; IsRegex = false } ]
|
||||
AutoOpenGraph = false }
|
||||
let! webLog = data.WebLog.FindById (WebLogId "new-weblog")
|
||||
Expect.isSome webLog "The web log should have been returned"
|
||||
let it = webLog.Value
|
||||
@ -48,6 +49,7 @@ let ``Add succeeds`` (data: IData) = task {
|
||||
Expect.isTrue it.AutoHtmx "Auto htmx flag is incorrect"
|
||||
Expect.equal it.Uploads Disk "Upload destination is incorrect"
|
||||
Expect.equal it.RedirectRules [ { From = "/here"; To = "/there"; IsRegex = false } ] "Redirect rules are incorrect"
|
||||
Expect.isFalse it.AutoOpenGraph "Auto OpenGraph flag is incorrect"
|
||||
let rss = it.Rss
|
||||
Expect.isTrue rss.IsFeedEnabled "Is feed enabled flag is incorrect"
|
||||
Expect.equal rss.FeedName "my-feed.xml" "Feed name is incorrect"
|
||||
|
@ -34,6 +34,18 @@ let webLogTests = testList "WebLog" [
|
||||
"https://my.site/blog/page.html"
|
||||
"Absolute URL is incorrect"
|
||||
}
|
||||
testList "UrlToAbsolute" [
|
||||
test "succeeds for relative URL" {
|
||||
Expect.equal
|
||||
({ WebLog.Empty with UrlBase = "https://my.site" }.UrlToAbsolute "blog/page.html")
|
||||
"https://my.site/blog/page.html"
|
||||
"Absolute URL is incorrect"
|
||||
}
|
||||
test "succeeds for absolute URL" {
|
||||
Expect.equal
|
||||
(WebLog.Empty.UrlToAbsolute "https://test.units") "https://test.units" "Absolute URL is incorrect"
|
||||
}
|
||||
]
|
||||
testList "RelativeUrl" [
|
||||
test "succeeds for domain root URL" {
|
||||
Expect.equal
|
||||
|
@ -257,6 +257,383 @@ let markupTextTests = testList "MarkupText" [
|
||||
]
|
||||
]
|
||||
|
||||
/// Unit tests for the OpenGraphAudio type
|
||||
let openGraphAudioTests = testList "OpenGraphAudio" [
|
||||
let webLog = { WebLog.Empty with UrlBase = "https://unit.test/taco" }
|
||||
let transform = webLog.UrlToAbsolute
|
||||
testList "ToProperties" [
|
||||
test "succeeds with minimum required" {
|
||||
let props = Array.ofSeq ({ OpenGraphAudio.Empty with Url = "http://test.this" }.ToProperties transform)
|
||||
Expect.hasLength props 1 "There should be one property"
|
||||
Expect.equal props[0] ("og:audio", "http://test.this") "The URL was not written correctly"
|
||||
}
|
||||
test "succeeds with secure URL" {
|
||||
let props = Array.ofSeq ({ OpenGraphAudio.Empty with Url = "https://test.this" }.ToProperties transform)
|
||||
Expect.hasLength props 2 "There should be two properties"
|
||||
Expect.equal props[0] ("og:audio", "https://test.this") "The URL was not written correctly"
|
||||
Expect.equal
|
||||
props[1] ("og:audio:secure_url", "https://test.this") "The Secure URL was not written correctly"
|
||||
}
|
||||
test "succeeds with all properties filled" {
|
||||
let props = Array.ofSeq ({ Url = "http://test.this"; Type = Some "audio/mpeg" }.ToProperties transform)
|
||||
Expect.hasLength props 2 "There should be two properties"
|
||||
Expect.equal props[0] ("og:audio", "http://test.this") "The URL was not written correctly"
|
||||
Expect.equal props[1] ("og:audio:type", "audio/mpeg") "The MIME type was not written correctly"
|
||||
}
|
||||
test "succeeds when deriving AAC and transforming URL" {
|
||||
let props = Array.ofSeq ({ OpenGraphAudio.Empty with Url = "this/cool.file.aac" }.ToProperties transform)
|
||||
Expect.hasLength props 3 "There should be three properties"
|
||||
Expect.equal
|
||||
props[0]
|
||||
("og:audio", "https://unit.test/taco/this/cool.file.aac")
|
||||
"The URL was not transformed correctly"
|
||||
Expect.equal
|
||||
props[1]
|
||||
("og:audio:secure_url", "https://unit.test/taco/this/cool.file.aac")
|
||||
"The URL was not transformed correctly"
|
||||
Expect.equal props[2] ("og:audio:type", "audio/aac") "The MIME type for AAC was not derived correctly"
|
||||
}
|
||||
test "succeeds when deriving MP3" {
|
||||
let props = Array.ofSeq ({ OpenGraphAudio.Empty with Url = "an.other/song.mp3" }.ToProperties transform)
|
||||
Expect.hasLength props 3 "There should be three properties"
|
||||
Expect.equal props[2] ("og:audio:type", "audio/mpeg") "The MIME type for MP3 was not derived correctly"
|
||||
}
|
||||
test "succeeds when deriving OGA" {
|
||||
let props = Array.ofSeq ({ OpenGraphAudio.Empty with Url = "talks/speex.oga" }.ToProperties transform)
|
||||
Expect.hasLength props 3 "There should be three properties"
|
||||
Expect.equal props[2] ("og:audio:type", "audio/ogg") "The MIME type for OGA was not derived correctly"
|
||||
}
|
||||
test "succeeds when deriving WAV" {
|
||||
let props = Array.ofSeq ({ OpenGraphAudio.Empty with Url = "some/old.school.wav" }.ToProperties transform)
|
||||
Expect.hasLength props 3 "There should be three properties"
|
||||
Expect.equal props[2] ("og:audio:type", "audio/wav") "The MIME type for WAV was not derived correctly"
|
||||
}
|
||||
test "succeeds when deriving WEBA" {
|
||||
let props = Array.ofSeq ({ OpenGraphAudio.Empty with Url = "new/format/file.weba" }.ToProperties transform)
|
||||
Expect.hasLength props 3 "There should be three properties"
|
||||
Expect.equal props[2] ("og:audio:type", "audio/webm") "The MIME type for WEBA was not derived correctly"
|
||||
}
|
||||
test "succeeds when type cannot be derived" {
|
||||
let props = Array.ofSeq ({ OpenGraphAudio.Empty with Url = "profile.jpg" }.ToProperties transform)
|
||||
Expect.hasLength props 2 "There should be two properties (only URLs; no type derived)"
|
||||
}
|
||||
]
|
||||
]
|
||||
|
||||
/// Tests for the OpenGraphImage type
|
||||
let openGraphImageTests = testList "OpenGraphImage" [
|
||||
let webLog = { WebLog.Empty with UrlBase = "https://unit.test/taco" }
|
||||
let transform = webLog.UrlToAbsolute
|
||||
testList "ToProperties" [
|
||||
test "succeeds with minimum required" {
|
||||
let props = Array.ofSeq ({ OpenGraphImage.Empty with Url = "http://test.url" }.ToProperties transform)
|
||||
Expect.hasLength props 1 "There should be one property"
|
||||
Expect.equal props[0] ("og:image", "http://test.url") "The URL was not written correctly"
|
||||
}
|
||||
test "succeeds with secure URL" {
|
||||
let props = Array.ofSeq ({ OpenGraphImage.Empty with Url = "https://secure.url" }.ToProperties transform)
|
||||
Expect.hasLength props 2 "There should be two properties"
|
||||
Expect.equal props[0] ("og:image", "https://secure.url") "The URL was not written correctly"
|
||||
Expect.equal
|
||||
props[1] ("og:image:secure_url", "https://secure.url") "The Secure URL was not written correctly"
|
||||
}
|
||||
test "succeeds with all properties filled" {
|
||||
let props =
|
||||
{ Url = "http://test.this"
|
||||
Type = Some "image/jpeg"
|
||||
Width = Some 400
|
||||
Height = Some 600
|
||||
Alt = Some "This ought to be good" }.ToProperties transform
|
||||
|> Array.ofSeq
|
||||
Expect.hasLength props 5 "There should be five properties"
|
||||
Expect.equal props[0] ("og:image", "http://test.this") "The URL was not written correctly"
|
||||
Expect.equal props[1] ("og:image:type", "image/jpeg") "The MIME type was not written correctly"
|
||||
Expect.equal props[2] ("og:image:width", "400") "The width was not written correctly"
|
||||
Expect.equal props[3] ("og:image:height", "600") "The height was not written correctly"
|
||||
Expect.equal props[4] ("og:image:alt", "This ought to be good") "The alt text was not written correctly"
|
||||
}
|
||||
test "succeeds when deriving BMP and transforming URL" {
|
||||
let props = Array.ofSeq ({ OpenGraphImage.Empty with Url = "old/windows.bmp" }.ToProperties transform)
|
||||
Expect.hasLength props 3 "There should be three properties"
|
||||
Expect.equal
|
||||
props[0] ("og:image", "https://unit.test/taco/old/windows.bmp") "The URL was not transformed correctly"
|
||||
Expect.equal
|
||||
props[1]
|
||||
("og:image:secure_url", "https://unit.test/taco/old/windows.bmp")
|
||||
"The URL was not transformed correctly"
|
||||
Expect.equal props[2] ("og:image:type", "image/bmp") "The MIME type for BMP was not derived correctly"
|
||||
}
|
||||
test "succeeds when deriving GIF" {
|
||||
let props = Array.ofSeq ({ OpenGraphImage.Empty with Url = "its.a.soft.g.gif" }.ToProperties transform)
|
||||
Expect.hasLength props 3 "There should be three properties"
|
||||
Expect.equal props[2] ("og:image:type", "image/gif") "The MIME type for GIF was not derived correctly"
|
||||
}
|
||||
test "succeeds when deriving ICO" {
|
||||
let props = Array.ofSeq ({ OpenGraphImage.Empty with Url = "favicon.ico" }.ToProperties transform)
|
||||
Expect.hasLength props 3 "There should be three properties"
|
||||
Expect.equal
|
||||
props[2] ("og:image:type", "image/vnd.microsoft.icon") "The MIME type for ICO was not derived correctly"
|
||||
}
|
||||
test "succeeds when deriving JPEG" {
|
||||
let props = Array.ofSeq ({ OpenGraphImage.Empty with Url = "big/name/photo.jpeg" }.ToProperties transform)
|
||||
Expect.hasLength props 3 "There should be three properties"
|
||||
Expect.equal props[2] ("og:image:type", "image/jpeg") "The MIME type for JPEG was not derived correctly"
|
||||
}
|
||||
test "succeeds when deriving PNG" {
|
||||
let props = Array.ofSeq ({ OpenGraphImage.Empty with Url = "some/nice/graphic.png" }.ToProperties transform)
|
||||
Expect.hasLength props 3 "There should be three properties"
|
||||
Expect.equal props[2] ("og:image:type", "image/png") "The MIME type for PNG was not derived correctly"
|
||||
}
|
||||
test "succeeds when deriving SVG" {
|
||||
let props = Array.ofSeq ({ OpenGraphImage.Empty with Url = "fancy-new-vector.svg" }.ToProperties transform)
|
||||
Expect.hasLength props 3 "There should be three properties"
|
||||
Expect.equal props[2] ("og:image:type", "image/svg+xml") "The MIME type for SVG was not derived correctly"
|
||||
}
|
||||
test "succeeds when deriving TIF" {
|
||||
let props = Array.ofSeq ({ OpenGraphImage.Empty with Url = "tagged/file.tif" }.ToProperties transform)
|
||||
Expect.hasLength props 3 "There should be three properties"
|
||||
Expect.equal props[2] ("og:image:type", "image/tiff") "The MIME type for TIF was not derived correctly"
|
||||
}
|
||||
test "succeeds when deriving TIFF" {
|
||||
let props = Array.ofSeq ({ OpenGraphImage.Empty with Url = "tagged/file.two.tiff" }.ToProperties transform)
|
||||
Expect.hasLength props 3 "There should be three properties"
|
||||
Expect.equal props[2] ("og:image:type", "image/tiff") "The MIME type for TIFF was not derived correctly"
|
||||
}
|
||||
test "succeeds when deriving WEBP" {
|
||||
let props = Array.ofSeq ({ OpenGraphImage.Empty with Url = "modern/photo.webp" }.ToProperties transform)
|
||||
Expect.hasLength props 3 "There should be three properties"
|
||||
Expect.equal props[2] ("og:image:type", "image/webp") "The MIME type for WEBP was not derived correctly"
|
||||
}
|
||||
test "succeeds when type cannot be derived" {
|
||||
let props = Array.ofSeq ({ OpenGraphImage.Empty with Url = "intro.mp3" }.ToProperties transform)
|
||||
Expect.hasLength props 2 "There should be two properties (only URLs; no type derived)"
|
||||
}
|
||||
]
|
||||
]
|
||||
|
||||
/// Unit tests for the OpenGraphVideo type
|
||||
let openGraphVideoTests = testList "OpenGraphVideo" [
|
||||
let webLog = { WebLog.Empty with UrlBase = "https://unit.test/taco" }
|
||||
let transform = webLog.UrlToAbsolute
|
||||
testList "ToProperties" [
|
||||
test "succeeds with minimum required" {
|
||||
let props = Array.ofSeq ({ OpenGraphVideo.Empty with Url = "http://url.test" }.ToProperties transform)
|
||||
Expect.hasLength props 1 "There should be one property"
|
||||
Expect.equal props[0] ("og:video", "http://url.test") "The URL was not written correctly"
|
||||
}
|
||||
test "succeeds with secure URL" {
|
||||
let props = Array.ofSeq ({ OpenGraphVideo.Empty with Url = "https://url.secure" }.ToProperties transform)
|
||||
Expect.hasLength props 2 "There should be two properties"
|
||||
Expect.equal props[0] ("og:video", "https://url.secure") "The URL was not written correctly"
|
||||
Expect.equal
|
||||
props[1] ("og:video:secure_url", "https://url.secure") "The Secure URL was not written correctly"
|
||||
}
|
||||
test "succeeds with all properties filled" {
|
||||
let props =
|
||||
{ Url = "http://test.this"
|
||||
Type = Some "video/mpeg"
|
||||
Width = Some 1200
|
||||
Height = Some 900 }.ToProperties transform
|
||||
|> Array.ofSeq
|
||||
Expect.hasLength props 4 "There should be five properties"
|
||||
Expect.equal props[0] ("og:video", "http://test.this") "The URL was not written correctly"
|
||||
Expect.equal props[1] ("og:video:type", "video/mpeg") "The MIME type was not written correctly"
|
||||
Expect.equal props[2] ("og:video:width", "1200") "The width was not written correctly"
|
||||
Expect.equal props[3] ("og:video:height", "900") "The height was not written correctly"
|
||||
}
|
||||
test "succeeds when deriving AVI and transforming URL" {
|
||||
let props = Array.ofSeq ({ OpenGraphVideo.Empty with Url = "my.video.avi" }.ToProperties transform)
|
||||
Expect.hasLength props 3 "There should be three properties"
|
||||
Expect.equal
|
||||
props[0] ("og:video", "https://unit.test/taco/my.video.avi") "The URL not transformed correctly"
|
||||
Expect.equal
|
||||
props[1]
|
||||
("og:video:secure_url", "https://unit.test/taco/my.video.avi")
|
||||
"The URL not transformed correctly"
|
||||
Expect.equal props[2] ("og:video:type", "video/x-msvideo") "The MIME type for AVI was not derived correctly"
|
||||
}
|
||||
test "succeeds when deriving MP4" {
|
||||
let props = Array.ofSeq ({ OpenGraphVideo.Empty with Url = "chapters/1/01.mp4" }.ToProperties transform)
|
||||
Expect.hasLength props 3 "There should be three properties"
|
||||
Expect.equal props[2] ("og:video:type", "video/mp4") "The MIME type for MP4 was not derived correctly"
|
||||
}
|
||||
test "succeeds when deriving MPEG" {
|
||||
let props = Array.ofSeq ({ OpenGraphVideo.Empty with Url = "viral/video.mpeg" }.ToProperties transform)
|
||||
Expect.hasLength props 3 "There should be three properties"
|
||||
Expect.equal props[2] ("og:video:type", "video/mpeg") "The MIME type for MPEG was not derived correctly"
|
||||
}
|
||||
test "succeeds when deriving OGV" {
|
||||
let props =
|
||||
Array.ofSeq ({ OpenGraphVideo.Empty with Url = "open/video/example.ogv" }.ToProperties transform)
|
||||
Expect.hasLength props 3 "There should be three properties"
|
||||
Expect.equal props[2] ("og:video:type", "video/ogg") "The MIME type for OGV was not derived correctly"
|
||||
}
|
||||
test "succeeds when deriving WEBM" {
|
||||
let props = Array.ofSeq ({ OpenGraphVideo.Empty with Url = "images/hero.webm" }.ToProperties transform)
|
||||
Expect.hasLength props 3 "There should be three properties"
|
||||
Expect.equal props[2] ("og:video:type", "video/webm") "The MIME type for WEBM was not derived correctly"
|
||||
}
|
||||
test "succeeds when type cannot be derived" {
|
||||
let props = Array.ofSeq ({ OpenGraphVideo.Empty with Url = "favicon.ico" }.ToProperties transform)
|
||||
Expect.hasLength props 2 "There should be two properties (only URLs; no type derived)"
|
||||
}
|
||||
]
|
||||
]
|
||||
|
||||
/// Unit tests for the OpenGraphType type
|
||||
let openGraphTypeTests = testList "OpenGraphType" [
|
||||
testList "Parse" [
|
||||
test "succeeds for \"article\"" {
|
||||
Expect.equal (OpenGraphType.Parse "article") Article "\"article\" not parsed correctly"
|
||||
}
|
||||
test "succeeds for \"book\"" {
|
||||
Expect.equal (OpenGraphType.Parse "book") Book "\"book\" not parsed correctly"
|
||||
}
|
||||
test "succeeds for \"music.album\"" {
|
||||
Expect.equal (OpenGraphType.Parse "music.album") MusicAlbum "\"music.album\" not parsed correctly"
|
||||
}
|
||||
test "succeeds for \"music.playlist\"" {
|
||||
Expect.equal (OpenGraphType.Parse "music.playlist") MusicPlaylist "\"music.playlist\" not parsed correctly"
|
||||
}
|
||||
test "succeeds for \"music.radio_station\"" {
|
||||
Expect.equal
|
||||
(OpenGraphType.Parse "music.radio_station")
|
||||
MusicRadioStation
|
||||
"\"music.radio_station\" not parsed correctly"
|
||||
}
|
||||
test "succeeds for \"music.song\"" {
|
||||
Expect.equal (OpenGraphType.Parse "music.song") MusicSong "\"music.song\" not parsed correctly"
|
||||
}
|
||||
test "succeeds for \"payment.link\"" {
|
||||
Expect.equal (OpenGraphType.Parse "payment.link") PaymentLink "\"payment.link\" not parsed correctly"
|
||||
}
|
||||
test "succeeds for \"profile\"" {
|
||||
Expect.equal (OpenGraphType.Parse "profile") Profile "\"profile\" not parsed correctly"
|
||||
}
|
||||
test "succeeds for \"video.episode\"" {
|
||||
Expect.equal (OpenGraphType.Parse "video.episode") VideoEpisode "\"video.episode\" not parsed correctly"
|
||||
}
|
||||
test "succeeds for \"video.movie\"" {
|
||||
Expect.equal (OpenGraphType.Parse "video.movie") VideoMovie "\"video.movie\" not parsed correctly"
|
||||
}
|
||||
test "succeeds for \"video.other\"" {
|
||||
Expect.equal (OpenGraphType.Parse "video.other") VideoOther "\"video.other\" not parsed correctly"
|
||||
}
|
||||
test "succeeds for \"video.tv_show\"" {
|
||||
Expect.equal (OpenGraphType.Parse "video.tv_show") VideoTvShow "\"video.tv_show\" not parsed correctly"
|
||||
}
|
||||
test "succeeds for \"website\"" {
|
||||
Expect.equal (OpenGraphType.Parse "website") Website "\"website\" not parsed correctly"
|
||||
}
|
||||
test "fails for invalid type" {
|
||||
Expect.throwsT<ArgumentException>
|
||||
(fun () -> ignore (OpenGraphType.Parse "anthology")) "Invalid value should have raised an exception"
|
||||
}
|
||||
]
|
||||
test "Selections succeeds" {
|
||||
let it = OpenGraphType.Selections
|
||||
Expect.hasLength it 13 "There should be 13 selections"
|
||||
Expect.equal (List.head it) ("article", "Article") "Article not found where expected"
|
||||
Expect.equal (it |> List.item 1) ("book", "Book") "Book not found where expected"
|
||||
Expect.equal (it |> List.item 2) ("music.album", "Music: Album") "MusicAlbum not found where expected"
|
||||
Expect.equal (it |> List.item 3) ("music.playlist", "Music: Playlist") "MusicPlaylist not found where expected"
|
||||
Expect.equal
|
||||
(it |> List.item 4)
|
||||
("music.radio_station", "Music: Radio Station")
|
||||
"MusicRadioStation not found where expected"
|
||||
Expect.equal (it |> List.item 5) ("music.song", "Music: Song") "MusicSong not found where expected"
|
||||
Expect.equal (it |> List.item 6) ("payment.link", "Payment Link") "PaymentLink not found where expected"
|
||||
Expect.equal (it |> List.item 7) ("profile", "Profile") "Profile not found where expected"
|
||||
Expect.equal (it |> List.item 8) ("video.episode", "Video: Episode") "VideoEpisode not found where expected"
|
||||
Expect.equal (it |> List.item 9) ("video.movie", "Video: Movie") "VideoMovie not found where expected"
|
||||
Expect.equal (it |> List.item 10) ("video.other", "Video: Other") "VideoOther not found where expected"
|
||||
Expect.equal (it |> List.item 11) ("video.tv_show", "Video: TV Show") "VideoTvShow not found where expected"
|
||||
Expect.equal (it |> List.item 12) ("website", "Website") "Website not found where expected"
|
||||
}
|
||||
testList "ToString" [
|
||||
test "succeeds for Article" {
|
||||
Expect.equal (string Article) "article" "Article string incorrect"
|
||||
}
|
||||
test "succeeds for Book" {
|
||||
Expect.equal (string Book) "book" "Book string incorrect"
|
||||
}
|
||||
test "succeeds for MusicAlbum" {
|
||||
Expect.equal (string MusicAlbum) "music.album" "MusicAlbum string incorrect"
|
||||
}
|
||||
test "succeeds for MusicPlaylist" {
|
||||
Expect.equal (string MusicPlaylist) "music.playlist" "MusicPlaylist string incorrect"
|
||||
}
|
||||
test "succeeds for MusicRadioStation" {
|
||||
Expect.equal (string MusicRadioStation) "music.radio_station" "MusicRadioStation string incorrect"
|
||||
}
|
||||
test "succeeds for MusicSong" {
|
||||
Expect.equal (string MusicSong) "music.song" "MusicSong string incorrect"
|
||||
}
|
||||
test "succeeds for PaymentLink" {
|
||||
Expect.equal (string PaymentLink) "payment.link" "PaymentLink string incorrect"
|
||||
}
|
||||
test "succeeds for Profile" {
|
||||
Expect.equal (string Profile) "profile" "Profile string incorrect"
|
||||
}
|
||||
test "succeeds for VideoEpisode" {
|
||||
Expect.equal (string VideoEpisode) "video.episode" "VideoEpisode string incorrect"
|
||||
}
|
||||
test "succeeds for VideoMovie" {
|
||||
Expect.equal (string VideoMovie) "video.movie" "VideoMovie string incorrect"
|
||||
}
|
||||
test "succeeds for VideoOther" {
|
||||
Expect.equal (string VideoOther) "video.other" "VideoOther string incorrect"
|
||||
}
|
||||
test "succeeds for VideoTvShow" {
|
||||
Expect.equal (string VideoTvShow) "video.tv_show" "VideoTvShow string incorrect"
|
||||
}
|
||||
test "succeeds for Website" {
|
||||
Expect.equal (string Website) "website" "Website string incorrect"
|
||||
}
|
||||
]
|
||||
]
|
||||
|
||||
/// Unit tests for the OpenGraphProperties type
|
||||
let openGraphPropertiesTests = testList "OpenGraphProperties" [
|
||||
testList "Properties" [
|
||||
test "succeeds with minimal values" {
|
||||
let props =
|
||||
{ OpenGraphProperties.Empty with
|
||||
Image = { OpenGraphImage.Empty with Url = "http://this.aint.nothing" } }
|
||||
.ToProperties WebLog.Empty.UrlToAbsolute
|
||||
|> Array.ofSeq
|
||||
Expect.hasLength props 2 "There should have been two properties"
|
||||
Expect.equal props[0] ("og:type", "article") "Type not written correctly"
|
||||
Expect.equal props[1] ("og:image", "http://this.aint.nothing") "Image URL not written correctly"
|
||||
}
|
||||
test "succeeds with all values" {
|
||||
let props =
|
||||
{ Type = Book
|
||||
Image = { OpenGraphImage.Empty with Url = "http://this.image.file" }
|
||||
Audio = Some { OpenGraphAudio.Empty with Url = "http://this.audio.file" }
|
||||
Description = Some "This is a unit test"
|
||||
Determiner = Some "a"
|
||||
Locale = Some "en_US"
|
||||
LocaleAlternate = Some [ "en_UK"; "es_MX" ]
|
||||
Video = Some { OpenGraphVideo.Empty with Url = "http://this.video.file" }
|
||||
Other = Some [ { Name = "book.publisher"; Value = "Yep" } ] }
|
||||
.ToProperties WebLog.Empty.UrlToAbsolute
|
||||
|> Array.ofSeq
|
||||
Expect.hasLength props 10 "There should have been ten properties"
|
||||
Expect.equal props[0] ("og:type", "book") "Type not written correctly"
|
||||
Expect.equal props[1] ("og:image", "http://this.image.file") "Image URL not written correctly"
|
||||
Expect.equal props[2] ("og:description", "This is a unit test") "Description not written correctly"
|
||||
Expect.equal props[3] ("og:determiner", "a") "Determiner not written correctly"
|
||||
Expect.equal props[4] ("og:locale", "en_US") "Locale not written correctly"
|
||||
Expect.equal props[5] ("og:locale:alternate", "en_UK") "1st Alternate Locale not written correctly"
|
||||
Expect.equal props[6] ("og:locale:alternate", "es_MX") "2nd Alternate Locale not written correctly"
|
||||
Expect.equal props[7] ("og:audio", "http://this.audio.file") "Audio URL not written correctly"
|
||||
Expect.equal props[8] ("og:video", "http://this.video.file") "Video URL not written correctly"
|
||||
Expect.equal props[9] ("book.publisher", "Yep") "Other property not written correctly"
|
||||
}
|
||||
]
|
||||
]
|
||||
|
||||
/// Unit tests for the PodcastMedium type
|
||||
let podcastMediumTests = testList "PodcastMedium" [
|
||||
testList "Parse" [
|
||||
@ -407,6 +784,11 @@ let all = testList "SupportTypes" [
|
||||
explicitRatingTests
|
||||
episodeTests
|
||||
markupTextTests
|
||||
openGraphAudioTests
|
||||
openGraphImageTests
|
||||
openGraphVideoTests
|
||||
openGraphTypeTests
|
||||
openGraphPropertiesTests
|
||||
podcastMediumTests
|
||||
postStatusTests
|
||||
customFeedSourceTests
|
||||
|
@ -206,6 +206,25 @@ let private testFullPage =
|
||||
Revisions =
|
||||
[ { AsOf = Noda.epoch + Duration.FromHours 1; Text = Markdown "# Howdy!" }
|
||||
{ AsOf = Noda.epoch; Text = Html "<h1>howdy</h1>" } ]
|
||||
OpenGraph =
|
||||
Some { Type = Book
|
||||
Image =
|
||||
{ Url = "https://unit.test/it.png"
|
||||
Type = Some "test/png"
|
||||
Width = Some 1
|
||||
Height = Some 2
|
||||
Alt = Some "huh" }
|
||||
Audio = Some { Url = "https://unit.test/it.mp3"; Type = Some "test/mpeg-3" }
|
||||
Description = Some "This is cool"
|
||||
Determiner = Some "the"
|
||||
Locale = Some "en-US"
|
||||
LocaleAlternate = Some [ "es-MX"; "es-ES" ]
|
||||
Video =
|
||||
Some { Url = "https://unit.test/it.mp4"
|
||||
Type = Some "test/mpeg-4"
|
||||
Width = Some 5
|
||||
Height = Some 6 }
|
||||
Other = Some [ { Name = "the-other"; Value = "the-value" } ] }
|
||||
Metadata = [ { Name = "Test"; Value = "me" }; { Name = "Two"; Value = "2" } ] }
|
||||
|
||||
/// A full post used to test various models
|
||||
@ -221,6 +240,25 @@ let testFullPost =
|
||||
Text = "<p>A post!</p>"
|
||||
CategoryIds = [ CategoryId "cat-a"; CategoryId "cat-b"; CategoryId "cat-n" ]
|
||||
Tags = [ "demo"; "post" ]
|
||||
OpenGraph =
|
||||
Some { Type = MusicAlbum
|
||||
Image =
|
||||
{ Url = "https://unit.test/it.jpg"
|
||||
Type = Some "test/jpg"
|
||||
Width = Some 100
|
||||
Height = Some 200
|
||||
Alt = Some "it is a jpeg" }
|
||||
Audio = Some { Url = "https://unit.test/that.mp3"; Type = Some "test/mp3" }
|
||||
Description = Some "Just a post"
|
||||
Determiner = Some "a"
|
||||
Locale = Some "es-MX"
|
||||
LocaleAlternate = Some [ "es-ES"; "en-EN" ]
|
||||
Video =
|
||||
Some { Url = "https://unit.test/that.mp4"
|
||||
Type = Some "test/mp4"
|
||||
Width = Some 50
|
||||
Height = Some 60 }
|
||||
Other = Some [ { Name = "an-other"; Value = "a-value" } ] }
|
||||
Metadata = [ { Name = "A Meta"; Value = "A Value" } ]
|
||||
Revisions =
|
||||
[ { AsOf = Noda.epoch + Duration.FromDays 365; Text = Html "<p>A post!</p>" }
|
||||
@ -233,7 +271,7 @@ let testFullPost =
|
||||
ImageUrl = Some "uploads/podcast-cover.jpg"
|
||||
Subtitle = Some "Narration"
|
||||
Explicit = Some Clean
|
||||
Chapters = None
|
||||
Chapters = None
|
||||
ChapterFile = Some "uploads/1970/01/chapters.txt"
|
||||
ChapterType = Some "chapters"
|
||||
ChapterWaypoints = Some true
|
||||
@ -266,9 +304,28 @@ let editCommonModelTests = testList "EditCommonModel" [
|
||||
Expect.equal model.Template "" "Template not filled properly"
|
||||
Expect.equal model.Source "HTML" "Source not filled properly"
|
||||
Expect.equal model.Text "" "Text not set properly"
|
||||
Expect.equal model.MetaNames.Length 1 "MetaNames should have one entry"
|
||||
Expect.isFalse model.AssignOpenGraph "OpenGraph properties should not have been assigned"
|
||||
Expect.equal model.OpenGraphType "" "OpenGraph type not filled properly"
|
||||
Expect.equal model.OpenGraphImageUrl "" "OpenGraph image URL not filled properly"
|
||||
Expect.equal model.OpenGraphImageType "" "OpenGraph image type not filled properly"
|
||||
Expect.equal model.OpenGraphImageWidth "" "OpenGraph image width not filled properly"
|
||||
Expect.equal model.OpenGraphImageHeight "" "OpenGraph image height not filled properly"
|
||||
Expect.equal model.OpenGraphImageAlt "" "OpenGraph image alt text not filled properly"
|
||||
Expect.equal model.OpenGraphAudioUrl "" "OpenGraph audio URL not filled properly"
|
||||
Expect.equal model.OpenGraphAudioType "" "OpenGraph audio type not filled properly"
|
||||
Expect.equal model.OpenGraphDescription "" "OpenGraph description not filled properly"
|
||||
Expect.equal model.OpenGraphDeterminer "" "OpenGraph determiner not filled properly"
|
||||
Expect.equal model.OpenGraphLocale "" "OpenGraph locale not filled properly"
|
||||
Expect.equal model.OpenGraphAlternateLocales "" "OpenGraph alt locales not filled properly"
|
||||
Expect.equal model.OpenGraphVideoUrl "" "OpenGraph video URL not filled properly"
|
||||
Expect.equal model.OpenGraphVideoType "" "OpenGraph video type not filled properly"
|
||||
Expect.equal model.OpenGraphVideoWidth "" "OpenGraph video width not filled properly"
|
||||
Expect.equal model.OpenGraphVideoHeight "" "OpenGraph video height not filled properly"
|
||||
Expect.isEmpty model.OpenGraphExtraNames "OpenGraph extra names not filled properly"
|
||||
Expect.isEmpty model.OpenGraphExtraValues "OpenGraph extra values not filled properly"
|
||||
Expect.hasLength model.MetaNames 1 "MetaNames should have one entry"
|
||||
Expect.equal model.MetaNames[0] "" "Meta name not set properly"
|
||||
Expect.equal model.MetaValues.Length 1 "MetaValues should have one entry"
|
||||
Expect.hasLength model.MetaValues 1 "MetaValues should have one entry"
|
||||
Expect.equal model.MetaValues[0] "" "Meta value not set properly"
|
||||
}
|
||||
test "succeeds for filled page" {
|
||||
@ -280,10 +337,31 @@ let editCommonModelTests = testList "EditCommonModel" [
|
||||
Expect.equal model.Template "bork" "Template not filled properly"
|
||||
Expect.equal model.Source "Markdown" "Source not filled properly"
|
||||
Expect.equal model.Text "# Howdy!" "Text not filled properly"
|
||||
Expect.equal model.MetaNames.Length 2 "MetaNames should have two entries"
|
||||
Expect.isTrue model.AssignOpenGraph "OpenGraph properties should have been assigned"
|
||||
Expect.equal model.OpenGraphType "book" "OpenGraph type not filled properly"
|
||||
Expect.equal model.OpenGraphImageUrl "https://unit.test/it.png" "OpenGraph image URL not filled properly"
|
||||
Expect.equal model.OpenGraphImageType "test/png" "OpenGraph image type not filled properly"
|
||||
Expect.equal model.OpenGraphImageWidth "1" "OpenGraph image width not filled properly"
|
||||
Expect.equal model.OpenGraphImageHeight "2" "OpenGraph image height not filled properly"
|
||||
Expect.equal model.OpenGraphImageAlt "huh" "OpenGraph image alt text not filled properly"
|
||||
Expect.equal model.OpenGraphAudioUrl "https://unit.test/it.mp3" "OpenGraph audio URL not filled properly"
|
||||
Expect.equal model.OpenGraphAudioType "test/mpeg-3" "OpenGraph audio type not filled properly"
|
||||
Expect.equal model.OpenGraphDescription "This is cool" "OpenGraph description not filled properly"
|
||||
Expect.equal model.OpenGraphDeterminer "the" "OpenGraph determiner not filled properly"
|
||||
Expect.equal model.OpenGraphLocale "en-US" "OpenGraph locale not filled properly"
|
||||
Expect.equal model.OpenGraphAlternateLocales "es-MX, es-ES" "OpenGraph alt locales not filled properly"
|
||||
Expect.equal model.OpenGraphVideoUrl "https://unit.test/it.mp4" "OpenGraph video URL not filled properly"
|
||||
Expect.equal model.OpenGraphVideoType "test/mpeg-4" "OpenGraph video type not filled properly"
|
||||
Expect.equal model.OpenGraphVideoWidth "5" "OpenGraph video width not filled properly"
|
||||
Expect.equal model.OpenGraphVideoHeight "6" "OpenGraph video height not filled properly"
|
||||
Expect.hasLength model.OpenGraphExtraNames 1 "OpenGraph extra names should have had 1 entry"
|
||||
Expect.equal model.OpenGraphExtraNames[0] "the-other" "OpenGraph extra names not filled properly"
|
||||
Expect.hasLength model.OpenGraphExtraValues 1 "OpenGraph extra values should have had 1 entry"
|
||||
Expect.equal model.OpenGraphExtraValues[0] "the-value" "OpenGraph extra values not filled properly"
|
||||
Expect.hasLength model.MetaNames 2 "MetaNames should have two entries"
|
||||
Expect.equal model.MetaNames[0] "Test" "Meta name 0 not set properly"
|
||||
Expect.equal model.MetaNames[1] "Two" "Meta name 1 not set properly"
|
||||
Expect.equal model.MetaValues.Length 2 "MetaValues should have two entries"
|
||||
Expect.hasLength model.MetaValues 2 "MetaValues should have two entries"
|
||||
Expect.equal model.MetaValues[0] "me" "Meta value 0 not set properly"
|
||||
Expect.equal model.MetaValues[1] "2" "Meta value 1 not set properly"
|
||||
}
|
||||
@ -298,9 +376,28 @@ let editCommonModelTests = testList "EditCommonModel" [
|
||||
Expect.equal model.Source "HTML" "Source not filled properly"
|
||||
Expect.equal model.Text "" "Text not filled properly"
|
||||
Expect.equal model.Template "" "Template not filled properly"
|
||||
Expect.equal model.MetaNames.Length 1 "MetaNames not filled properly"
|
||||
Expect.isFalse model.AssignOpenGraph "OpenGraph properties should not have been assigned"
|
||||
Expect.equal model.OpenGraphType "" "OpenGraph type not filled properly"
|
||||
Expect.equal model.OpenGraphImageUrl "" "OpenGraph image URL not filled properly"
|
||||
Expect.equal model.OpenGraphImageType "" "OpenGraph image type not filled properly"
|
||||
Expect.equal model.OpenGraphImageWidth "" "OpenGraph image width not filled properly"
|
||||
Expect.equal model.OpenGraphImageHeight "" "OpenGraph image height not filled properly"
|
||||
Expect.equal model.OpenGraphImageAlt "" "OpenGraph image alt text not filled properly"
|
||||
Expect.equal model.OpenGraphAudioUrl "" "OpenGraph audio URL not filled properly"
|
||||
Expect.equal model.OpenGraphAudioType "" "OpenGraph audio type not filled properly"
|
||||
Expect.equal model.OpenGraphDescription "" "OpenGraph description not filled properly"
|
||||
Expect.equal model.OpenGraphDeterminer "" "OpenGraph determiner not filled properly"
|
||||
Expect.equal model.OpenGraphLocale "" "OpenGraph locale not filled properly"
|
||||
Expect.equal model.OpenGraphAlternateLocales "" "OpenGraph alt locales not filled properly"
|
||||
Expect.equal model.OpenGraphVideoUrl "" "OpenGraph video URL not filled properly"
|
||||
Expect.equal model.OpenGraphVideoType "" "OpenGraph video type not filled properly"
|
||||
Expect.equal model.OpenGraphVideoWidth "" "OpenGraph video width not filled properly"
|
||||
Expect.equal model.OpenGraphVideoHeight "" "OpenGraph video height not filled properly"
|
||||
Expect.isEmpty model.OpenGraphExtraNames "OpenGraph extra names not filled properly"
|
||||
Expect.isEmpty model.OpenGraphExtraValues "OpenGraph extra values not filled properly"
|
||||
Expect.hasLength model.MetaNames 1 "MetaNames not filled properly"
|
||||
Expect.equal model.MetaNames[0] "" "Meta name 0 not filled properly"
|
||||
Expect.equal model.MetaValues.Length 1 "MetaValues not filled properly"
|
||||
Expect.hasLength model.MetaValues 1 "MetaValues not filled properly"
|
||||
Expect.equal model.MetaValues[0] "" "Meta value 0 not filled properly"
|
||||
}
|
||||
test "succeeds for full post with external chapters" {
|
||||
@ -312,12 +409,111 @@ let editCommonModelTests = testList "EditCommonModel" [
|
||||
Expect.equal model.Source "HTML" "Source not filled properly"
|
||||
Expect.equal model.Text "<p>A post!</p>" "Text not filled properly"
|
||||
Expect.equal model.Template "demo" "Template not filled properly"
|
||||
Expect.equal model.MetaNames.Length 1 "MetaNames not filled properly"
|
||||
Expect.isTrue model.AssignOpenGraph "OpenGraph properties should have been assigned"
|
||||
Expect.equal model.OpenGraphType "music.album" "OpenGraph type not filled properly"
|
||||
Expect.equal model.OpenGraphImageUrl "https://unit.test/it.jpg" "OpenGraph image URL not filled properly"
|
||||
Expect.equal model.OpenGraphImageType "test/jpg" "OpenGraph image type not filled properly"
|
||||
Expect.equal model.OpenGraphImageWidth "100" "OpenGraph image width not filled properly"
|
||||
Expect.equal model.OpenGraphImageHeight "200" "OpenGraph image height not filled properly"
|
||||
Expect.equal model.OpenGraphImageAlt "it is a jpeg" "OpenGraph image alt text not filled properly"
|
||||
Expect.equal model.OpenGraphAudioUrl "https://unit.test/that.mp3" "OpenGraph audio URL not filled properly"
|
||||
Expect.equal model.OpenGraphAudioType "test/mp3" "OpenGraph audio type not filled properly"
|
||||
Expect.equal model.OpenGraphDescription "Just a post" "OpenGraph description not filled properly"
|
||||
Expect.equal model.OpenGraphDeterminer "a" "OpenGraph determiner not filled properly"
|
||||
Expect.equal model.OpenGraphLocale "es-MX" "OpenGraph locale not filled properly"
|
||||
Expect.equal model.OpenGraphAlternateLocales "es-ES, en-EN" "OpenGraph alt locales not filled properly"
|
||||
Expect.equal model.OpenGraphVideoUrl "https://unit.test/that.mp4" "OpenGraph video URL not filled properly"
|
||||
Expect.equal model.OpenGraphVideoType "test/mp4" "OpenGraph video type not filled properly"
|
||||
Expect.equal model.OpenGraphVideoWidth "50" "OpenGraph video width not filled properly"
|
||||
Expect.equal model.OpenGraphVideoHeight "60" "OpenGraph video height not filled properly"
|
||||
Expect.hasLength model.OpenGraphExtraNames 1 "OpenGraph extra names should have had 1 entry"
|
||||
Expect.equal model.OpenGraphExtraNames[0] "an-other" "OpenGraph extra names not filled properly"
|
||||
Expect.hasLength model.OpenGraphExtraValues 1 "OpenGraph extra values should have had 1 entry"
|
||||
Expect.equal model.OpenGraphExtraValues[0] "a-value" "OpenGraph extra values not filled properly"
|
||||
Expect.hasLength model.MetaNames 1 "MetaNames not filled properly"
|
||||
Expect.equal model.MetaNames[0] "A Meta" "Meta name 0 not filled properly"
|
||||
Expect.equal model.MetaValues.Length 1 "MetaValues not filled properly"
|
||||
Expect.hasLength model.MetaValues 1 "MetaValues not filled properly"
|
||||
Expect.equal model.MetaValues[0] "A Value" "Meta value 0 not filled properly"
|
||||
}
|
||||
]
|
||||
testList "ToOpenGraph" [
|
||||
test "succeeds when OpenGraph properties are not assigned" {
|
||||
Expect.isNone (EditCommonModel().ToOpenGraph()) "No OpenGraph properties should have returned None"
|
||||
}
|
||||
test "succeeds when minimal OpenGraph properties are assigned" {
|
||||
let model = EditCommonModel()
|
||||
model.AssignOpenGraph <- true
|
||||
model.OpenGraphType <- string Article
|
||||
model.OpenGraphImageUrl <- "https://unit.test/img.tiff"
|
||||
let tryOg = model.ToOpenGraph()
|
||||
Expect.isSome tryOg "There should have been a set of OpenGraph properties returned"
|
||||
let og = tryOg.Value
|
||||
Expect.equal og.Type Article "OpenGraph type not filled correctly"
|
||||
Expect.equal og.Image.Url "https://unit.test/img.tiff" "OpenGraph image URL not filled properly"
|
||||
Expect.isNone og.Image.Type "OpenGraph image type should have been None"
|
||||
Expect.isNone og.Image.Width "OpenGraph image width should have been None"
|
||||
Expect.isNone og.Image.Height "OpenGraph image height should have been None"
|
||||
Expect.isNone og.Image.Alt "OpenGraph image alt text should have been None"
|
||||
Expect.isNone og.Audio "OpenGraph audio should have been None"
|
||||
Expect.isNone og.Description "OpenGraph description should have been None"
|
||||
Expect.isNone og.Determiner "OpenGraph determiner should have been None"
|
||||
Expect.isNone og.Locale "OpenGraph locale should have been None"
|
||||
Expect.isNone og.LocaleAlternate "OpenGraph alt locales should have been None"
|
||||
Expect.isNone og.Video "OpenGraph video should have been None"
|
||||
Expect.isNone og.Other "OpenGraph other properties should have been None"
|
||||
}
|
||||
test "succeeds when all OpenGraph properties are assigned" {
|
||||
let model = EditCommonModel()
|
||||
model.AssignOpenGraph <- true
|
||||
model.OpenGraphType <- string VideoMovie
|
||||
model.OpenGraphImageUrl <- "https://unit.test/still.jpg"
|
||||
model.OpenGraphImageType <- "still/jpg"
|
||||
model.OpenGraphImageWidth <- "17"
|
||||
model.OpenGraphImageHeight <- "24"
|
||||
model.OpenGraphImageAlt <- "a still from the film"
|
||||
model.OpenGraphAudioUrl <- "https://unit.test/movie.mp3"
|
||||
model.OpenGraphAudioType <- "audio/mp-three"
|
||||
model.OpenGraphDescription <- "Powerful. Stunning."
|
||||
model.OpenGraphDeterminer <- "the"
|
||||
model.OpenGraphLocale <- "en-EN"
|
||||
model.OpenGraphAlternateLocales <- "es-ES, pt-PT"
|
||||
model.OpenGraphVideoUrl <- "https://unit.test/movie.avi"
|
||||
model.OpenGraphVideoType <- "video/outdated"
|
||||
model.OpenGraphVideoWidth <- "1024"
|
||||
model.OpenGraphVideoHeight <- "768"
|
||||
model.OpenGraphExtraNames <- [| "og:duration"; "og:rating" |]
|
||||
model.OpenGraphExtraValues <- [| "1:30:27"; "G" |]
|
||||
let tryOg = model.ToOpenGraph()
|
||||
Expect.isSome tryOg "There should have been a set of OpenGraph properties returned"
|
||||
let og = tryOg.Value
|
||||
Expect.equal og.Type VideoMovie "OpenGraph type not filled correctly"
|
||||
Expect.equal og.Image.Url "https://unit.test/still.jpg" "OpenGraph image URL not filled properly"
|
||||
Expect.equal og.Image.Type (Some "still/jpg") "OpenGraph image type not filled properly"
|
||||
Expect.equal og.Image.Width (Some 17) "OpenGraph image width not filled properly"
|
||||
Expect.equal og.Image.Height (Some 24) "OpenGraph image height not filled properly"
|
||||
Expect.equal og.Image.Alt (Some "a still from the film") "OpenGraph image alt text not filled properly"
|
||||
Expect.isSome og.Audio "OpenGraph audio should have been filled"
|
||||
Expect.equal og.Audio.Value.Url "https://unit.test/movie.mp3" "OpenGraph audio URL not filled properly"
|
||||
Expect.equal og.Audio.Value.Type (Some "audio/mp-three") "OpenGraph audio type not filled properly"
|
||||
Expect.equal og.Description (Some "Powerful. Stunning.") "OpenGraph description not filled properly"
|
||||
Expect.equal og.Determiner (Some "the") "OpenGraph determiner not filled properly"
|
||||
Expect.equal og.Locale (Some "en-EN") "OpenGraph locale not filled properly"
|
||||
Expect.isSome og.LocaleAlternate "OpenGraph alt locales not filled properly"
|
||||
Expect.hasLength og.LocaleAlternate.Value 2 "There should have been 2 alternate locales"
|
||||
Expect.equal og.LocaleAlternate.Value [ "es-ES"; "pt-PT" ] "OpenGraph alt locales are incorrect"
|
||||
Expect.isSome og.Video "OpenGraph video should have been filled"
|
||||
Expect.equal og.Video.Value.Url "https://unit.test/movie.avi" "OpenGraph video URL not filled properly"
|
||||
Expect.equal og.Video.Value.Type (Some "video/outdated") "OpenGraph video type not filled properly"
|
||||
Expect.equal og.Video.Value.Width (Some 1024) "OpenGraph video width not filled properly"
|
||||
Expect.equal og.Video.Value.Height (Some 768) "OpenGraph video height not filled properly"
|
||||
Expect.isSome og.Other "OpenGraph other properties should have been filled"
|
||||
Expect.hasLength og.Other.Value 2 "There should have been 2 extra properties"
|
||||
Expect.equal
|
||||
og.Other.Value
|
||||
[ { Name = "og:duration"; Value = "1:30:27" }; { Name = "og:rating"; Value = "G" } ]
|
||||
"OpenGraph extra properties not filled properly"
|
||||
}
|
||||
]
|
||||
]
|
||||
|
||||
/// Unit tests for the EditCustomFeedModel type
|
||||
@ -514,11 +710,13 @@ let editPageModelTests = testList "EditPageModel" [
|
||||
let model = EditPageModel.FromPage { Page.Empty with Id = PageId "abc" }
|
||||
Expect.equal model.Id "abc" "Parent fields not filled properly"
|
||||
Expect.isFalse model.IsShownInPageList "IsShownInPageList should not have been set"
|
||||
Expect.isFalse model.AssignOpenGraph "OpenGraph properties should not be assigned"
|
||||
}
|
||||
test "succeeds for filled page" {
|
||||
let model = EditPageModel.FromPage testFullPage
|
||||
Expect.equal model.Id "the-page" "Parent fields not filled properly"
|
||||
Expect.isTrue model.IsShownInPageList "IsShownInPageList should have been set"
|
||||
Expect.isTrue model.AssignOpenGraph "OpenGraph properties should have been assigned"
|
||||
}
|
||||
]
|
||||
testList "UpdatePage" [
|
||||
@ -534,13 +732,12 @@ let editPageModelTests = testList "EditPageModel" [
|
||||
Expect.isFalse page.IsInPageList "IsInPageList should have been unset"
|
||||
Expect.equal page.Template (Some "bork") "Template not filled properly"
|
||||
Expect.equal page.Text "<h1 id=\"howdy\">Howdy!</h1>\n" "Text not filled properly"
|
||||
Expect.equal page.Metadata.Length 2 "There should be 2 metadata items"
|
||||
let item1 = List.item 0 page.Metadata
|
||||
Expect.equal item1.Name "Test" "Meta item 0 name not filled properly"
|
||||
Expect.equal item1.Value "me" "Meta item 0 value not filled properly"
|
||||
let item2 = List.item 1 page.Metadata
|
||||
Expect.equal item2.Name "Two" "Meta item 1 name not filled properly"
|
||||
Expect.equal item2.Value "2" "Meta item 1 value not filled properly"
|
||||
Expect.equal page.OpenGraph testFullPage.OpenGraph "OpenGraph properties should be unchanged"
|
||||
Expect.hasLength page.Metadata 2 "There should be 2 metadata items"
|
||||
Expect.equal
|
||||
page.Metadata
|
||||
[ { Name = "Test"; Value = "me" }; { Name = "Two"; Value = "2" } ]
|
||||
"Metadata not filled properly"
|
||||
Expect.equal page.Revisions.Length 2 "There should be 2 revisions"
|
||||
let rev1 = List.item 0 page.Revisions
|
||||
Expect.equal rev1.AsOf (Noda.epoch + Duration.FromHours 1) "Revision 0 as-of not filled properly"
|
||||
@ -558,6 +755,7 @@ let editPageModelTests = testList "EditPageModel" [
|
||||
model.IsShownInPageList <- false
|
||||
model.Source <- "HTML"
|
||||
model.Text <- "<h1>Howdy, partners!</h1>"
|
||||
model.AssignOpenGraph <- false
|
||||
model.MetaNames <- [| "banana"; "apple"; "grape" |]
|
||||
model.MetaValues <- [| "monkey"; "zebra"; "ape" |]
|
||||
let now = Noda.epoch + Duration.FromDays 7
|
||||
@ -569,16 +767,14 @@ let editPageModelTests = testList "EditPageModel" [
|
||||
Expect.isFalse page.IsInPageList "IsInPageList should not have been set"
|
||||
Expect.isNone page.Template "Template not filled properly"
|
||||
Expect.equal page.Text "<h1>Howdy, partners!</h1>" "Text not filled properly"
|
||||
Expect.isNone page.OpenGraph "OpenGraph properties not cleared properly"
|
||||
Expect.equal page.Metadata.Length 3 "There should be 3 metadata items"
|
||||
let item1 = List.item 0 page.Metadata
|
||||
Expect.equal item1.Name "apple" "Meta item 0 name not filled properly"
|
||||
Expect.equal item1.Value "zebra" "Meta item 0 value not filled properly"
|
||||
let item2 = List.item 1 page.Metadata
|
||||
Expect.equal item2.Name "banana" "Meta item 1 name not filled properly"
|
||||
Expect.equal item2.Value "monkey" "Meta item 1 value not filled properly"
|
||||
let item3 = List.item 2 page.Metadata
|
||||
Expect.equal item3.Name "grape" "Meta item 2 name not filled properly"
|
||||
Expect.equal item3.Value "ape" "Meta item 2 value not filled properly"
|
||||
Expect.equal
|
||||
page.Metadata
|
||||
[ { Name = "apple"; Value = "zebra" }
|
||||
{ Name = "banana"; Value = "monkey" }
|
||||
{ Name = "grape"; Value = "ape" } ]
|
||||
"Metadata not filled properly"
|
||||
Expect.equal page.Revisions.Length 3 "There should be 3 revisions"
|
||||
Expect.equal page.Revisions.Head.AsOf now "Head revision as-of not filled properly"
|
||||
Expect.equal
|
||||
@ -593,6 +789,7 @@ let editPostModelTests = testList "EditPostModel" [
|
||||
test "succeeds for empty post" {
|
||||
let model = EditPostModel.FromPost WebLog.Empty { Post.Empty with Id = PostId "la-la-la" }
|
||||
Expect.equal model.Id "la-la-la" "Parent fields not filled properly"
|
||||
Expect.isFalse model.AssignOpenGraph "OpenGraph properties should not be assigned"
|
||||
Expect.equal model.Tags "" "Tags not filled properly"
|
||||
Expect.isEmpty model.CategoryIds "CategoryIds not filled properly"
|
||||
Expect.equal model.Status (string Draft) "Status not filled properly"
|
||||
@ -624,6 +821,7 @@ let editPostModelTests = testList "EditPostModel" [
|
||||
test "succeeds for full post with external chapters" {
|
||||
let model = EditPostModel.FromPost { WebLog.Empty with TimeZone = "Etc/GMT+1" } testFullPost
|
||||
Expect.equal model.Id "a-post" "Parent fields not filled properly"
|
||||
Expect.isTrue model.AssignOpenGraph "OpenGraph properties should have been assigned"
|
||||
Expect.equal model.Tags "demo, post" "Tags not filled properly"
|
||||
Expect.equal model.CategoryIds [| "cat-a"; "cat-b"; "cat-n" |] "CategoryIds not filled properly"
|
||||
Expect.equal model.Status (string Published) "Status not filled properly"
|
||||
@ -666,7 +864,7 @@ let editPostModelTests = testList "EditPostModel" [
|
||||
{ testFullPost.Episode.Value with
|
||||
Chapters = Some []
|
||||
ChapterFile = None
|
||||
ChapterType = None } }
|
||||
ChapterType = None } }
|
||||
Expect.equal model.ChapterSource "internal" "ChapterSource not filled properly"
|
||||
}
|
||||
]
|
||||
@ -677,8 +875,9 @@ let editPostModelTests = testList "EditPostModel" [
|
||||
model.Source <- "HTML"
|
||||
model.Text <- "<p>An updated post!</p>"
|
||||
model.Tags <- "Zebras, Aardvarks, , Turkeys"
|
||||
model.Template <- "updated"
|
||||
model.Template <- "updated"
|
||||
model.CategoryIds <- [| "cat-x"; "cat-y" |]
|
||||
model.AssignOpenGraph <- false
|
||||
model.MetaNames <- [| "Zed Meta"; "A Meta" |]
|
||||
model.MetaValues <- [| "A Value"; "Zed Value" |]
|
||||
model.Media <- "an-updated-ep.mp3"
|
||||
@ -688,7 +887,7 @@ let editPostModelTests = testList "EditPostModel" [
|
||||
model.ImageUrl <- "updated-cover.png"
|
||||
model.Subtitle <- "Talking"
|
||||
model.Explicit <- "no"
|
||||
model.ChapterSource <- "external"
|
||||
model.ChapterSource <- "external"
|
||||
model.ChapterFile <- "updated-chapters.txt"
|
||||
model.ChapterType <- "indexes"
|
||||
model.TranscriptUrl <- "updated-transcript.txt"
|
||||
@ -696,7 +895,7 @@ let editPostModelTests = testList "EditPostModel" [
|
||||
model.TranscriptLang <- "ES-mx"
|
||||
model.SeasonNumber <- 4
|
||||
model.SeasonDescription <- "Season Fo"
|
||||
model.EpisodeNumber <- "432.1"
|
||||
model.EpisodeNumber <- "432.1"
|
||||
model.EpisodeDescription <- "Four Three Two pt One"
|
||||
model
|
||||
testList "UpdatePost" [
|
||||
@ -711,11 +910,12 @@ let editPostModelTests = testList "EditPostModel" [
|
||||
Expect.equal post.Tags [ "aardvarks"; "turkeys"; "zebras" ] "Tags not filled properly"
|
||||
Expect.equal post.Template (Some "updated") "Template not filled properly"
|
||||
Expect.equal post.CategoryIds [ CategoryId "cat-x"; CategoryId "cat-y" ] "Categories not filled properly"
|
||||
Expect.equal post.Metadata.Length 2 "There should have been 2 meta items"
|
||||
Expect.equal post.Metadata[0].Name "A Meta" "Meta item 0 name not filled properly"
|
||||
Expect.equal post.Metadata[0].Value "Zed Value" "Meta item 0 value not filled properly"
|
||||
Expect.equal post.Metadata[1].Name "Zed Meta" "Meta item 1 name not filled properly"
|
||||
Expect.equal post.Metadata[1].Value "A Value" "Meta item 1 value not filled properly"
|
||||
Expect.isNone post.OpenGraph "OpenGraph properties should have been cleared"
|
||||
Expect.hasLength post.Metadata 2 "There should have been 2 meta items"
|
||||
Expect.equal
|
||||
post.Metadata
|
||||
[ { Name = "A Meta"; Value = "Zed Value" }; { Name = "Zed Meta"; Value = "A Value" } ]
|
||||
"Metadata not filled properly"
|
||||
Expect.equal post.Revisions.Length 3 "There should have been 3 revisions"
|
||||
Expect.equal
|
||||
post.Revisions[0].AsOf (Noda.epoch + Duration.FromDays 400) "Revision 0 AsOf not filled properly"
|
||||
@ -760,7 +960,7 @@ let editPostModelTests = testList "EditPostModel" [
|
||||
minModel.SeasonNumber <- 0
|
||||
minModel.SeasonDescription <- ""
|
||||
minModel.EpisodeNumber <- ""
|
||||
minModel.EpisodeDescription <- ""
|
||||
minModel.EpisodeDescription <- ""
|
||||
let post = minModel.UpdatePost testFullPost (Noda.epoch + Duration.FromDays 500)
|
||||
Expect.isSome post.Episode "There should have been a podcast episode"
|
||||
let ep = post.Episode.Value
|
||||
@ -785,7 +985,7 @@ let editPostModelTests = testList "EditPostModel" [
|
||||
}
|
||||
test "succeeds for a podcast episode with internal chapters" {
|
||||
let minModel = updatedModel ()
|
||||
minModel.ChapterSource <- "internal"
|
||||
minModel.ChapterSource <- "internal"
|
||||
minModel.ChapterFile <- ""
|
||||
minModel.ChapterType <- ""
|
||||
let post = minModel.UpdatePost testFullPost (Noda.epoch + Duration.FromDays 500)
|
||||
@ -977,7 +1177,7 @@ let editUserModelTests = testList "EditUserModel" [
|
||||
let model =
|
||||
{ Id = "test-user"
|
||||
AccessLevel = "WebLogAdmin"
|
||||
Email = "again@example.com"
|
||||
Email = "again@example.com"
|
||||
Url = ""
|
||||
FirstName = "Another"
|
||||
LastName = "One"
|
||||
@ -1115,10 +1315,10 @@ let postListItemTests = testList "PostListItem" [
|
||||
{ Post.Empty with
|
||||
Id = PostId "full-post"
|
||||
AuthorId = WebLogUserId "me"
|
||||
Status = Published
|
||||
Status = Published
|
||||
Title = "Finished Product"
|
||||
Permalink = Permalink "2021/post.html"
|
||||
PublishedOn = Some (Noda.epoch + Duration.FromHours 12)
|
||||
PublishedOn = Some (Noda.epoch + Duration.FromHours 12)
|
||||
UpdatedOn = Noda.epoch + Duration.FromHours 13
|
||||
Text = """<a href="/other-post.html">Click</a>"""
|
||||
CategoryIds = [ CategoryId "z"; CategoryId "y" ]
|
||||
@ -1157,13 +1357,14 @@ let settingsModelTests = testList "SettingsModel" [
|
||||
let model =
|
||||
SettingsModel.FromWebLog
|
||||
{ WebLog.Empty with
|
||||
Name = "The Web Log"
|
||||
Slug = "the-web-log"
|
||||
DefaultPage = "this-one"
|
||||
PostsPerPage = 18
|
||||
TimeZone = "America/Denver"
|
||||
ThemeId = ThemeId "my-theme"
|
||||
AutoHtmx = true }
|
||||
Name = "The Web Log"
|
||||
Slug = "the-web-log"
|
||||
DefaultPage = "this-one"
|
||||
PostsPerPage = 18
|
||||
TimeZone = "America/Denver"
|
||||
ThemeId = ThemeId "my-theme"
|
||||
AutoHtmx = true
|
||||
AutoOpenGraph = false }
|
||||
Expect.equal model.Name "The Web Log" "Name not filled properly"
|
||||
Expect.equal model.Slug "the-web-log" "Slug not filled properly"
|
||||
Expect.equal model.Subtitle "" "Subtitle not filled properly"
|
||||
@ -1173,6 +1374,7 @@ let settingsModelTests = testList "SettingsModel" [
|
||||
Expect.equal model.ThemeId "my-theme" "ThemeId not filled properly"
|
||||
Expect.isTrue model.AutoHtmx "AutoHtmx should have been set"
|
||||
Expect.equal model.Uploads "Database" "Uploads not filled properly"
|
||||
Expect.isFalse model.AutoOpenGraph "AutoOpenGraph should have been unset"
|
||||
}
|
||||
test "succeeds with a subtitle" {
|
||||
let model = SettingsModel.FromWebLog { WebLog.Empty with Subtitle = Some "sub here!" }
|
||||
@ -1182,15 +1384,16 @@ let settingsModelTests = testList "SettingsModel" [
|
||||
testList "Update" [
|
||||
test "succeeds with no subtitle" {
|
||||
let webLog =
|
||||
{ Name = "Interesting"
|
||||
Slug = "some-stuff"
|
||||
Subtitle = ""
|
||||
DefaultPage = "that-one"
|
||||
PostsPerPage = 8
|
||||
TimeZone = "America/Chicago"
|
||||
ThemeId = "test-theme"
|
||||
AutoHtmx = true
|
||||
Uploads = "Disk" }.Update WebLog.Empty
|
||||
{ Name = "Interesting"
|
||||
Slug = "some-stuff"
|
||||
Subtitle = ""
|
||||
DefaultPage = "that-one"
|
||||
PostsPerPage = 8
|
||||
TimeZone = "America/Chicago"
|
||||
ThemeId = "test-theme"
|
||||
AutoHtmx = true
|
||||
Uploads = "Disk"
|
||||
AutoOpenGraph = false }.Update WebLog.Empty
|
||||
Expect.equal webLog.Name "Interesting" "Name not filled properly"
|
||||
Expect.equal webLog.Slug "some-stuff" "Slug not filled properly"
|
||||
Expect.isNone webLog.Subtitle "Subtitle should not have had a value"
|
||||
@ -1200,6 +1403,7 @@ let settingsModelTests = testList "SettingsModel" [
|
||||
Expect.equal webLog.ThemeId (ThemeId "test-theme") "ThemeId not filled properly"
|
||||
Expect.isTrue webLog.AutoHtmx "AutoHtmx should have been set"
|
||||
Expect.equal webLog.Uploads Disk "Uploads not filled properly"
|
||||
Expect.isFalse webLog.AutoOpenGraph "AutoOpenGraph should have been unset"
|
||||
}
|
||||
test "succeeds with a subtitle" {
|
||||
let webLog = { SettingsModel.FromWebLog WebLog.Empty with Subtitle = "Sub" }.Update WebLog.Empty
|
||||
|
@ -26,9 +26,9 @@
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Expecto" Version="10.2.1" />
|
||||
<PackageReference Include="Expecto" Version="10.2.3" />
|
||||
<PackageReference Include="ThrowawayDb.Postgres" Version="1.4.0" />
|
||||
<PackageReference Update="FSharp.Core" Version="8.0.300" />
|
||||
<PackageReference Update="FSharp.Core" Version="9.0.300" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
@ -10,20 +10,19 @@ let sqliteOnly = (RethinkDbDataTests.env "SQLITE_ONLY" "0") = "1"
let postgresOnly = (RethinkDbDataTests.env "PG_ONLY" "0") = "1"

/// Whether any of the data tests are being isolated
let dbOnly = rethinkOnly || sqliteOnly || postgresOnly

/// Whether to only run the unit tests (skip database/integration tests)
let unitOnly = (RethinkDbDataTests.env "UNIT_ONLY" "0") = "1"
let allDatabases = not (rethinkOnly || sqliteOnly || postgresOnly)

let allTests = testList "MyWebLog" [
if not dbOnly then testList "Domain" [ SupportTypesTests.all; DataTypesTests.all; ViewModelsTests.all ]
if not unitOnly then
testList "Data" [
if not dbOnly then ConvertersTests.all
if not dbOnly then UtilsTests.all
if not dbOnly || (dbOnly && rethinkOnly) then RethinkDbDataTests.all
if not dbOnly || (dbOnly && sqliteOnly) then SQLiteDataTests.all
if not dbOnly || (dbOnly && postgresOnly) then PostgresDataTests.all
// Skip unit tests if running an isolated database test
if allDatabases then
testList "Domain" [ SupportTypesTests.all; DataTypesTests.all; ViewModelsTests.all ]
testList "Data (Unit)" [ ConvertersTests.all; UtilsTests.all ]
// Whether to skip integration tests
if RethinkDbDataTests.env "UNIT_ONLY" "0" <> "1" then
testList "Data (Integration)" [
if allDatabases || rethinkOnly then RethinkDbDataTests.all
if allDatabases || sqliteOnly then SQLiteDataTests.all
if allDatabases || postgresOnly then PostgresDataTests.all
]
]

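The test entry point above is driven entirely by environment variables. A minimal sketch of how those switches interact, assuming an env helper with the same read-or-default behavior as RethinkDbDataTests.env; the RETHINK_ONLY variable name is inferred by analogy with SQLITE_ONLY and PG_ONLY and is an assumption, as is the summary at the end.

// Illustrative sketch only (not part of the diff): how the isolation flags interact.
open System

// Assumed helper with the same read-or-default behavior as RethinkDbDataTests.env
let env name dflt =
    match Environment.GetEnvironmentVariable name with
    | null | "" -> dflt
    | value -> value

let rethinkOnly  = env "RETHINK_ONLY" "0" = "1"   // variable name assumed by analogy
let sqliteOnly   = env "SQLITE_ONLY"  "0" = "1"
let postgresOnly = env "PG_ONLY"      "0" = "1"
let unitOnly     = env "UNIT_ONLY"    "0" = "1"
let allDatabases = not (rethinkOnly || sqliteOnly || postgresOnly)

// UNIT_ONLY=1 -> domain and data unit tests only; PG_ONLY=1 -> PostgreSQL integration tests only
printfn "Run unit tests: %b" allDatabases
printfn "Run Postgres integration tests: %b" (not unitOnly && (allDatabases || postgresOnly))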
@ -3,30 +3,30 @@
open Microsoft.AspNetCore.Http
open MyWebLog.Data

/// Extension properties on HTTP context for web log
/// <summary>Extension properties on HTTP context for web log</summary>
[<AutoOpen>]
module Extensions =


open System.Security.Claims
open Microsoft.AspNetCore.Antiforgery
open Microsoft.Extensions.Configuration
open Microsoft.Extensions.DependencyInjection


/// Hold variable for the configured generator string
let mutable private generatorString: string option = None


type HttpContext with

/// The anti-CSRF service

/// <summary>The anti-CSRF service</summary>
member this.AntiForgery = this.RequestServices.GetRequiredService<IAntiforgery>()

/// The cross-site request forgery token set for this request

/// <summary>The cross-site request forgery token set for this request</summary>
member this.CsrfTokenSet = this.AntiForgery.GetAndStoreTokens this

/// The data implementation
/// <summary>The data implementation</summary>
member this.Data = this.RequestServices.GetRequiredService<IData>()

/// The generator string

/// <summary>The generator string</summary>
member this.Generator =
match generatorString with
| Some gen -> gen
@ -38,20 +38,22 @@ module Extensions =
| None -> Some "generator not configured"
generatorString.Value

/// The access level for the current user
/// <summary>The access level for the current user</summary>
member this.UserAccessLevel =
this.User.Claims
|> Seq.tryFind (fun claim -> claim.Type = ClaimTypes.Role)
|> Option.map (fun claim -> AccessLevel.Parse claim.Value)

/// The user ID for the current request
/// <summary>The user ID for the current request</summary>
member this.UserId =
WebLogUserId (this.User.Claims |> Seq.find (fun c -> c.Type = ClaimTypes.NameIdentifier)).Value

/// The web log for the current request
/// <summary>The web log for the current request</summary>
member this.WebLog = this.Items["webLog"] :?> WebLog

/// Does the current user have the requested level of access?

/// <summary>Does the current user have the required level of access?</summary>
/// <param name="level">The required level of access</param>
/// <returns>True if the user has the required access, false if not</returns>
member this.HasAccessLevel level =
defaultArg (this.UserAccessLevel |> Option.map _.HasAccess(level)) false

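The members above are what the handler code later in this diff relies on (ctx.WebLog, ctx.Data, ctx.HasAccessLevel, and so on). A minimal, hypothetical Giraffe handler showing how they read at a call site; it assumes the AutoOpen Extensions module above and the project's Error helpers (seen elsewhere in this diff) are in scope, and the handler itself is invented for illustration.

// Illustrative sketch only: using the HttpContext extension members from a Giraffe handler.
open Giraffe
open Microsoft.AspNetCore.Http
open MyWebLog

let adminGreeting : HttpHandler = fun next ctx ->
    // UserAccessLevel reads the role claim; HasAccessLevel folds the Option<AccessLevel> to a bool
    if ctx.HasAccessLevel WebLogAdmin then
        // ctx.WebLog was placed in Items["webLog"] by the middleware for this request
        text $"Administering {ctx.WebLog.Name}" next ctx
    else
        Error.notAuthorized next ctx   // project helper used throughout the handlers in this diff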
@ -64,30 +66,33 @@ open System.Collections.Concurrent
/// <remarks>This is filled by the middleware via the first request for each host, and can be updated via the web log
/// settings update page</remarks>
module WebLogCache =


open System.Text.RegularExpressions

/// A redirect rule that caches compiled regular expression rules
/// <summary>A redirect rule that caches compiled regular expression rules</summary>
type CachedRedirectRule =
/// A straight text match rule
| Text of string * string
/// A regular expression match rule
| RegEx of Regex * string
/// <summary>A straight text match rule</summary>
| Text of string * string
/// <summary>A regular expression match rule</summary>
| RegEx of Regex * string

/// The cache of web log details
let mutable private _cache : WebLog list = []
let mutable private _cache: WebLog list = []

/// Redirect rules with compiled regular expressions
let mutable private _redirectCache = ConcurrentDictionary<WebLogId, CachedRedirectRule list> ()
let mutable private _redirectCache = ConcurrentDictionary<WebLogId, CachedRedirectRule list>()

/// Try to get the web log for the current request (longest matching URL base wins)
let tryGet (path : string) =
/// <summary>Try to get the web log for the current request (longest matching URL base wins)</summary>
/// <param name="path">The path for the current request</param>
/// <returns>Some with the web log matching the URL, or None if none is found</returns>
let tryGet (path: string) =
_cache
|> List.filter (fun wl -> path.StartsWith wl.UrlBase)
|> List.sortByDescending _.UrlBase.Length
|> List.tryHead

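The longest-match rule in tryGet is what lets several web logs share a host under nested URL bases. A small self-contained sketch of the same selection logic; the sample URL bases are invented for the example.

// Illustrative sketch only: longest matching URL base wins, mirroring WebLogCache.tryGet.
let tryGetBase (bases: string list) (path: string) =
    bases
    |> List.filter (fun urlBase -> path.StartsWith urlBase)
    |> List.sortByDescending String.length
    |> List.tryHead

// With "/" and "/blog" both cached, a post under /blog resolves to the more specific base.
printfn "%A" (tryGetBase [ "/"; "/blog" ] "/blog/2024/post.html")   // Some "/blog"
printfn "%A" (tryGetBase [ "/"; "/blog" ] "/about.html")            // Some "/"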
/// Cache the web log for a particular host
|
||||
/// <summary>Cache the web log for a particular host</summary>
|
||||
/// <param name="webLog">The web log to be cached</param>
|
||||
let set webLog =
|
||||
_cache <- webLog :: (_cache |> List.filter (fun wl -> wl.Id <> webLog.Id))
|
||||
_redirectCache[webLog.Id] <-
|
||||
@ -100,166 +105,124 @@ module WebLogCache =
|
||||
RegEx(Regex(pattern, RegexOptions.Compiled ||| RegexOptions.IgnoreCase), urlTo)
|
||||
else
|
||||
Text(relUrl it.From, urlTo))
|
||||
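The set function above compiles each regular-expression rule once (Compiled, IgnoreCase) and stores plain matches as Text pairs. The code that applies these cached rules is not part of this hunk; the sketch below shows one hypothetical way the two cases could be evaluated against an incoming path, with an invented rule for illustration.

// Illustrative sketch only: evaluating the two CachedRedirectRule shapes against a path.
open System.Text.RegularExpressions

type CachedRedirectRule =
    | Text of string * string
    | RegEx of Regex * string

let applyRule path rule =
    match rule with
    | Text (fromUrl, toUrl) -> if path = fromUrl then Some toUrl else None
    | RegEx (pattern, toUrl) -> if pattern.IsMatch path then Some (pattern.Replace(path, toUrl)) else None

let rule = RegEx(Regex("^/old/(.*)$", RegexOptions.Compiled ||| RegexOptions.IgnoreCase), "/new/$1")
printfn "%A" (applyRule "/old/page.html" rule)   // Some "/new/page.html"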
|
||||
/// Get all cached web logs
|
||||
|
||||
/// <summary>Get all cached web logs</summary>
|
||||
/// <returns>All cached web logs</returns>
|
||||
let all () =
|
||||
_cache
|
||||
|
||||
/// Fill the web log cache from the database
|
||||
|
||||
/// <summary>Fill the web log cache from the database</summary>
|
||||
/// <param name="data">The data implementation from which web logs will be retrieved</param>
|
||||
let fill (data: IData) = backgroundTask {
|
||||
let! webLogs = data.WebLog.All()
|
||||
webLogs |> List.iter set
|
||||
}
|
||||
|
||||
/// Get the cached redirect rules for the given web log
|
||||
|
||||
/// <summary>Get the cached redirect rules for the given web log</summary>
|
||||
/// <param name="webLogId">The ID of the web log for which rules should be retrieved</param>
|
||||
/// <returns>The redirect rules for the given web log ID</returns>
|
||||
let redirectRules webLogId =
|
||||
_redirectCache[webLogId]
|
||||
|
||||
/// Is the given theme in use by any web logs?
|
||||
|
||||
/// <summary>Is the given theme in use by any web logs?</summary>
|
||||
/// <param name="themeId">The ID of the theme whose use should be checked</param>
|
||||
/// <returns>True if any web logs are using the given theme, false if not</returns>
|
||||
let isThemeInUse themeId =
|
||||
_cache |> List.exists (fun wl -> wl.ThemeId = themeId)
|
||||
|
||||
|
||||
/// A cache of page information needed to display the page list in templates
|
||||
/// <summary>A cache of page information needed to display the page list in templates</summary>
|
||||
module PageListCache =
|
||||
|
||||
|
||||
open MyWebLog.ViewModels
|
||||
|
||||
|
||||
/// Cache of displayed pages
|
||||
let private _cache = ConcurrentDictionary<WebLogId, DisplayPage array> ()
|
||||
|
||||
let private _cache = ConcurrentDictionary<WebLogId, DisplayPage array>()
|
||||
|
||||
/// Fill the page list for the given web log
|
||||
let private fillPages (webLog: WebLog) pages =
|
||||
_cache[webLog.Id] <-
|
||||
pages
|
||||
|> List.map (fun pg -> DisplayPage.FromPage webLog { pg with Text = "" })
|
||||
|> Array.ofList
|
||||
|
||||
/// Are there pages cached for this web log?
|
||||
|
||||
/// <summary>Are there pages cached for this web log?</summary>
|
||||
/// <param name="ctx">The <c>HttpContext</c> for the current request</param>
|
||||
/// <returns>True if the current web log has any pages cached, false if not</returns>
|
||||
let exists (ctx: HttpContext) = _cache.ContainsKey ctx.WebLog.Id
|
||||
|
||||
/// Get the pages for the web log for this request
|
||||
|
||||
/// <summary>Get the pages for the web log for this request</summary>
|
||||
/// <param name="ctx">The <c>HttpContext</c> for the current request</param>
|
||||
/// <returns>The page list for the current web log</returns>
|
||||
let get (ctx: HttpContext) = _cache[ctx.WebLog.Id]
|
||||
|
||||
/// Update the pages for the current web log
|
||||
let update (ctx: HttpContext) = backgroundTask {
|
||||
let! pages = ctx.Data.Page.FindListed ctx.WebLog.Id
|
||||
fillPages ctx.WebLog pages
|
||||
}
|
||||
|
||||
/// Refresh the pages for the given web log
|
||||
|
||||
/// <summary>Refresh the pages for the given web log</summary>
|
||||
/// <param name="webLog">The web log for which pages should be refreshed</param>
|
||||
/// <param name="data">The data implementation from which pages should be retrieved</param>
|
||||
let refresh (webLog: WebLog) (data: IData) = backgroundTask {
|
||||
let! pages = data.Page.FindListed webLog.Id
|
||||
fillPages webLog pages
|
||||
}
|
||||
|
||||
/// <summary>Update the pages for the current web log</summary>
|
||||
/// <param name="ctx">The <c>HttpContext</c> for the current request</param>
|
||||
let update (ctx: HttpContext) =
|
||||
refresh ctx.WebLog ctx.Data
|
||||
|
||||
/// Cache of all categories, indexed by web log
|
||||
|
||||
/// <summary>Cache of all categories, indexed by web log</summary>
|
||||
module CategoryCache =
|
||||
|
||||
|
||||
open MyWebLog.ViewModels
|
||||
|
||||
|
||||
/// The cache itself
|
||||
let private _cache = ConcurrentDictionary<WebLogId, DisplayCategory array> ()
|
||||
|
||||
/// Are there categories cached for this web log?
|
||||
let private _cache = ConcurrentDictionary<WebLogId, DisplayCategory array>()
|
||||
|
||||
/// <summary>Are there categories cached for this web log?</summary>
|
||||
/// <param name="ctx">The <c>HttpContext</c> for the current request</param>
|
||||
/// <returns>True if the current web log has any categories cached, false if not</returns>
|
||||
let exists (ctx: HttpContext) = _cache.ContainsKey ctx.WebLog.Id
|
||||
|
||||
/// Get the categories for the web log for this request
|
||||
|
||||
/// <summary>Get the categories for the web log for this request</summary>
|
||||
/// <param name="ctx">The <c>HttpContext</c> for the current request</param>
|
||||
/// <returns>The categories for the current web log</returns>
|
||||
let get (ctx: HttpContext) = _cache[ctx.WebLog.Id]
|
||||
|
||||
/// Update the cache with fresh data
|
||||
let update (ctx: HttpContext) = backgroundTask {
|
||||
let! cats = ctx.Data.Category.FindAllForView ctx.WebLog.Id
|
||||
_cache[ctx.WebLog.Id] <- cats
|
||||
}
|
||||
|
||||
/// Refresh the category cache for the given web log
|
||||
|
||||
/// <summary>Refresh the category cache for the given web log</summary>
|
||||
/// <param name="webLogId">The ID of the web log for which the cache should be refreshed</param>
|
||||
/// <param name="data">The data implementation from which categories should be retrieved</param>
|
||||
let refresh webLogId (data: IData) = backgroundTask {
|
||||
let! cats = data.Category.FindAllForView webLogId
|
||||
_cache[webLogId] <- cats
|
||||
}
|
||||
|
||||
|
||||
/// Cache for parsed templates
|
||||
module TemplateCache =
|
||||
|
||||
open System
|
||||
open System.Text.RegularExpressions
|
||||
open DotLiquid
|
||||
|
||||
/// Cache of parsed templates
|
||||
let private _cache = ConcurrentDictionary<string, Template> ()
|
||||
|
||||
/// Custom include parameter pattern
|
||||
let private hasInclude = Regex("""{% include_template \"(.*)\" %}""", RegexOptions.None, TimeSpan.FromSeconds 2)
|
||||
|
||||
/// Get a template for the given theme and template name
|
||||
let get (themeId: ThemeId) (templateName: string) (data: IData) = backgroundTask {
|
||||
let templatePath = $"{themeId}/{templateName}"
|
||||
match _cache.ContainsKey templatePath with
|
||||
| true -> return Ok _cache[templatePath]
|
||||
| false ->
|
||||
match! data.Theme.FindById themeId with
|
||||
| Some theme ->
|
||||
match theme.Templates |> List.tryFind (fun t -> t.Name = templateName) with
|
||||
| Some template ->
|
||||
let mutable text = template.Text
|
||||
let mutable childNotFound = ""
|
||||
while hasInclude.IsMatch text do
|
||||
let child = hasInclude.Match text
|
||||
let childText =
|
||||
match theme.Templates |> List.tryFind (fun t -> t.Name = child.Groups[1].Value) with
|
||||
| Some childTemplate -> childTemplate.Text
|
||||
| None ->
|
||||
childNotFound <-
|
||||
if childNotFound = "" then child.Groups[1].Value
|
||||
else $"{childNotFound}; {child.Groups[1].Value}"
|
||||
""
|
||||
text <- text.Replace(child.Value, childText)
|
||||
if childNotFound <> "" then
|
||||
let s = if childNotFound.IndexOf ";" >= 0 then "s" else ""
|
||||
return Error $"Could not find the child template{s} {childNotFound} required by {templateName}"
|
||||
else
|
||||
_cache[templatePath] <- Template.Parse(text, SyntaxCompatibility.DotLiquid22)
|
||||
return Ok _cache[templatePath]
|
||||
| None ->
|
||||
return Error $"Theme ID {themeId} does not have a template named {templateName}"
|
||||
| None -> return Error $"Theme ID {themeId} does not exist"
|
||||
}
|
||||
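The while loop above is a textual pre-pass: every {% include_template "name" %} tag is replaced by the named template's text before the combined result is parsed once, and any missing children are collected into childNotFound and reported. A rough standalone sketch of that inlining step; the template strings are invented, the missing-child handling is omitted, and the real code resolves templates from the theme in the database.

// Illustrative sketch only: inlining {% include_template "..." %} tags before parsing.
open System
open System.Text.RegularExpressions

let templates =
    dict [ "layout",  """<header>{{ web_log.name }}</header>{% include_template "sidebar" %}"""
           "sidebar", """<aside>{{ page_list }}</aside>""" ]

let hasInclude = Regex("{% include_template \"(.*)\" %}", RegexOptions.None, TimeSpan.FromSeconds 2)

let inlineIncludes (text: string) =
    let mutable text = text
    while hasInclude.IsMatch text do
        let child = hasInclude.Match text
        // replace the tag with the child template's text (no missing-child handling in this sketch)
        text <- text.Replace(child.Value, templates[child.Groups[1].Value])
    text

printfn "%s" (inlineIncludes templates["layout"])
// <header>{{ web_log.name }}</header><aside>{{ page_list }}</aside>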
|
||||
/// Get all theme/template names currently cached
|
||||
let allNames () =
|
||||
_cache.Keys |> Seq.sort |> Seq.toList
|
||||
|
||||
/// Invalidate all template cache entries for the given theme ID
|
||||
let invalidateTheme (themeId: ThemeId) =
|
||||
let keyPrefix = string themeId
|
||||
_cache.Keys
|
||||
|> Seq.filter _.StartsWith(keyPrefix)
|
||||
|> List.ofSeq
|
||||
|> List.iter (fun key -> match _cache.TryRemove key with _, _ -> ())
|
||||
|
||||
/// Remove all entries from the template cache
|
||||
let empty () =
|
||||
_cache.Clear()
|
||||
/// <summary>Update the cache with fresh data for the current web log</summary>
|
||||
/// <param name="ctx">The <c>HttpContext</c> for the current request</param>
|
||||
let update (ctx: HttpContext) =
|
||||
refresh ctx.WebLog.Id ctx.Data
|
||||
|
||||
|
||||
/// A cache of asset names by themes
|
||||
/// <summary>A cache of asset names by themes</summary>
|
||||
module ThemeAssetCache =
|
||||
|
||||
|
||||
/// A list of asset names for each theme
|
||||
let private _cache = ConcurrentDictionary<ThemeId, string list> ()
|
||||
|
||||
/// Retrieve the assets for the given theme ID
|
||||
let private _cache = ConcurrentDictionary<ThemeId, string list>()
|
||||
|
||||
/// <summary>Retrieve the assets for the given theme ID</summary>
|
||||
/// <param name="themeId">The ID of the theme whose assets should be returned</param>
|
||||
/// <returns>The assets for the given theme</returns>
|
||||
let get themeId = _cache[themeId]
|
||||
|
||||
/// Refresh the list of assets for the given theme
|
||||
|
||||
/// <summary>Refresh the list of assets for the given theme</summary>
|
||||
/// <param name="themeId">The ID of the theme whose assets should be refreshed</param>
|
||||
/// <param name="data">The data implementation from which assets should be retrieved</param>
|
||||
let refreshTheme themeId (data: IData) = backgroundTask {
|
||||
let! assets = data.ThemeAsset.FindByTheme themeId
|
||||
_cache[themeId] <- assets |> List.map (fun a -> match a.Id with ThemeAssetId (_, path) -> path)
|
||||
}
|
||||
|
||||
/// Fill the theme asset cache
|
||||
|
||||
/// <summary>Fill the theme asset cache</summary>
|
||||
/// <param name="data">The data implementation from which assets should be retrieved</param>
|
||||
let fill (data: IData) = backgroundTask {
|
||||
let! assets = data.ThemeAsset.All()
|
||||
for asset in assets do
|
||||
|
@ -28,13 +28,13 @@ module Dashboard =
|
||||
ListedPages = listed
|
||||
Categories = cats
|
||||
TopLevelCategories = topCats }
|
||||
return! adminPage "Dashboard" false next ctx (Views.WebLog.dashboard model)
|
||||
return! adminPage "Dashboard" next ctx (Views.WebLog.dashboard model)
|
||||
}
|
||||
|
||||
// GET /admin/administration
|
||||
let admin : HttpHandler = requireAccess Administrator >=> fun next ctx -> task {
|
||||
let! themes = ctx.Data.Theme.All()
|
||||
return! adminPage "myWebLog Administration" true next ctx (Views.Admin.dashboard themes)
|
||||
return! adminPage "myWebLog Administration" next ctx (Views.Admin.dashboard themes)
|
||||
}
|
||||
|
||||
/// Redirect the user to the admin dashboard
|
||||
@ -71,7 +71,7 @@ module Cache =
|
||||
let refreshTheme themeId : HttpHandler = requireAccess Administrator >=> fun next ctx -> task {
|
||||
let data = ctx.Data
|
||||
if themeId = "all" then
|
||||
TemplateCache.empty ()
|
||||
Template.Cache.empty ()
|
||||
do! ThemeAssetCache.fill data
|
||||
do! addMessage ctx
|
||||
{ UserMessage.Success with
|
||||
@ -79,7 +79,7 @@ module Cache =
|
||||
else
|
||||
match! data.Theme.FindById(ThemeId themeId) with
|
||||
| Some theme ->
|
||||
TemplateCache.invalidateTheme theme.Id
|
||||
Template.Cache.invalidateTheme theme.Id
|
||||
do! ThemeAssetCache.refreshTheme theme.Id data
|
||||
do! addMessage ctx
|
||||
{ UserMessage.Success with
|
||||
@ -98,7 +98,7 @@ module Category =
|
||||
// GET /admin/categories
|
||||
let all : HttpHandler = fun next ctx ->
|
||||
let response = fun next ctx ->
|
||||
adminPage "Categories" true next ctx (Views.WebLog.categoryList (ctx.Request.Query.ContainsKey "new"))
|
||||
adminPage "Categories" next ctx (Views.WebLog.categoryList (ctx.Request.Query.ContainsKey "new"))
|
||||
(withHxPushUrl (ctx.WebLog.RelativeUrl (Permalink "admin/categories")) >=> response) next ctx
|
||||
|
||||
// GET /admin/category/{id}/edit
|
||||
@ -115,7 +115,7 @@ module Category =
|
||||
| Some (title, cat) ->
|
||||
return!
|
||||
Views.WebLog.categoryEdit (EditCategoryModel.FromCategory cat)
|
||||
|> adminBarePage title true next ctx
|
||||
|> adminBarePage title next ctx
|
||||
| None -> return! Error.notFound next ctx
|
||||
}
|
||||
|
||||
@ -167,7 +167,7 @@ module RedirectRules =
|
||||
|
||||
// GET /admin/settings/redirect-rules
|
||||
let all : HttpHandler = fun next ctx ->
|
||||
adminPage "Redirect Rules" true next ctx (Views.WebLog.redirectList ctx.WebLog.RedirectRules)
|
||||
adminPage "Redirect Rules" next ctx (Views.WebLog.redirectList ctx.WebLog.RedirectRules)
|
||||
|
||||
// GET /admin/settings/redirect-rules/[index]
|
||||
let edit idx : HttpHandler = fun next ctx ->
|
||||
@ -182,7 +182,7 @@ module RedirectRules =
|
||||
Some
|
||||
("Edit", (Views.WebLog.redirectEdit (EditRedirectRuleModel.FromRule idx (List.item idx rules))))
|
||||
match titleAndView with
|
||||
| Some (title, view) -> adminBarePage $"{title} Redirect Rule" true next ctx view
|
||||
| Some (title, view) -> adminBarePage $"{title} Redirect Rule" next ctx view
|
||||
| None -> Error.notFound next ctx
|
||||
|
||||
/// Update the web log's redirect rules in the database, the request web log, and the web log cache
|
||||
@ -247,7 +247,7 @@ module TagMapping =
|
||||
// GET /admin/settings/tag-mappings
|
||||
let all : HttpHandler = fun next ctx -> task {
|
||||
let! mappings = ctx.Data.TagMap.FindByWebLog ctx.WebLog.Id
|
||||
return! adminBarePage "Tag Mapping List" true next ctx (Views.WebLog.tagMapList mappings)
|
||||
return! adminBarePage "Tag Mapping List" next ctx (Views.WebLog.tagMapList mappings)
|
||||
}
|
||||
|
||||
// GET /admin/settings/tag-mapping/{id}/edit
|
||||
@ -260,7 +260,7 @@ module TagMapping =
|
||||
| Some tm ->
|
||||
return!
|
||||
Views.WebLog.tagMapEdit (EditTagMapModel.FromMapping tm)
|
||||
|> adminBarePage (if isNew then "Add Tag Mapping" else $"Mapping for {tm.Tag} Tag") true next ctx
|
||||
|> adminBarePage (if isNew then "Add Tag Mapping" else $"Mapping for {tm.Tag} Tag") next ctx
|
||||
| None -> return! Error.notFound next ctx
|
||||
}
|
||||
|
||||
@ -302,12 +302,12 @@ module Theme =
|
||||
let! themes = ctx.Data.Theme.All ()
|
||||
return!
|
||||
Views.Admin.themeList (List.map (DisplayTheme.FromTheme WebLogCache.isThemeInUse) themes)
|
||||
|> adminBarePage "Themes" true next ctx
|
||||
|> adminBarePage "Themes" next ctx
|
||||
}
|
||||
|
||||
// GET /admin/theme/new
|
||||
let add : HttpHandler = requireAccess Administrator >=> fun next ctx ->
|
||||
adminBarePage "Upload a Theme File" true next ctx Views.Admin.themeUpload
|
||||
adminBarePage "Upload a Theme File" next ctx Views.Admin.themeUpload
|
||||
|
||||
/// Update the name and version for a theme based on the version.txt file, if present
|
||||
let private updateNameAndVersion (theme: Theme) (zip: ZipArchive) = backgroundTask {
|
||||
@ -398,7 +398,7 @@ module Theme =
|
||||
do! themeFile.CopyToAsync stream
|
||||
let! _ = loadFromZip themeId stream data
|
||||
do! ThemeAssetCache.refreshTheme themeId data
|
||||
TemplateCache.invalidateTheme themeId
|
||||
Template.Cache.invalidateTheme themeId
|
||||
// Ensure the themes directory exists
|
||||
let themeDir = Path.Combine(".", "themes")
|
||||
if not (Directory.Exists themeDir) then Directory.CreateDirectory themeDir |> ignore
|
||||
@ -464,7 +464,7 @@ module WebLog =
|
||||
return!
|
||||
Views.WebLog.webLogSettings
|
||||
(SettingsModel.FromWebLog ctx.WebLog) themes pages uploads (EditRssModel.FromRssOptions ctx.WebLog.Rss)
|
||||
|> adminPage "Web Log Settings" true next ctx
|
||||
|> adminPage "Web Log Settings" next ctx
|
||||
}
|
||||
|
||||
// POST /admin/settings
|
||||
|
@ -453,7 +453,7 @@ let editCustomFeed feedId : HttpHandler = requireAccess WebLogAdmin >=> fun next
|
||||
{ Name = string Blog; Value = "Blog" }
|
||||
]
|
||||
Views.WebLog.feedEdit (EditCustomFeedModel.FromFeed f) ratings mediums
|
||||
|> adminPage $"""{if feedId = "new" then "Add" else "Edit"} Custom RSS Feed""" true next ctx
|
||||
|> adminPage $"""{if feedId = "new" then "Add" else "Edit"} Custom RSS Feed""" next ctx
|
||||
| None -> Error.notFound next ctx
|
||||
|
||||
// POST /admin/settings/rss/save
|
||||
|
@ -19,113 +19,9 @@ type ISession with
|
||||
| item -> Some (JsonSerializer.Deserialize<'T> item)
|
||||
|
||||
|
||||
/// Keys used in the myWebLog-standard DotLiquid hash
|
||||
module ViewContext =
|
||||
|
||||
/// The anti cross-site request forgery (CSRF) token set to use for form submissions
|
||||
[<Literal>]
|
||||
let AntiCsrfTokens = "csrf"
|
||||
|
||||
/// The unified application view context
|
||||
[<Literal>]
|
||||
let AppViewContext = "app"
|
||||
|
||||
/// The categories for this web log
|
||||
[<Literal>]
|
||||
let Categories = "categories"
|
||||
|
||||
/// The main content of the view
|
||||
[<Literal>]
|
||||
let Content = "content"
|
||||
|
||||
/// The current page URL
|
||||
[<Literal>]
|
||||
let CurrentPage = "current_page"
|
||||
|
||||
/// The generator string for the current version of myWebLog
|
||||
[<Literal>]
|
||||
let Generator = "generator"
|
||||
|
||||
/// The HTML to load htmx from the unpkg CDN
|
||||
[<Literal>]
|
||||
let HtmxScript = "htmx_script"
|
||||
|
||||
/// Whether the current user has Administrator privileges
|
||||
[<Literal>]
|
||||
let IsAdministrator = "is_administrator"
|
||||
|
||||
/// Whether the current user has Author (or above) privileges
|
||||
[<Literal>]
|
||||
let IsAuthor = "is_author"
|
||||
|
||||
/// Whether the current view is displaying a category archive page
|
||||
[<Literal>]
|
||||
let IsCategory = "is_category"
|
||||
|
||||
/// Whether the current view is displaying the first page of a category archive
|
||||
[<Literal>]
|
||||
let IsCategoryHome = "is_category_home"
|
||||
|
||||
/// Whether the current user has Editor (or above) privileges
|
||||
[<Literal>]
|
||||
let IsEditor = "is_editor"
|
||||
|
||||
/// Whether the current view is the home page for the web log
|
||||
[<Literal>]
|
||||
let IsHome = "is_home"
|
||||
|
||||
/// Whether there is a user logged on
|
||||
[<Literal>]
|
||||
let IsLoggedOn = "is_logged_on"
|
||||
|
||||
/// Whether the current view is displaying a page
|
||||
[<Literal>]
|
||||
let IsPage = "is_page"
|
||||
|
||||
/// Whether the current view is displaying a post
|
||||
[<Literal>]
|
||||
let IsPost = "is_post"
|
||||
|
||||
/// Whether the current view is a tag archive page
|
||||
[<Literal>]
|
||||
let IsTag = "is_tag"
|
||||
|
||||
/// Whether the current view is the first page of a tag archive
|
||||
[<Literal>]
|
||||
let IsTagHome = "is_tag_home"
|
||||
|
||||
/// Whether the current user has Web Log Admin (or above) privileges
|
||||
[<Literal>]
|
||||
let IsWebLogAdmin = "is_web_log_admin"
|
||||
|
||||
/// Messages to be displayed to the user
|
||||
[<Literal>]
|
||||
let Messages = "messages"
|
||||
|
||||
/// The view model / form for the page
|
||||
[<Literal>]
|
||||
let Model = "model"
|
||||
|
||||
/// The listed pages for the web log
|
||||
[<Literal>]
|
||||
let PageList = "page_list"
|
||||
|
||||
/// The title of the page being displayed
|
||||
[<Literal>]
|
||||
let PageTitle = "page_title"
|
||||
|
||||
/// The slug for category or tag archive pages
|
||||
[<Literal>]
|
||||
let Slug = "slug"
|
||||
|
||||
/// The ID of the current user
|
||||
[<Literal>]
|
||||
let UserId = "user_id"
|
||||
|
||||
/// The current web log
|
||||
[<Literal>]
|
||||
let WebLog = "web_log"
|
||||
|
||||
/// Messages to be displayed to the user
|
||||
[<Literal>]
|
||||
let MESSAGES = "messages"
|
||||
|
||||
/// The HTTP item key for loading the session
|
||||
let private sessionLoadedKey = "session-loaded"
|
||||
@ -147,36 +43,25 @@ open MyWebLog.ViewModels
|
||||
/// Add a message to the user's session
|
||||
let addMessage (ctx: HttpContext) message = task {
|
||||
do! loadSession ctx
|
||||
let msg = match ctx.Session.TryGet<UserMessage list> ViewContext.Messages with Some it -> it | None -> []
|
||||
ctx.Session.Set(ViewContext.Messages, message :: msg)
|
||||
let msg = match ctx.Session.TryGet<UserMessage list> MESSAGES with Some it -> it | None -> []
|
||||
ctx.Session.Set(MESSAGES, message :: msg)
|
||||
}
|
||||
|
||||
/// Get any messages from the user's session, removing them in the process
|
||||
let messages (ctx: HttpContext) = task {
|
||||
do! loadSession ctx
|
||||
match ctx.Session.TryGet<UserMessage list> ViewContext.Messages with
|
||||
match ctx.Session.TryGet<UserMessage list> MESSAGES with
|
||||
| Some msg ->
|
||||
ctx.Session.Remove ViewContext.Messages
|
||||
ctx.Session.Remove MESSAGES
|
||||
return msg |> (List.rev >> Array.ofList)
|
||||
| None -> return [||]
|
||||
}
|
||||
|
||||
open MyWebLog
|
||||
open DotLiquid
|
||||
|
||||
/// Shorthand for creating a DotLiquid hash from an anonymous object
|
||||
let makeHash (values: obj) =
|
||||
Hash.FromAnonymousObject values
|
||||
|
||||
/// Create a hash with the page title filled
|
||||
let hashForPage (title: string) =
|
||||
makeHash {| page_title = title |}
|
||||
|
||||
/// Add a key to the hash, returning the modified hash
|
||||
// (note that the hash itself is mutated; this is only used to make it pipeable)
|
||||
let addToHash key (value: obj) (hash: Hash) =
|
||||
if hash.ContainsKey key then hash[key] <- value else hash.Add(key, value)
|
||||
hash
|
||||
/// Create a view context with the page title filled
|
||||
let viewCtxForPage title =
|
||||
{ AppViewContext.Empty with PageTitle = title }
|
||||
|
||||
open System.Security.Claims
|
||||
open Giraffe
|
||||
@ -184,7 +69,8 @@ open Giraffe.Htmx
|
||||
open Giraffe.ViewEngine
|
||||
|
||||
/// htmx script tag
|
||||
let private htmxScript = RenderView.AsString.htmlNode Htmx.Script.minified
|
||||
let private htmxScript (webLog: WebLog) =
|
||||
$"""<script src="{webLog.RelativeUrl(Permalink "htmx.min.js")}"></script>"""
|
||||
|
||||
/// Get the current user messages, and commit the session so that they are preserved
|
||||
let private getCurrentMessages ctx = task {
|
||||
@ -194,54 +80,31 @@ let private getCurrentMessages ctx = task {
|
||||
}
|
||||
|
||||
/// Generate the view context for a response
|
||||
let private generateViewContext pageTitle messages includeCsrf (ctx: HttpContext) =
|
||||
{ WebLog = ctx.WebLog
|
||||
UserId = ctx.User.Claims
|
||||
|> Seq.tryFind (fun claim -> claim.Type = ClaimTypes.NameIdentifier)
|
||||
|> Option.map (fun claim -> WebLogUserId claim.Value)
|
||||
PageTitle = pageTitle
|
||||
Csrf = if includeCsrf then Some ctx.CsrfTokenSet else None
|
||||
PageList = PageListCache.get ctx
|
||||
Categories = CategoryCache.get ctx
|
||||
CurrentPage = ctx.Request.Path.Value[1..]
|
||||
Messages = messages
|
||||
Generator = ctx.Generator
|
||||
HtmxScript = htmxScript
|
||||
IsAuthor = ctx.HasAccessLevel Author
|
||||
IsEditor = ctx.HasAccessLevel Editor
|
||||
IsWebLogAdmin = ctx.HasAccessLevel WebLogAdmin
|
||||
IsAdministrator = ctx.HasAccessLevel Administrator }
|
||||
let private generateViewContext messages viewCtx (ctx: HttpContext) =
|
||||
{ viewCtx with
|
||||
WebLog = ctx.WebLog
|
||||
UserId = ctx.User.Claims
|
||||
|> Seq.tryFind (fun claim -> claim.Type = ClaimTypes.NameIdentifier)
|
||||
|> Option.map (fun claim -> WebLogUserId claim.Value)
|
||||
Csrf = Some ctx.CsrfTokenSet
|
||||
PageList = PageListCache.get ctx
|
||||
Categories = CategoryCache.get ctx
|
||||
CurrentPage = ctx.Request.Path.Value[1..]
|
||||
Messages = messages
|
||||
Generator = ctx.Generator
|
||||
HtmxScript = htmxScript ctx.WebLog
|
||||
IsAuthor = ctx.HasAccessLevel Author
|
||||
IsEditor = ctx.HasAccessLevel Editor
|
||||
IsWebLogAdmin = ctx.HasAccessLevel WebLogAdmin
|
||||
IsAdministrator = ctx.HasAccessLevel Administrator }
|
||||
|
||||
|
||||
/// Populate the DotLiquid hash with standard information
|
||||
let addViewContext ctx (hash: Hash) = task {
|
||||
/// Update the view context with standard information (if it has not been done yet) or updated messages
|
||||
let updateViewContext ctx viewCtx = task {
|
||||
let! messages = getCurrentMessages ctx
|
||||
if hash.ContainsKey ViewContext.AppViewContext then
|
||||
let oldApp = hash[ViewContext.AppViewContext] :?> AppViewContext
|
||||
let newApp = { oldApp with Messages = Array.concat [ oldApp.Messages; messages ] }
|
||||
return
|
||||
hash
|
||||
|> addToHash ViewContext.AppViewContext newApp
|
||||
|> addToHash ViewContext.Messages newApp.Messages
|
||||
if viewCtx.Generator = "" then
|
||||
return generateViewContext messages viewCtx ctx
|
||||
else
|
||||
let app =
|
||||
generateViewContext (string hash[ViewContext.PageTitle]) messages
|
||||
(hash.ContainsKey ViewContext.AntiCsrfTokens) ctx
|
||||
return
|
||||
hash
|
||||
|> addToHash ViewContext.UserId (app.UserId |> Option.map string |> Option.defaultValue "")
|
||||
|> addToHash ViewContext.WebLog app.WebLog
|
||||
|> addToHash ViewContext.PageList app.PageList
|
||||
|> addToHash ViewContext.Categories app.Categories
|
||||
|> addToHash ViewContext.CurrentPage app.CurrentPage
|
||||
|> addToHash ViewContext.Messages app.Messages
|
||||
|> addToHash ViewContext.Generator app.Generator
|
||||
|> addToHash ViewContext.HtmxScript app.HtmxScript
|
||||
|> addToHash ViewContext.IsLoggedOn app.IsLoggedOn
|
||||
|> addToHash ViewContext.IsAuthor app.IsAuthor
|
||||
|> addToHash ViewContext.IsEditor app.IsEditor
|
||||
|> addToHash ViewContext.IsWebLogAdmin app.IsWebLogAdmin
|
||||
|> addToHash ViewContext.IsAdministrator app.IsAdministrator
|
||||
return { viewCtx with Messages = Array.concat [ viewCtx.Messages; messages ] }
|
||||
}
|
||||
|
||||
/// Is the request from htmx?
|
||||
@ -269,6 +132,7 @@ let redirectToGet url : HttpHandler = fun _ ctx -> task {
|
||||
}
|
||||
|
||||
/// The MIME type for podcast episode JSON chapters
|
||||
[<Literal>]
|
||||
let JSON_CHAPTERS = "application/json+chapters"
|
||||
|
||||
|
||||
@ -311,65 +175,65 @@ module Error =
else ServerErrors.INTERNAL_ERROR message earlyReturn ctx)


/// Render a view for the specified theme, using the specified template, layout, and hash
let viewForTheme themeId template next ctx (hash: Hash) = task {
let! hash = addViewContext ctx hash
/// Render a view for the specified theme, using the specified template, layout, and context
let viewForTheme themeId template next ctx (viewCtx: AppViewContext) = task {
let! updated = updateViewContext ctx viewCtx

// NOTE: DotLiquid does not support {% render %} or {% include %} in its templates, so we will do a 2-pass render;
// the net effect is a "layout" capability similar to Razor or Pug
// NOTE: Although Fluid's view engine support implements layouts and sections, it also relies on the filesystem.
// As we are loading templates from memory or a database, we do a 2-pass render; the first for the content,
// the second for the overall page.

// Render view content...
match! TemplateCache.get themeId template ctx.Data with
match! Template.Cache.get themeId template ctx.Data with
| Ok contentTemplate ->
let _ = addToHash ViewContext.Content (contentTemplate.Render hash) hash
let forLayout = { updated with Content = Template.render contentTemplate updated ctx.Data }
// ...then render that content with its layout
match! TemplateCache.get themeId (if isHtmx ctx then "layout-partial" else "layout") ctx.Data with
| Ok layoutTemplate -> return! htmlString (layoutTemplate.Render hash) next ctx
match! Template.Cache.get themeId (if isHtmx ctx then "layout-partial" else "layout") ctx.Data with
| Ok layoutTemplate -> return! htmlString (Template.render layoutTemplate forLayout ctx.Data) next ctx
| Error message -> return! Error.server message next ctx
| Error message -> return! Error.server message next ctx
}

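The NOTE describes the 2-pass approach: render the content template first, then hand its output to the layout template as the content value. A minimal sketch of the idea using DotLiquid directly, as the pre-change code did; the template strings are invented, and the new code performs the same two steps through Template.Cache.get and Template.render.

// Illustrative sketch only: the "content first, then layout" render described in the NOTE.
open DotLiquid

let contentTemplate = Template.Parse "<h1>{{ page_title }}</h1>"
let layoutTemplate  = Template.Parse "<html><body>{{ content }}</body></html>"

// Pass 1: render the view itself
let content = contentTemplate.Render(Hash.FromAnonymousObject {| page_title = "Welcome" |})
// Pass 2: render the layout, passing the first pass's output as "content"
let page = layoutTemplate.Render(Hash.FromAnonymousObject {| content = content |})

printfn "%s" page   // <html><body><h1>Welcome</h1></body></html>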
/// Render a bare view for the specified theme, using the specified template and hash
|
||||
let bareForTheme themeId template next ctx (hash: Hash) = task {
|
||||
let! hash = addViewContext ctx hash
|
||||
/// Render a bare view for the specified theme, using the specified template and context
|
||||
let bareForTheme themeId template next ctx viewCtx = task {
|
||||
let! updated = updateViewContext ctx viewCtx
|
||||
let withContent = task {
|
||||
if hash.ContainsKey ViewContext.Content then return Ok hash
|
||||
if updated.Content = "" then
|
||||
match! Template.Cache.get themeId template ctx.Data with
|
||||
| Ok contentTemplate -> return Ok { updated with Content = Template.render contentTemplate updated ctx.Data }
|
||||
| Error message -> return Error message
|
||||
else
|
||||
match! TemplateCache.get themeId template ctx.Data with
|
||||
| Ok contentTemplate -> return Ok(addToHash ViewContext.Content (contentTemplate.Render hash) hash)
|
||||
| Error message -> return Error message
|
||||
return Ok viewCtx
|
||||
}
|
||||
match! withContent with
|
||||
| Ok completeHash ->
|
||||
| Ok completeCtx ->
|
||||
// Bare templates are rendered with layout-bare
|
||||
match! TemplateCache.get themeId "layout-bare" ctx.Data with
|
||||
match! Template.Cache.get themeId "layout-bare" ctx.Data with
|
||||
| Ok layoutTemplate ->
|
||||
return!
|
||||
(messagesToHeaders (hash[ViewContext.Messages] :?> UserMessage array)
|
||||
>=> htmlString (layoutTemplate.Render completeHash))
|
||||
(messagesToHeaders completeCtx.Messages >=> htmlString (Template.render layoutTemplate completeCtx ctx.Data))
|
||||
next ctx
|
||||
| Error message -> return! Error.server message next ctx
|
||||
| Error message -> return! Error.server message next ctx
|
||||
}
|
||||
|
||||
/// Return a view for the web log's default theme
|
||||
let themedView template next ctx hash = task {
|
||||
let! hash = addViewContext ctx hash
|
||||
return! viewForTheme (hash[ViewContext.WebLog] :?> WebLog).ThemeId template next ctx hash
|
||||
let themedView template next (ctx: HttpContext) viewCtx = task {
|
||||
return! viewForTheme ctx.WebLog.ThemeId template next ctx viewCtx
|
||||
}
|
||||
|
||||
/// Display a page for an admin endpoint
|
||||
let adminPage pageTitle includeCsrf next ctx (content: AppViewContext -> XmlNode list) = task {
|
||||
let adminPage pageTitle next ctx (content: AppViewContext -> XmlNode list) = task {
|
||||
let! messages = getCurrentMessages ctx
|
||||
let appCtx = generateViewContext pageTitle messages includeCsrf ctx
|
||||
let appCtx = generateViewContext messages (viewCtxForPage pageTitle) ctx
|
||||
let layout = if isHtmx ctx then Layout.partial else Layout.full
|
||||
return! htmlString (layout content appCtx |> RenderView.AsString.htmlDocument) next ctx
|
||||
}
|
||||
|
||||
/// Display a bare page for an admin endpoint
|
||||
let adminBarePage pageTitle includeCsrf next ctx (content: AppViewContext -> XmlNode list) = task {
|
||||
let adminBarePage pageTitle next ctx (content: AppViewContext -> XmlNode list) = task {
|
||||
let! messages = getCurrentMessages ctx
|
||||
let appCtx = generateViewContext pageTitle messages includeCsrf ctx
|
||||
let appCtx = generateViewContext messages (viewCtxForPage pageTitle) ctx
|
||||
return!
|
||||
( messagesToHeaders appCtx.Messages
|
||||
>=> htmlString (Layout.bare content appCtx |> RenderView.AsString.htmlDocument)) next ctx
|
||||
|
@ -17,7 +17,7 @@ let all pageNbr : HttpHandler = requireAccess Author >=> fun next ctx -> task {
|
||||
|> List.ofSeq
|
||||
return!
|
||||
Views.Page.pageList displayPages pageNbr (pages.Length > 25)
|
||||
|> adminPage "Pages" true next ctx
|
||||
|> adminPage "Pages" next ctx
|
||||
}
|
||||
|
||||
// GET /admin/page/{id}/edit
|
||||
@ -34,7 +34,7 @@ let edit pgId : HttpHandler = requireAccess Author >=> fun next ctx -> task {
|
||||
| Some (title, page) when canEdit page.AuthorId ctx ->
|
||||
let model = EditPageModel.FromPage page
|
||||
let! templates = templatesForTheme ctx "page"
|
||||
return! adminPage title true next ctx (Views.Page.pageEdit model templates)
|
||||
return! adminPage title next ctx (Views.Page.pageEdit model templates)
|
||||
| Some _ -> return! Error.notAuthorized next ctx
|
||||
| None -> return! Error.notFound next ctx
|
||||
}
|
||||
@ -56,7 +56,7 @@ let editPermalinks pgId : HttpHandler = requireAccess Author >=> fun next ctx ->
|
||||
return!
|
||||
ManagePermalinksModel.FromPage pg
|
||||
|> Views.Helpers.managePermalinks
|
||||
|> adminPage "Manage Prior Permalinks" true next ctx
|
||||
|> adminPage "Manage Prior Permalinks" next ctx
|
||||
| Some _ -> return! Error.notAuthorized next ctx
|
||||
| None -> return! Error.notFound next ctx
|
||||
}
|
||||
@ -84,7 +84,7 @@ let editRevisions pgId : HttpHandler = requireAccess Author >=> fun next ctx ->
|
||||
return!
|
||||
ManageRevisionsModel.FromPage pg
|
||||
|> Views.Helpers.manageRevisions
|
||||
|> adminPage "Manage Page Revisions" true next ctx
|
||||
|> adminPage "Manage Page Revisions" next ctx
|
||||
| Some _ -> return! Error.notAuthorized next ctx
|
||||
| None -> return! Error.notFound next ctx
|
||||
}
|
||||
@ -115,7 +115,7 @@ let private findPageRevision pgId revDate (ctx: HttpContext) = task {
|
||||
let previewRevision (pgId, revDate) : HttpHandler = requireAccess Author >=> fun next ctx -> task {
|
||||
match! findPageRevision pgId revDate ctx with
|
||||
| Some pg, Some rev when canEdit pg.AuthorId ctx ->
|
||||
return! adminBarePage "" false next ctx (Views.Helpers.commonPreview rev)
|
||||
return! adminBarePage "" next ctx (Views.Helpers.commonPreview rev)
|
||||
| Some _, Some _ -> return! Error.notAuthorized next ctx
|
||||
| None, _ | _, None -> return! Error.notFound next ctx
|
||||
}
|
||||
@ -141,7 +141,7 @@ let deleteRevision (pgId, revDate) : HttpHandler = requireAccess Author >=> fun
|
||||
| Some pg, Some rev when canEdit pg.AuthorId ctx ->
|
||||
do! ctx.Data.Page.Update { pg with Revisions = pg.Revisions |> List.filter (fun r -> r.AsOf <> rev.AsOf) }
|
||||
do! addMessage ctx { UserMessage.Success with Message = "Revision deleted successfully" }
|
||||
return! adminBarePage "" false next ctx (fun _ -> [])
|
||||
return! adminBarePage "" next ctx (fun _ -> [])
|
||||
| Some _, Some _ -> return! Error.notAuthorized next ctx
|
||||
| None, _
|
||||
| _, None -> return! Error.notFound next ctx
|
||||
|
@ -4,6 +4,7 @@ module MyWebLog.Handlers.Post
|
||||
open System
|
||||
open System.Collections.Generic
|
||||
open MyWebLog
|
||||
open MyWebLog.Views
|
||||
|
||||
/// Parse a slug and page number from an "everything else" URL
|
||||
let private parseSlugAndPage webLog (slugAndPage: string seq) =
|
||||
@ -87,30 +88,29 @@ let preparePostList webLog posts listType (url: string) pageNbr perPage (data: I
|
||||
OlderName = olderPost |> Option.map _.Title
|
||||
}
|
||||
return
|
||||
makeHash {||}
|
||||
|> addToHash ViewContext.Model model
|
||||
|> addToHash "tag_mappings" tagMappings
|
||||
|> addToHash ViewContext.IsPost (match listType with SinglePost -> true | _ -> false)
|
||||
{ AppViewContext.Empty with
|
||||
Payload = model
|
||||
TagMappings = Array.ofList tagMappings
|
||||
IsPost = (match listType with SinglePost -> true | _ -> false) }
|
||||
}
|
||||
|
||||
open Giraffe
|
||||
|
||||
// GET /page/{pageNbr}
|
||||
let pageOfPosts pageNbr : HttpHandler = fun next ctx -> task {
|
||||
let count = ctx.WebLog.PostsPerPage
|
||||
let data = ctx.Data
|
||||
let! posts = data.Post.FindPageOfPublishedPosts ctx.WebLog.Id pageNbr count
|
||||
let! hash = preparePostList ctx.WebLog posts PostList "" pageNbr count data
|
||||
let title =
|
||||
let count = ctx.WebLog.PostsPerPage
|
||||
let data = ctx.Data
|
||||
let! posts = data.Post.FindPageOfPublishedPosts ctx.WebLog.Id pageNbr count
|
||||
let! viewCtx = preparePostList ctx.WebLog posts PostList "" pageNbr count data
|
||||
let title =
|
||||
match pageNbr, ctx.WebLog.DefaultPage with
|
||||
| 1, "posts" -> None
|
||||
| _, "posts" -> Some $"Page {pageNbr}"
|
||||
| _, _ -> Some $"Page {pageNbr} « Posts"
|
||||
return!
|
||||
match title with Some ttl -> addToHash ViewContext.PageTitle ttl hash | None -> hash
|
||||
|> function
|
||||
| hash ->
|
||||
if pageNbr = 1 && ctx.WebLog.DefaultPage = "posts" then addToHash ViewContext.IsHome true hash else hash
|
||||
{ viewCtx with
|
||||
PageTitle = defaultArg title viewCtx.PageTitle
|
||||
IsHome = pageNbr = 1 && ctx.WebLog.DefaultPage = "posts" }
|
||||
|> themedView "index" next ctx
|
||||
}
|
||||
|
||||
@ -134,14 +134,15 @@ let pageOfCategorizedPosts slugAndPage : HttpHandler = fun next ctx -> task {
|
||||
match! data.Post.FindPageOfCategorizedPosts webLog.Id (getCategoryIds slug ctx) pageNbr webLog.PostsPerPage
|
||||
with
|
||||
| posts when List.length posts > 0 ->
|
||||
let! hash = preparePostList webLog posts CategoryList cat.Slug pageNbr webLog.PostsPerPage data
|
||||
let! viewCtx = preparePostList webLog posts CategoryList cat.Slug pageNbr webLog.PostsPerPage data
|
||||
let pgTitle = if pageNbr = 1 then "" else $""" <small class="archive-pg-nbr">(Page {pageNbr})</small>"""
|
||||
return!
|
||||
addToHash ViewContext.PageTitle $"{cat.Name}: Category Archive{pgTitle}" hash
|
||||
|> addToHash "subtitle" (defaultArg cat.Description "")
|
||||
|> addToHash ViewContext.IsCategory true
|
||||
|> addToHash ViewContext.IsCategoryHome (pageNbr = 1)
|
||||
|> addToHash ViewContext.Slug slug
|
||||
{ viewCtx with
|
||||
PageTitle = $"{cat.Name}: Category Archive{pgTitle}"
|
||||
Subtitle = cat.Description
|
||||
IsCategory = true
|
||||
IsCategoryHome = (pageNbr = 1)
|
||||
Slug = Some slug }
|
||||
|> themedView "index" next ctx
|
||||
| _ -> return! Error.notFound next ctx
|
||||
| None -> return! Error.notFound next ctx
|
||||
@ -156,7 +157,7 @@ let pageOfTaggedPosts slugAndPage : HttpHandler = fun next ctx -> task {
|
||||
let webLog = ctx.WebLog
|
||||
let data = ctx.Data
|
||||
match parseSlugAndPage webLog slugAndPage with
|
||||
| Some pageNbr, rawTag, isFeed ->
|
||||
| Some pageNbr, rawTag, isFeed ->
|
||||
let urlTag = HttpUtility.UrlDecode rawTag
|
||||
let! tag = backgroundTask {
|
||||
match! data.TagMap.FindByUrlValue urlTag webLog.Id with
|
||||
@ -169,13 +170,14 @@ let pageOfTaggedPosts slugAndPage : HttpHandler = fun next ctx -> task {
|
||||
else
|
||||
match! data.Post.FindPageOfTaggedPosts webLog.Id tag pageNbr webLog.PostsPerPage with
|
||||
| posts when List.length posts > 0 ->
|
||||
let! hash = preparePostList webLog posts TagList rawTag pageNbr webLog.PostsPerPage data
|
||||
let pgTitle = if pageNbr = 1 then "" else $""" <small class="archive-pg-nbr">(Page {pageNbr})</small>"""
|
||||
let! viewCtx = preparePostList webLog posts TagList rawTag pageNbr webLog.PostsPerPage data
|
||||
let pgTitle = if pageNbr = 1 then "" else $" <small class=\"archive-pg-nbr\">(Page {pageNbr})</small>"
|
||||
return!
|
||||
addToHash ViewContext.PageTitle $"Posts Tagged “{tag}”{pgTitle}" hash
|
||||
|> addToHash ViewContext.IsTag true
|
||||
|> addToHash ViewContext.IsTagHome (pageNbr = 1)
|
||||
|> addToHash ViewContext.Slug rawTag
|
||||
{ viewCtx with
|
||||
PageTitle = $"Posts Tagged “{tag}”{pgTitle}"
|
||||
IsTag = true
|
||||
IsTagHome = (pageNbr = 1)
|
||||
Slug = Some rawTag }
|
||||
|> themedView "index" next ctx
|
||||
// Other systems use hyphens for spaces; redirect if this is an old tag link
|
||||
| _ ->
|
||||
@ -200,9 +202,9 @@ let home : HttpHandler = fun next ctx -> task {
|
||||
match! ctx.Data.Page.FindById (PageId pageId) webLog.Id with
|
||||
| Some page ->
|
||||
return!
|
||||
hashForPage page.Title
|
||||
|> addToHash "page" (DisplayPage.FromPage webLog page)
|
||||
|> addToHash ViewContext.IsHome true
|
||||
{ viewCtxForPage page.Title with
|
||||
Payload = DisplayPage.FromPage webLog page
|
||||
IsHome = true }
|
||||
|> themedView (defaultArg page.Template "single-page") next ctx
|
||||
| None -> return! Error.notFound next ctx
|
||||
}
|
||||
@ -251,10 +253,10 @@ let chapters (post: Post) : HttpHandler = fun next ctx ->
|
||||
// GET /admin/posts
|
||||
// GET /admin/posts/page/{pageNbr}
|
||||
let all pageNbr : HttpHandler = requireAccess Author >=> fun next ctx -> task {
|
||||
let data = ctx.Data
|
||||
let! posts = data.Post.FindPageOfPosts ctx.WebLog.Id pageNbr 25
|
||||
let! hash = preparePostList ctx.WebLog posts AdminList "" pageNbr 25 data
|
||||
return! adminPage "Posts" true next ctx (Views.Post.list (hash[ViewContext.Model] :?> PostDisplay))
|
||||
let data = ctx.Data
|
||||
let! posts = data.Post.FindPageOfPosts ctx.WebLog.Id pageNbr 25
|
||||
let! viewCtx = preparePostList ctx.WebLog posts AdminList "" pageNbr 25 data
|
||||
return! adminPage "Posts" next ctx (Post.list viewCtx.Posts)
|
||||
}
|
||||
|
||||
// GET /admin/post/{id}/edit
|
||||
@ -278,7 +280,7 @@ let edit postId : HttpHandler = requireAccess Author >=> fun next ctx -> task {
|
||||
{ Name = string No; Value = "No" }
|
||||
{ Name = string Clean; Value = "Clean" }
|
||||
]
|
||||
return! adminPage title true next ctx (Views.Post.postEdit model templates ratings)
|
||||
return! adminPage title next ctx (Post.postEdit model templates ratings)
|
||||
| Some _ -> return! Error.notAuthorized next ctx
|
||||
| None -> return! Error.notFound next ctx
|
||||
}
|
||||
@ -298,8 +300,8 @@ let editPermalinks postId : HttpHandler = requireAccess Author >=> fun next ctx
|
||||
| Some post when canEdit post.AuthorId ctx ->
|
||||
return!
|
||||
ManagePermalinksModel.FromPost post
|
||||
|> Views.Helpers.managePermalinks
|
||||
|> adminPage "Manage Prior Permalinks" true next ctx
|
||||
|> managePermalinks
|
||||
|> adminPage "Manage Prior Permalinks" next ctx
|
||||
| Some _ -> return! Error.notAuthorized next ctx
|
||||
| None -> return! Error.notFound next ctx
|
||||
}
|
||||
@ -326,8 +328,8 @@ let editRevisions postId : HttpHandler = requireAccess Author >=> fun next ctx -
|
||||
| Some post when canEdit post.AuthorId ctx ->
|
||||
return!
|
||||
ManageRevisionsModel.FromPost post
|
||||
|> Views.Helpers.manageRevisions
|
||||
|> adminPage "Manage Post Revisions" true next ctx
|
||||
|> manageRevisions
|
||||
|> adminPage "Manage Post Revisions" next ctx
|
||||
| Some _ -> return! Error.notAuthorized next ctx
|
||||
| None -> return! Error.notFound next ctx
|
||||
}
|
||||
@ -359,7 +361,7 @@ let private findPostRevision postId revDate (ctx: HttpContext) = task {
|
||||
let previewRevision (postId, revDate) : HttpHandler = requireAccess Author >=> fun next ctx -> task {
|
||||
match! findPostRevision postId revDate ctx with
|
||||
| Some post, Some rev when canEdit post.AuthorId ctx ->
|
||||
return! adminBarePage "" false next ctx (Views.Helpers.commonPreview rev)
|
||||
return! adminBarePage "" next ctx (commonPreview rev)
|
||||
| Some _, Some _ -> return! Error.notAuthorized next ctx
|
||||
| None, _ | _, None -> return! Error.notFound next ctx
|
||||
}
|
||||
@ -385,7 +387,7 @@ let deleteRevision (postId, revDate) : HttpHandler = requireAccess Author >=> fu
|
||||
| Some post, Some rev when canEdit post.AuthorId ctx ->
|
||||
do! ctx.Data.Post.Update { post with Revisions = post.Revisions |> List.filter (fun r -> r.AsOf <> rev.AsOf) }
|
||||
do! addMessage ctx { UserMessage.Success with Message = "Revision deleted successfully" }
|
||||
return! adminBarePage "" false next ctx (fun _ -> [])
|
||||
return! adminBarePage "" next ctx (fun _ -> [])
|
||||
| Some _, Some _ -> return! Error.notAuthorized next ctx
|
||||
| None, _
|
||||
| _, None -> return! Error.notFound next ctx
|
||||
@ -399,8 +401,8 @@ let manageChapters postId : HttpHandler = requireAccess Author >=> fun next ctx
|
||||
&& Option.isSome post.Episode.Value.Chapters
|
||||
&& canEdit post.AuthorId ctx ->
|
||||
return!
|
||||
Views.Post.chapters false (ManageChaptersModel.Create post)
|
||||
|> adminPage "Manage Chapters" true next ctx
|
||||
Post.chapters false (ManageChaptersModel.Create post)
|
||||
|> adminPage "Manage Chapters" next ctx
|
||||
| Some _ | None -> return! Error.notFound next ctx
|
||||
}
|
||||
|
||||
@ -419,8 +421,8 @@ let editChapter (postId, index) : HttpHandler = requireAccess Author >=> fun nex
|
||||
match chapter with
|
||||
| Some chap ->
|
||||
return!
|
||||
Views.Post.chapterEdit (EditChapterModel.FromChapter post.Id index chap)
|
||||
|> adminBarePage (if index = -1 then "Add a Chapter" else "Edit Chapter") true next ctx
|
||||
Post.chapterEdit (EditChapterModel.FromChapter post.Id index chap)
|
||||
|> adminBarePage (if index = -1 then "Add a Chapter" else "Edit Chapter") next ctx
|
||||
| None -> return! Error.notFound next ctx
|
||||
| Some _ | None -> return! Error.notFound next ctx
|
||||
}
|
||||
@ -447,8 +449,8 @@ let saveChapter (postId, index) : HttpHandler = requireAccess Author >=> fun nex
|
||||
do! data.Post.Update updatedPost
|
||||
do! addMessage ctx { UserMessage.Success with Message = "Chapter saved successfully" }
|
||||
return!
|
||||
Views.Post.chapterList form.AddAnother (ManageChaptersModel.Create updatedPost)
|
||||
|> adminBarePage "Manage Chapters" true next ctx
|
||||
Post.chapterList form.AddAnother (ManageChaptersModel.Create updatedPost)
|
||||
|> adminBarePage "Manage Chapters" next ctx
|
||||
with
|
||||
| ex -> return! Error.server ex.Message next ctx
|
||||
else return! Error.notFound next ctx
|
||||
@ -471,8 +473,8 @@ let deleteChapter (postId, index) : HttpHandler = requireAccess Author >=> fun n
|
||||
do! data.Post.Update updatedPost
|
||||
do! addMessage ctx { UserMessage.Success with Message = "Chapter deleted successfully" }
|
||||
return!
|
||||
Views.Post.chapterList false (ManageChaptersModel.Create updatedPost)
|
||||
|> adminPage "Manage Chapters" true next ctx
|
||||
Post.chapterList false (ManageChaptersModel.Create updatedPost)
|
||||
|> adminPage "Manage Chapters" next ctx
|
||||
else return! Error.notFound next ctx
|
||||
| Some _ | None -> return! Error.notFound next ctx
|
||||
}
|
||||
|
@ -5,11 +5,12 @@ open Giraffe
|
||||
open Microsoft.AspNetCore.Http
|
||||
open MyWebLog
|
||||
|
||||
/// Module to resolve routes that do not match any other known route (web log content)
/// Module to resolve routes that do not match any other known route (web log content)
|
||||
module CatchAll =
|
||||
|
||||
|
||||
open System.IO
|
||||
open MyWebLog.ViewModels
|
||||
|
||||
|
||||
/// Sequence where the first returned value is the proper handler for the link
|
||||
let private deriveAction (ctx: HttpContext) : HttpHandler seq =
|
||||
let webLog = ctx.WebLog
|
||||
@ -22,7 +23,19 @@ module CatchAll =
|
||||
let await it = (Async.AwaitTask >> Async.RunSynchronously) it
|
||||
seq {
|
||||
debug (fun () -> $"Considering URL {textLink}")
|
||||
// Home page directory without the directory slash
|
||||
let staticFileName =
|
||||
[ ctx.GetWebHostEnvironment().ContentRootPath
|
||||
"wwwroot"
|
||||
if textLink.Length > 1 then textLink[1..] else textLink ]
|
||||
|> String.concat (string Path.DirectorySeparatorChar)
|
||||
if File.Exists staticFileName then
|
||||
debug (fun () -> $"File {textLink} is a static file")
|
||||
yield
|
||||
File.GetLastWriteTimeUtc staticFileName
|
||||
|> System.DateTimeOffset
|
||||
|> Some
|
||||
|> streamFile true staticFileName None
|
||||
// Home page directory without the directory slash
|
||||
if textLink = "" then yield redirectTo true (webLog.RelativeUrl Permalink.Empty)
|
||||
let permalink = Permalink textLink[1..]
|
||||
// Current post
|
||||
@ -34,9 +47,8 @@ module CatchAll =
|
||||
yield Post.chapters post
|
||||
else
|
||||
yield fun next ctx ->
|
||||
Post.preparePostList webLog [ post ] Post.ListType.SinglePost "" 1 1 data
|
||||
|> await
|
||||
|> addToHash ViewContext.PageTitle post.Title
|
||||
{ await (Post.preparePostList webLog [ post ] Post.ListType.SinglePost "" 1 1 data) with
|
||||
PageTitle = post.Title }
|
||||
|> themedView (defaultArg post.Template "single-post") next ctx
|
||||
| None -> ()
|
||||
// Current page
|
||||
@ -44,16 +56,16 @@ module CatchAll =
|
||||
| Some page ->
|
||||
debug (fun () -> "Found page by permalink")
|
||||
yield fun next ctx ->
|
||||
hashForPage page.Title
|
||||
|> addToHash "page" (DisplayPage.FromPage webLog page)
|
||||
|> addToHash ViewContext.IsPage true
|
||||
{ viewCtxForPage page.Title with
|
||||
Payload = DisplayPage.FromPage webLog page
|
||||
IsPage = true }
|
||||
|> themedView (defaultArg page.Template "single-page") next ctx
|
||||
| None -> ()
|
||||
// RSS feed
|
||||
match Feed.deriveFeedType ctx textLink with
|
||||
| Some (feedType, postCount) ->
|
||||
debug (fun () -> "Found RSS feed")
|
||||
yield Feed.generate feedType postCount
|
||||
yield Feed.generate feedType postCount
|
||||
| None -> ()
|
||||
// Post differing only by trailing slash
|
||||
let altLink =
|
||||
@ -88,10 +100,9 @@ module CatchAll =
|
||||
let route : HttpHandler = fun next ctx ->
|
||||
match deriveAction ctx |> Seq.tryHead with Some handler -> handler next ctx | None -> Error.notFound next ctx
|
||||
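The catch-all resolver above builds a lazy `seq` of candidate handlers and `route` takes only the first one, so later lookups (page, feed, redirect) never run once an earlier candidate matches. A minimal sketch of that first-match pattern, with hypothetical `lookup*` functions standing in for the real permalink checks:

```fsharp
// First-match resolution over a lazily evaluated sequence of candidates.
// The lookup functions are hypothetical stand-ins for the real checks.
let lookupStaticFile (path: string) : string option =
    if path = "/robots.txt" then Some "static file handler" else None

let lookupPost (path: string) : string option =
    if path.StartsWith "/2024/" then Some "single post handler" else None

let resolve (path: string) : string option =
    seq {
        // each candidate is evaluated only if the previous ones yielded nothing
        match lookupStaticFile path with Some handler -> yield handler | None -> ()
        match lookupPost path with Some handler -> yield handler | None -> ()
    }
    |> Seq.tryHead

// resolve "/2024/hello-world" skips the static-file branch and stops at the
// post lookup, returning Some "single post handler"
```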
/// Serve theme assets
|
||||
module Asset =
|
||||
|
||||
|
||||
// GET /theme/{theme}/{**path}
|
||||
let serve (urlParts: string seq) : HttpHandler = fun next ctx -> task {
|
||||
let path = urlParts |> Seq.skip 1 |> Seq.head
|
||||
@ -202,7 +213,7 @@ let router : HttpHandler = choose [
|
||||
])
|
||||
route "/upload/save" >=> Upload.save
|
||||
]
|
||||
DELETE >=> validateCsrf >=> choose [
|
||||
DELETE >=> choose [
|
||||
routef "/category/%s" Admin.Category.delete
|
||||
subRoute "/page" (choose [
|
||||
routef "/%s" Page.delete
|
||||
@ -229,7 +240,7 @@ let router : HttpHandler = choose [
|
||||
])
|
||||
GET_HEAD >=> routexp "/category/(.*)" Post.pageOfCategorizedPosts
|
||||
GET_HEAD >=> routef "/page/%i" Post.pageOfPosts
|
||||
GET_HEAD >=> routef "/page/%i/" Post.redirectToPageOfPosts
|
||||
GET_HEAD >=> routef "/page/%i/" Post.redirectToPageOfPosts
|
||||
GET_HEAD >=> routexp "/tag/(.*)" Post.pageOfTaggedPosts
|
||||
GET_HEAD >=> routexp "/themes/(.*)" Asset.serve
|
||||
GET_HEAD >=> routexp "/upload/(.*)" Upload.serve
|
||||
@ -243,7 +254,7 @@ let router : HttpHandler = choose [
|
||||
]
|
||||
])
|
||||
GET_HEAD >=> CatchAll.route
|
||||
Error.notFound
|
||||
//Error.notFound
|
||||
]
|
||||
|
||||
/// Wrap a router in a sub-route
|
||||
|
@ -120,12 +120,12 @@ let list : HttpHandler = requireAccess Author >=> fun next ctx -> task {
|
||||
|> Seq.append diskUploads
|
||||
|> Seq.sortByDescending (fun file -> file.UpdatedOn, file.Path)
|
||||
|> Views.WebLog.uploadList
|
||||
|> adminPage "Uploaded Files" true next ctx
|
||||
|> adminPage "Uploaded Files" next ctx
|
||||
}
|
||||
|
||||
// GET /admin/upload/new
|
||||
let showNew : HttpHandler = requireAccess Author >=> fun next ctx ->
|
||||
adminPage "Upload a File" true next ctx Views.WebLog.uploadNew
|
||||
adminPage "Upload a File" next ctx Views.WebLog.uploadNew
|
||||
|
||||
// POST /admin/upload/save
|
||||
let save : HttpHandler = requireAccess Author >=> fun next ctx -> task {
|
||||
|
@ -35,7 +35,7 @@ let logOn returnUrl : HttpHandler = fun next ctx ->
|
||||
match returnUrl with
|
||||
| Some _ -> returnUrl
|
||||
| None -> if ctx.Request.Query.ContainsKey "returnUrl" then Some ctx.Request.Query["returnUrl"].[0] else None
|
||||
adminPage "Log On" true next ctx (Views.User.logOn { LogOnModel.Empty with ReturnTo = returnTo })
|
||||
adminPage "Log On" next ctx (Views.User.logOn { LogOnModel.Empty with ReturnTo = returnTo })
|
||||
|
||||
|
||||
open System.Security.Claims
|
||||
@ -91,12 +91,12 @@ let private goAway : HttpHandler = RequestErrors.BAD_REQUEST "really?"
|
||||
// GET /admin/settings/users
|
||||
let all : HttpHandler = fun next ctx -> task {
|
||||
let! users = ctx.Data.WebLogUser.FindByWebLog ctx.WebLog.Id
|
||||
return! adminBarePage "User Administration" true next ctx (Views.User.userList users)
|
||||
return! adminBarePage "User Administration" next ctx (Views.User.userList users)
|
||||
}
|
||||
|
||||
/// Show the edit user page
|
||||
let private showEdit (model: EditUserModel) : HttpHandler = fun next ctx ->
|
||||
adminBarePage (if model.IsNew then "Add a New User" else "Edit User") true next ctx (Views.User.edit model)
|
||||
adminBarePage (if model.IsNew then "Add a New User" else "Edit User") next ctx (Views.User.edit model)
|
||||
|
||||
// GET /admin/settings/user/{id}/edit
|
||||
let edit usrId : HttpHandler = fun next ctx -> task {
|
||||
@ -139,7 +139,7 @@ let myInfo : HttpHandler = requireAccess Author >=> fun next ctx -> task {
|
||||
| Some user ->
|
||||
return!
|
||||
Views.User.myInfo (EditMyInfoModel.FromUser user) user
|
||||
|> adminPage "Edit Your Information" true next ctx
|
||||
|> adminPage "Edit Your Information" next ctx
|
||||
| None -> return! Error.notFound next ctx
|
||||
}
|
||||
|
||||
@ -164,7 +164,7 @@ let saveMyInfo : HttpHandler = requireAccess Author >=> fun next ctx -> task {
|
||||
do! addMessage ctx { UserMessage.Error with Message = "Passwords did not match; no updates made" }
|
||||
return!
|
||||
Views.User.myInfo { model with NewPassword = ""; NewPasswordConfirm = "" } user
|
||||
|> adminPage "Edit Your Information" true next ctx
|
||||
|> adminPage "Edit Your Information" next ctx
|
||||
| None -> return! Error.notFound next ctx
|
||||
}
|
||||
|
||||
|
@ -9,6 +9,8 @@
|
||||
<ItemGroup>
|
||||
<Content Include="appsettings*.json" CopyToOutputDirectory="Always" />
|
||||
<Compile Include="Caches.fs" />
|
||||
<Compile Include="ViewContext.fs" />
|
||||
<Compile Include="Template.fs" />
|
||||
<Compile Include="Views\Helpers.fs" />
|
||||
<Compile Include="Views\Admin.fs" />
|
||||
<Compile Include="Views\Page.fs" />
|
||||
@ -26,18 +28,20 @@
|
||||
<Compile Include="DotLiquidBespoke.fs" />
|
||||
<Compile Include="Maintenance.fs" />
|
||||
<Compile Include="Program.fs" />
|
||||
<Content Include="wwwroot\htmx.min.js" CopyToOutputDirectory="Always" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="BitBadger.AspNetCore.CanonicalDomains" Version="1.0.0" />
|
||||
<PackageReference Include="BitBadger.AspNetCore.CanonicalDomains" Version="1.1.0" />
|
||||
<PackageReference Include="DotLiquid" Version="2.2.692" />
|
||||
<PackageReference Include="Giraffe" Version="6.4.0" />
|
||||
<PackageReference Include="Giraffe.Htmx" Version="2.0.0" />
|
||||
<PackageReference Include="Giraffe.ViewEngine.Htmx" Version="2.0.0" />
|
||||
<PackageReference Include="NeoSmart.Caching.Sqlite.AspNetCore" Version="8.0.0" />
|
||||
<PackageReference Include="Fluid.Core" Version="2.24.0" />
|
||||
<PackageReference Include="Giraffe" Version="7.0.2" />
|
||||
<PackageReference Include="Giraffe.Htmx" Version="2.0.6" />
|
||||
<PackageReference Include="Giraffe.ViewEngine.Htmx" Version="2.0.6" />
|
||||
<PackageReference Include="NeoSmart.Caching.Sqlite.AspNetCore" Version="9.0.1" />
|
||||
<PackageReference Include="RethinkDB.DistributedCache" Version="1.0.0-rc1" />
|
||||
<PackageReference Include="System.ServiceModel.Syndication" Version="8.0.0" />
|
||||
<PackageReference Update="FSharp.Core" Version="8.0.300" />
|
||||
<PackageReference Include="System.ServiceModel.Syndication" Version="9.0.6" />
|
||||
<PackageReference Update="FSharp.Core" Version="9.0.300" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
@ -6,10 +6,10 @@ open MyWebLog
|
||||
|
||||
/// Middleware to derive the current web log
|
||||
type WebLogMiddleware(next: RequestDelegate, log: ILogger<WebLogMiddleware>) =
|
||||
|
||||
|
||||
/// Is the debug level enabled on the logger?
|
||||
let isDebug = log.IsEnabled LogLevel.Debug
|
||||
|
||||
|
||||
member _.InvokeAsync(ctx: HttpContext) = task {
|
||||
/// Create the full path of the request
|
||||
let path = $"{ctx.Request.Scheme}://{ctx.Request.Host.Value}{ctx.Request.Path.Value}"
|
||||
@ -26,13 +26,15 @@ type WebLogMiddleware(next: RequestDelegate, log: ILogger<WebLogMiddleware>) =
|
||||
}
|
||||
|
||||
|
||||
open Giraffe.Htmx
|
||||
|
||||
/// Middleware to check redirects for the current web log
|
||||
type RedirectRuleMiddleware(next: RequestDelegate, log: ILogger<RedirectRuleMiddleware>) =
|
||||
type RedirectRuleMiddleware(next: RequestDelegate, _log: ILogger<RedirectRuleMiddleware>) =
|
||||
|
||||
/// Shorthand for case-insensitive string equality
|
||||
let ciEquals str1 str2 =
|
||||
System.String.Equals(str1, str2, System.StringComparison.InvariantCultureIgnoreCase)
|
||||
|
||||
|
||||
member _.InvokeAsync(ctx: HttpContext) = task {
|
||||
let path = ctx.Request.Path.Value.ToLower()
|
||||
let matched =
|
||||
@ -44,6 +46,8 @@ type RedirectRuleMiddleware(next: RequestDelegate, log: ILogger<RedirectRuleMidd
|
||||
| WebLogCache.CachedRedirectRule.RegEx (regExFrom, patternTo) ->
|
||||
if regExFrom.IsMatch path then Some (regExFrom.Replace(path, patternTo)) else None)
|
||||
match matched with
|
||||
| Some url when url.StartsWith "http" && ctx.Request.IsHtmx ->
|
||||
do! ctx.Response.WriteAsync $"""<script>window.location.href = "{url}"</script>"""
|
||||
| Some url -> ctx.Response.Redirect(url, permanent = true)
|
||||
| None -> return! next.Invoke ctx
|
||||
}
|
||||
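The redirect middleware above walks the cached rules for the web log, takes the first match (exact or regular expression), and either issues a permanent redirect or writes a small script when the request came from htmx. A sketch of the regular-expression case, with the rule shape simplified here (the real rules come from `WebLogCache.CachedRedirectRule`):

```fsharp
open System.Text.RegularExpressions

// Apply a single regex redirect rule to a request path; None means "no redirect".
let tryRegexRedirect (pattern: string) (replacement: string) (path: string) =
    let regex = Regex(pattern, RegexOptions.IgnoreCase)
    if regex.IsMatch path then Some (regex.Replace(path, replacement)) else None

// tryRegexRedirect "^/blog/(\\d+)$" "/post/$1" "/blog/42" returns Some "/post/42"
```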
@ -59,11 +63,11 @@ open Npgsql
|
||||
|
||||
/// Logic to obtain a data connection and implementation based on configured values
|
||||
module DataImplementation =
|
||||
|
||||
|
||||
open MyWebLog.Converters
|
||||
open RethinkDb.Driver.FSharp
|
||||
open RethinkDb.Driver.Net
|
||||
|
||||
|
||||
/// Create an NpgsqlDataSource from the connection string, configuring appropriately
|
||||
let createNpgsqlDataSource (cfg: IConfiguration) =
|
||||
let builder = NpgsqlDataSourceBuilder(cfg.GetConnectionString "PostgreSQL")
|
||||
@ -83,12 +87,12 @@ module DataImplementation =
|
||||
let conn = Sqlite.Configuration.dbConn ()
|
||||
log.LogInformation $"Using SQLite database {conn.DataSource}"
|
||||
SQLiteData(conn, log, Json.configure (JsonSerializer.CreateDefault()))
|
||||
|
||||
|
||||
if hasConnStr "SQLite" then
|
||||
createSQLite (connStr "SQLite")
|
||||
elif hasConnStr "RethinkDB" then
|
||||
let log = sp.GetRequiredService<ILogger<RethinkDbData>>()
|
||||
let _ = Json.configure Converter.Serializer
|
||||
let _ = Json.configure Converter.Serializer
|
||||
let rethinkCfg = DataConfig.FromUri (connStr "RethinkDB")
|
||||
let conn = await (rethinkCfg.CreateConnectionAsync log)
|
||||
RethinkDbData(conn, rethinkCfg, log)
|
||||
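The branch above selects the data implementation from whichever connection string is present, checking SQLite, then RethinkDB, with PostgreSQL handled as well. A sketch of that dispatch, where the helper and the fallback value are illustrative rather than the module's own `hasConnStr`/default:

```fsharp
open Microsoft.Extensions.Configuration

// Pick a backend name from the configured connection strings.
let chooseBackend (cfg: IConfiguration) =
    let has name = not (isNull (cfg.GetConnectionString name))
    if   has "SQLite"     then "SQLite"
    elif has "RethinkDB"  then "RethinkDB"
    elif has "PostgreSQL" then "PostgreSQL"
    else "SQLite (default)"
```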
@ -131,7 +135,7 @@ open Microsoft.AspNetCore.Authentication.Cookies
|
||||
open Microsoft.AspNetCore.Builder
|
||||
open Microsoft.AspNetCore.HttpOverrides
|
||||
open Microsoft.Extensions.Caching.Distributed
|
||||
open NeoSmart.Caching.Sqlite.AspNetCore
|
||||
open NeoSmart.Caching.Sqlite
|
||||
open RethinkDB.DistributedCache
|
||||
|
||||
[<EntryPoint>]
|
||||
@ -140,7 +144,7 @@ let main args =
|
||||
let builder = WebApplication.CreateBuilder(args)
|
||||
let _ = builder.Services.Configure<ForwardedHeadersOptions>(fun (opts : ForwardedHeadersOptions) ->
|
||||
opts.ForwardedHeaders <- ForwardedHeaders.XForwardedFor ||| ForwardedHeaders.XForwardedProto)
|
||||
let _ =
|
||||
let _ =
|
||||
builder.Services
|
||||
.AddAuthentication(CookieAuthenticationDefaults.AuthenticationScheme)
|
||||
.AddCookie(fun opts ->
|
||||
@ -150,17 +154,17 @@ let main args =
|
||||
let _ = builder.Services.AddLogging()
|
||||
let _ = builder.Services.AddAuthorization()
|
||||
let _ = builder.Services.AddAntiforgery()
|
||||
|
||||
|
||||
let sp = builder.Services.BuildServiceProvider()
|
||||
let data = DataImplementation.get sp
|
||||
let _ = builder.Services.AddSingleton<JsonSerializer> data.Serializer
|
||||
|
||||
|
||||
task {
|
||||
do! data.StartUp()
|
||||
do! WebLogCache.fill data
|
||||
do! ThemeAssetCache.fill data
|
||||
} |> Async.AwaitTask |> Async.RunSynchronously
|
||||
|
||||
|
||||
// Define distributed cache implementation based on data implementation
|
||||
match data with
|
||||
| :? RethinkDbData as rethink ->
|
||||
@ -189,18 +193,18 @@ let main args =
|
||||
Postgres.DistributedCache() :> IDistributedCache)
|
||||
()
|
||||
| _ -> ()
|
||||
|
||||
|
||||
let _ = builder.Services.AddSession(fun opts ->
|
||||
opts.IdleTimeout <- TimeSpan.FromMinutes 60
|
||||
opts.IdleTimeout <- TimeSpan.FromMinutes 60.
|
||||
opts.Cookie.HttpOnly <- true
|
||||
opts.Cookie.IsEssential <- true)
|
||||
let _ = builder.Services.AddGiraffe()
|
||||
|
||||
|
||||
// Set up DotLiquid
|
||||
DotLiquidBespoke.register ()
|
||||
|
||||
let app = builder.Build()
|
||||
|
||||
|
||||
match args |> Array.tryHead with
|
||||
| Some it when it = "init" -> Maintenance.createWebLog args app.Services
|
||||
| Some it when it = "import-links" -> Maintenance.importLinks args app.Services
|
||||
@ -222,13 +226,13 @@ let main args =
|
||||
if Directory.Exists themePath then
|
||||
for themeFile in Directory.EnumerateFiles(themePath, "*-theme.zip") do
|
||||
do! Maintenance.loadTheme [| ""; themeFile |] app.Services
|
||||
|
||||
|
||||
let _ = app.UseForwardedHeaders()
|
||||
|
||||
|
||||
(app.Services.GetRequiredService<IConfiguration>().GetSection "CanonicalDomains").Value
|
||||
|> (isNull >> not)
|
||||
|> function true -> app.UseCanonicalDomains() |> ignore | false -> ()
|
||||
|
||||
|
||||
let _ = app.UseCookiePolicy(CookiePolicyOptions (MinimumSameSitePolicy = SameSiteMode.Strict))
|
||||
let _ = app.UseMiddleware<WebLogMiddleware>()
|
||||
let _ = app.UseMiddleware<RedirectRuleMiddleware>()
|
||||
@ -241,5 +245,5 @@ let main args =
|
||||
app.Run()
|
||||
}
|
||||
|> Async.AwaitTask |> Async.RunSynchronously
|
||||
|
||||
|
||||
0 // Exit code
|
||||
|
src/MyWebLog/Template.fs (new file, 377 lines)
@ -0,0 +1,377 @@
|
||||
/// <summary>Logic to work with Fluid templates</summary>
|
||||
module MyWebLog.Template
|
||||
|
||||
open System
|
||||
open System.Collections.Generic
|
||||
open System.IO
|
||||
open System.Text
|
||||
open Fluid
|
||||
open Fluid.Values
|
||||
open Microsoft.AspNetCore.Antiforgery
|
||||
open Microsoft.Extensions.FileProviders
|
||||
open MyWebLog
|
||||
open MyWebLog.ViewModels
|
||||
|
||||
/// Alias for ValueTask
|
||||
type VTask<'T> = System.Threading.Tasks.ValueTask<'T>
|
||||
|
||||
|
||||
/// <summary>Extensions on Fluid's TemplateContext object</summary>
|
||||
type TemplateContext with
|
||||
|
||||
/// <summary>Get the model of the context as an <tt>AppViewContext</tt> instance</summary>
|
||||
member this.App =
|
||||
this.Model.ToObjectValue() :?> AppViewContext
|
||||
|
||||
|
||||
/// <summary>Helper functions for filters and tags</summary>
|
||||
[<AutoOpen>]
|
||||
module private Helpers =
|
||||
|
||||
/// <summary>Does an asset exist for the current theme?</summary>
|
||||
/// <param name="fileName">The name of the asset</param>
|
||||
/// <param name="webLog">The current web log</param>
|
||||
/// <returns>True if the theme has the requested asset name, false if not</returns>
|
||||
let assetExists fileName (webLog: WebLog) =
|
||||
ThemeAssetCache.get webLog.ThemeId |> List.exists (fun it -> it = fileName)
|
||||
|
||||
/// <summary>Obtain the link from known types</summary>
|
||||
/// <param name="item">The <tt>FluidValue</tt> for the given parameter</param>
|
||||
/// <param name="linkFunc">The function to extract the value of the link into a string</param>
|
||||
/// <returns>The link as a string, or JavaScript to show an alert if a link cannot be determined</returns>
|
||||
let permalink (item: FluidValue) (linkFunc: Permalink -> string) =
|
||||
match item.Type with
|
||||
| FluidValues.String -> Some (item.ToStringValue())
|
||||
| FluidValues.Object ->
|
||||
match item.ToObjectValue() with
|
||||
| :? DisplayPage as page -> Some page.Permalink
|
||||
| :? PostListItem as post -> Some post.Permalink
|
||||
| :? Permalink as link -> Some (string link)
|
||||
| _ -> None
|
||||
| _ -> None
|
||||
|> function
|
||||
| Some link -> linkFunc (Permalink link)
|
||||
| None -> $"alert('unknown item type {item.Type}')"
|
||||
|
||||
/// <summary>Generate a link for theme asset (image, stylesheet, script, etc.)</summary>
|
||||
/// <param name="input">The name of the theme asset</param>
|
||||
/// <param name="ctx">The template context for the current template rendering</param>
|
||||
/// <returns>A relative URL for the given theme asset</returns>
|
||||
let themeAsset (input: FluidValue) (ctx: TemplateContext) =
|
||||
let app = ctx.App
|
||||
app.WebLog.RelativeUrl(Permalink $"themes/{app.WebLog.ThemeId}/{input.ToStringValue()}")
|
||||
|
||||
|
||||
/// <summary>Fluid template options customized with myWebLog filters</summary>
|
||||
/// <returns>A <tt>TemplateOptions</tt> instance with all myWebLog filters and types registered</returns>
|
||||
let options () =
|
||||
let sValue = StringValue >> VTask<FluidValue>
|
||||
|
||||
let it = TemplateOptions.Default
|
||||
it.MemberAccessStrategy.MemberNameStrategy <- MemberNameStrategies.SnakeCase
|
||||
[ // Domain types
|
||||
typeof<CustomFeed>; typeof<Episode>; typeof<Episode option>; typeof<MetaItem>; typeof<Page>; typeof<RssOptions>
|
||||
typeof<TagMap>; typeof<WebLog>
|
||||
// View models
|
||||
typeof<AppViewContext>; typeof<DisplayCategory>; typeof<DisplayPage>; typeof<EditPageModel>; typeof<PostDisplay>
|
||||
typeof<PostListItem>; typeof<UserMessage>
|
||||
// Framework types
|
||||
typeof<AntiforgeryTokenSet>; typeof<DateTime option>; typeof<int option>; typeof<KeyValuePair>
|
||||
typeof<MetaItem list>; typeof<string list>; typeof<string option>; typeof<TagMap list> ]
|
||||
|> List.iter it.MemberAccessStrategy.Register
|
||||
|
||||
// A filter to generate an absolute link
|
||||
it.Filters.AddFilter("absolute_link", fun input _ ctx -> sValue (permalink input ctx.App.WebLog.AbsoluteUrl))
|
||||
|
||||
// A filter to generate a link with posts categorized under the given category
|
||||
it.Filters.AddFilter("category_link",
|
||||
fun input _ ctx ->
|
||||
match input.ToObjectValue() with
|
||||
| :? DisplayCategory as cat -> Some cat.Slug
|
||||
| :? string as slug -> Some slug
|
||||
| _ -> None
|
||||
|> function
|
||||
| Some slug -> ctx.App.WebLog.RelativeUrl(Permalink $"category/{slug}/")
|
||||
| None -> $"alert('unknown category object type {input.Type}')"
|
||||
|> sValue)
|
||||
|
||||
// A filter to generate a link that will edit a page
|
||||
it.Filters.AddFilter("edit_page_link",
|
||||
fun input _ ctx ->
|
||||
match input.ToObjectValue() with
|
||||
| :? DisplayPage as page -> Some page.Id
|
||||
| :? string as theId -> Some theId
|
||||
| _ -> None
|
||||
|> function
|
||||
| Some pageId -> ctx.App.WebLog.RelativeUrl(Permalink $"admin/page/{pageId}/edit")
|
||||
| None -> $"alert('unknown page object type {input.Type}')"
|
||||
|> sValue)
|
||||
|
||||
// A filter to generate a link that will edit a post
|
||||
it.Filters.AddFilter("edit_post_link",
|
||||
fun input _ ctx ->
|
||||
match input.ToObjectValue() with
|
||||
| :? PostListItem as post -> Some post.Id
|
||||
| :? string as theId -> Some theId
|
||||
| _ -> None
|
||||
|> function
|
||||
| Some postId -> ctx.App.WebLog.RelativeUrl(Permalink $"admin/post/{postId}/edit")
|
||||
| None -> $"alert('unknown post object type {input.Type}')"
|
||||
|> sValue)
|
||||
|
||||
// A filter to generate nav links, highlighting the active link (starts-with match)
|
||||
it.Filters.AddFilter("nav_link",
|
||||
fun input args ctx ->
|
||||
let app = ctx.App
|
||||
let extraPath = app.WebLog.ExtraPath
|
||||
let path = if extraPath = "" then "" else $"{extraPath[1..]}/"
|
||||
let url = input.ToStringValue()
|
||||
seq {
|
||||
"<li class=nav-item><a class=\"nav-link"
|
||||
if app.CurrentPage.StartsWith $"{path}{url}" then " active"
|
||||
"\" href=\""
|
||||
app.WebLog.RelativeUrl(Permalink url)
|
||||
"\">"
|
||||
args.At(0).ToStringValue()
|
||||
"</a>"
|
||||
}
|
||||
|> String.concat ""
|
||||
|> sValue)
|
||||
|
||||
// A filter to generate a relative link
|
||||
it.Filters.AddFilter("relative_link", fun input _ ctx -> sValue (permalink input ctx.App.WebLog.RelativeUrl))
|
||||
|
||||
// A filter to generate a link with posts tagged with the given tag
|
||||
it.Filters.AddFilter("tag_link",
|
||||
fun input _ ctx ->
|
||||
let tag = input.ToStringValue()
|
||||
ctx.App.TagMappings
|
||||
|> Array.tryFind (fun it -> it.Tag = tag)
|
||||
|> function
|
||||
| Some tagMap -> tagMap.UrlValue
|
||||
| None -> tag.Replace(" ", "+")
|
||||
|> function tagUrl -> ctx.App.WebLog.RelativeUrl(Permalink $"tag/{tagUrl}/")
|
||||
|> sValue)
|
||||
|
||||
// A filter to generate a link for theme asset (image, stylesheet, script, etc.)
|
||||
it.Filters.AddFilter("theme_asset", fun input _ ctx -> sValue (themeAsset input ctx))
|
||||
|
||||
// A filter to retrieve the value of a meta item from a list
|
||||
// (shorter than `{% assign item = list | where: "Name", [name] | first %}{{ item.value }}`)
|
||||
it.Filters.AddFilter("value",
|
||||
fun input args ctx ->
|
||||
let name = args.At(0).ToStringValue()
|
||||
let picker (value: FluidValue) =
|
||||
let item = value.ToObjectValue() :?> MetaItem
|
||||
if item.Name = name then Some item.Value else None
|
||||
match input with
|
||||
| :? NilValue -> $"-- {name} not found --"
|
||||
| it ->
|
||||
(it :?> ArrayValue).Values
|
||||
|> Seq.tryPick picker
|
||||
|> Option.defaultValue $"-- {name} not found --"
|
||||
|> sValue)
|
||||
|
||||
it
|
||||
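With the options value above, every registered filter becomes available to theme templates. A minimal sketch of exercising one of them directly, assuming it runs alongside this module so `parser`, `options`, and the view types are in scope (the theme ID is illustrative, and `ThemeId` is assumed to be the usual single-case union):

```fsharp
// Expand a theme_asset reference the same way a theme template would.
let demoThemeAsset () =
    let template = parser.Parse "{{ 'style.css' | theme_asset }}"
    let app =
        { AppViewContext.Empty with
            WebLog = { WebLog.Empty with ThemeId = ThemeId "my-theme" } }
    // renders a relative URL along the lines of /themes/my-theme/style.css
    template.Render(TemplateContext(app, options (), true))
```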
|
||||
|
||||
/// <summary>Fluid parser customized with myWebLog filters and tags</summary>
|
||||
let parser =
|
||||
// spacer
|
||||
let s = " "
|
||||
// Required return for tag delegates
|
||||
let ok () =
|
||||
VTask<Fluid.Ast.Completion> Fluid.Ast.Completion.Normal
|
||||
|
||||
let it = FluidParser()
|
||||
|
||||
// Create various items in the page header based on the state of the page being generated
|
||||
it.RegisterEmptyTag("page_head",
|
||||
fun writer encoder context ->
|
||||
let app = context.App
|
||||
let attrEnc = System.Web.HttpUtility.HtmlAttributeEncode
|
||||
|
||||
// OpenGraph tags
|
||||
let doOpenGraph =
|
||||
(app.WebLog.AutoOpenGraph && (app.IsPage || app.IsPost))
|
||||
|| (app.IsPage && Option.isSome app.Page.OpenGraph)
|
||||
|| (app.IsPost && Option.isSome app.Posts.Posts[0].OpenGraph)
|
||||
|
||||
if doOpenGraph then
|
||||
let writeOgProp (name, value) =
|
||||
writer.WriteLine $"""{s}<meta property=%s{name} content="{attrEnc value}">"""
|
||||
writeOgProp ("og:title", if app.IsPage then app.Page.Title else app.Posts.Posts[0].Title)
|
||||
writeOgProp ("og:site_name", app.WebLog.Name)
|
||||
if app.IsPage then app.Page.Permalink else app.Posts.Posts[0].Permalink
|
||||
|> Permalink
|
||||
|> app.WebLog.AbsoluteUrl
|
||||
|> function url -> writeOgProp ("og:url", url)
|
||||
match if app.IsPage then app.Page.OpenGraph else app.Posts.Posts[0].OpenGraph with
|
||||
| Some props -> props.ToProperties app.WebLog.UrlToAbsolute |> Seq.iter writeOgProp
|
||||
| None -> ()
|
||||
|
||||
writer.WriteLine $"""{s}<meta name=generator content="{app.Generator}">"""
|
||||
|
||||
// Theme assets
|
||||
if assetExists "style.css" app.WebLog then
|
||||
themeAsset (StringValue "style.css") context
|
||||
|> sprintf "%s<link rel=stylesheet href=\"%s\">" s
|
||||
|> writer.WriteLine
|
||||
if assetExists "favicon.ico" app.WebLog then
|
||||
themeAsset (StringValue "favicon.ico") context
|
||||
|> sprintf "%s<link rel=icon href=\"%s\">" s
|
||||
|> writer.WriteLine
|
||||
|
||||
// RSS feeds and canonical URLs
|
||||
let feedLink title url =
|
||||
let escTitle = System.Web.HttpUtility.HtmlAttributeEncode title
|
||||
let relUrl = app.WebLog.RelativeUrl(Permalink url)
|
||||
$"""{s}<link rel=alternate type="application/rss+xml" title="{escTitle}" href="{relUrl}">"""
|
||||
|
||||
if app.WebLog.Rss.IsFeedEnabled && app.IsHome then
|
||||
writer.WriteLine(feedLink app.WebLog.Name app.WebLog.Rss.FeedName)
|
||||
writer.WriteLine $"""{s}<link rel=canonical href="{app.WebLog.AbsoluteUrl Permalink.Empty}">"""
|
||||
|
||||
if app.WebLog.Rss.IsCategoryEnabled && app.IsCategoryHome then
|
||||
let slug = context.AmbientValues["slug"] :?> string
|
||||
writer.WriteLine(feedLink app.WebLog.Name $"category/{slug}/{app.WebLog.Rss.FeedName}")
|
||||
|
||||
if app.WebLog.Rss.IsTagEnabled && app.IsTagHome then
|
||||
let slug = context.AmbientValues["slug"] :?> string
|
||||
writer.WriteLine(feedLink app.WebLog.Name $"tag/{slug}/{app.WebLog.Rss.FeedName}")
|
||||
|
||||
if app.IsPost then
|
||||
let url = app.WebLog.AbsoluteUrl(Permalink app.Posts.Posts[0].Permalink)
|
||||
writer.WriteLine $"""{s}<link rel=canonical href="{url}">"""
|
||||
|
||||
if app.IsPage then
|
||||
let url = app.WebLog.AbsoluteUrl(Permalink app.Page.Permalink)
|
||||
writer.WriteLine $"""{s}<link rel=canonical href="{url}">"""
|
||||
|
||||
ok ())
|
||||
|
||||
// Create various items in the page footer based on the state of the page being generated
|
||||
it.RegisterEmptyTag("page_foot",
|
||||
fun writer encoder context ->
|
||||
let webLog = context.App.WebLog
|
||||
if webLog.AutoHtmx then
|
||||
context.App.WebLog.RelativeUrl(Permalink "htmx.min.js")
|
||||
|> sprintf "%s<script src=\"%s\"></script>" s
|
||||
|> writer.WriteLine
|
||||
if assetExists "script.js" webLog then
|
||||
themeAsset (StringValue "script.js") context
|
||||
|> sprintf "%s<script src=\"%s\"></script>" s
|
||||
|> writer.WriteLine
|
||||
ok ())
|
||||
|
||||
// Create links for a user to log on or off, and a dashboard link if they are logged off
|
||||
it.RegisterEmptyTag("user_links",
|
||||
fun writer encoder ctx ->
|
||||
let app = ctx.App
|
||||
let link it = app.WebLog.RelativeUrl(Permalink it)
|
||||
seq {
|
||||
"""<ul class="navbar-nav flex-grow-1 justify-content-end">"""
|
||||
match app.IsLoggedOn with
|
||||
| true ->
|
||||
$"""<li class=nav-item><a class=nav-link href="{link "admin/dashboard"}">Dashboard</a>"""
|
||||
$"""<li class=nav-item><a class=nav-link href="{link "user/log-off"}">Log Off</a>"""
|
||||
| false ->
|
||||
$"""<li class=nav-item><a class=nav-link href="{link "user/log-on"}">Log On</a>"""
|
||||
"</ul>"
|
||||
}
|
||||
|> Seq.iter writer.WriteLine
|
||||
ok())
|
||||
|
||||
it
|
||||
|
||||
|
||||
open MyWebLog.Data
|
||||
|
||||
/// <summary>Cache for parsed templates</summary>
|
||||
module Cache =
|
||||
|
||||
open System.Collections.Concurrent
|
||||
|
||||
/// Cache of parsed templates
|
||||
let private _cache = ConcurrentDictionary<string, IFluidTemplate> ()
|
||||
|
||||
/// <summary>Get a template for the given theme and template name</summary>
|
||||
/// <param name="themeId">The ID of the theme for which a template should be retrieved</param>
|
||||
/// <param name="templateName">The name of the template to retrieve</param>
|
||||
/// <param name="data">The data implementation from which the template should be retrieved (if not cached)</param>
|
||||
/// <returns>
|
||||
/// An <tt>Ok</tt> result with the template if it is found and valid, an <tt>Error</tt> result if not
|
||||
/// </returns>
|
||||
let get (themeId: ThemeId) (templateName: string) (data: IData) = backgroundTask {
|
||||
let templatePath = $"{themeId}/{templateName}"
|
||||
match _cache.ContainsKey templatePath with
|
||||
| true -> return Ok _cache[templatePath]
|
||||
| false ->
|
||||
match! data.Theme.FindById themeId with
|
||||
| Some theme ->
|
||||
match theme.Templates |> List.tryFind (fun t -> t.Name = templateName) with
|
||||
| Some template ->
|
||||
_cache[templatePath] <- parser.Parse(template.Text)
|
||||
return Ok _cache[templatePath]
|
||||
| None ->
|
||||
return Error $"Theme ID {themeId} does not have a template named {templateName}"
|
||||
| None -> return Error $"Theme ID {themeId} does not exist"
|
||||
}
|
||||
|
||||
/// <summary>Get all theme/template names currently cached</summary>
|
||||
/// <returns>All theme/template names current cached</returns>
|
||||
let allNames () =
|
||||
_cache.Keys |> Seq.sort |> Seq.toList
|
||||
|
||||
/// <summary>Invalidate all template cache entries for the given theme ID</summary>
|
||||
/// <param name="themeId">The ID of the theme whose cache should be invalidated</param>
|
||||
let invalidateTheme (themeId: ThemeId) =
|
||||
let keyPrefix = string themeId
|
||||
_cache.Keys
|
||||
|> Seq.filter _.StartsWith(keyPrefix)
|
||||
|> List.ofSeq
|
||||
|> List.iter (fun key -> match _cache.TryRemove key with _, _ -> ())
|
||||
|
||||
/// <summary>Remove all entries from the template cache</summary>
|
||||
let empty () =
|
||||
_cache.Clear()
|
||||
|
||||
|
||||
/// <summary>A file provider to retrieve files by theme</summary>
|
||||
type ThemeFileProvider(themeId: ThemeId, data: IData) =
|
||||
|
||||
interface IFileProvider with
|
||||
|
||||
member _.GetDirectoryContents _ =
|
||||
raise <| NotImplementedException "The theme file provider does not support directory listings"
|
||||
|
||||
member _.GetFileInfo path =
|
||||
match data.Theme.FindById themeId |> Async.AwaitTask |> Async.RunSynchronously with
|
||||
| Some theme ->
|
||||
match theme.Templates |> List.tryFind (fun t -> t.Name = path) with
|
||||
| Some template ->
|
||||
{ new IFileInfo with
|
||||
member _.Exists = true
|
||||
member _.IsDirectory = false
|
||||
member _.LastModified = DateTimeOffset.Now
|
||||
member _.Length = int64 template.Text.Length
|
||||
member _.Name = template.Name.Split '/' |> Array.last
|
||||
member _.PhysicalPath = null
|
||||
member _.CreateReadStream() =
|
||||
new MemoryStream(Encoding.UTF8.GetBytes template.Text) }
|
||||
| None -> NotFoundFileInfo path
|
||||
| None -> NotFoundFileInfo path
|
||||
|
||||
member _.Watch _ =
|
||||
raise <| NotImplementedException "The theme file provider does not support watching for changes"
|
||||
|
||||
|
||||
/// <summary>Render a template to a string</summary>
|
||||
/// <param name="template">The template to be rendered</param>
|
||||
/// <param name="viewCtx">The app context for rendering this template</param>
|
||||
/// <param name="data">The data implementation to use if required</param>
|
||||
/// <returns>The rendered template as a string</returns>
|
||||
let render (template: IFluidTemplate) (viewCtx: AppViewContext) data =
|
||||
let opts = options ()
|
||||
opts.FileProvider <- ThemeFileProvider(viewCtx.WebLog.ThemeId, data)
|
||||
template.Render(TemplateContext(viewCtx, opts, true))
|
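Putting the pieces of this file together, a handler would typically look a template up through `Cache.get` and pass the result to `render`. A minimal sketch, where the template name and the incoming `viewCtx`/`data` values are placeholders:

```fsharp
// Resolve a cached template for the current theme and render it, propagating
// the error message if the theme or template cannot be found.
let tryRenderTemplate (data: IData) (viewCtx: AppViewContext) = task {
    match! Cache.get viewCtx.WebLog.ThemeId "single-post" data with
    | Ok template -> return Ok (render template viewCtx data)
    | Error msg -> return Error msg
}
```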
src/MyWebLog/ViewContext.fs (new file, 132 lines)
@ -0,0 +1,132 @@
|
||||
/// <summary>View rendering context for myWebLog</summary>
|
||||
[<AutoOpen>]
|
||||
module MyWebLog.ViewContext
|
||||
|
||||
open Microsoft.AspNetCore.Antiforgery
|
||||
open MyWebLog.ViewModels
|
||||
|
||||
/// <summary>The rendering context for this application</summary>
|
||||
[<NoComparison; NoEquality>]
|
||||
type AppViewContext = {
|
||||
/// <summary>The web log for this request</summary>
|
||||
WebLog: WebLog
|
||||
|
||||
/// <summary>The ID of the current user</summary>
|
||||
UserId: WebLogUserId option
|
||||
|
||||
/// <summary>The title of the page being rendered</summary>
|
||||
PageTitle: string
|
||||
|
||||
/// <summary>The subtitle for the page</summary>
|
||||
Subtitle: string option
|
||||
|
||||
/// <summary>The anti-Cross Site Request Forgery (CSRF) token set to use when rendering a form</summary>
|
||||
Csrf: AntiforgeryTokenSet option
|
||||
|
||||
/// <summary>The page list for the web log</summary>
|
||||
PageList: DisplayPage array
|
||||
|
||||
/// <summary>Categories and post counts for the web log</summary>
|
||||
Categories: DisplayCategory array
|
||||
|
||||
/// <summary>Tag mappings</summary>
|
||||
TagMappings: TagMap array
|
||||
|
||||
/// <summary>The URL of the page being rendered</summary>
|
||||
CurrentPage: string
|
||||
|
||||
/// <summary>User messages</summary>
|
||||
Messages: UserMessage array
|
||||
|
||||
/// <summary>The generator string for the rendered page</summary>
|
||||
Generator: string
|
||||
|
||||
/// <summary>The payload for this page (see other properties that wrap this one)</summary>
|
||||
Payload: obj
|
||||
|
||||
/// <summary>The content of a page (wrapped when rendering the layout)</summary>
|
||||
Content: string
|
||||
|
||||
/// <summary>A string to load the minified htmx script</summary>
|
||||
HtmxScript: string
|
||||
|
||||
/// <summary>Whether the current user is an author</summary>
|
||||
IsAuthor: bool
|
||||
|
||||
/// <summary>Whether the current user is an editor (implies author)</summary>
|
||||
IsEditor: bool
|
||||
|
||||
/// <summary>Whether the current user is a web log administrator (implies author and editor)</summary>
|
||||
IsWebLogAdmin: bool
|
||||
|
||||
/// <summary>Whether the current user is an installation administrator (implies all web log rights)</summary>
|
||||
IsAdministrator: bool
|
||||
|
||||
/// <summary>Whether the current page is the home page of the web log</summary>
|
||||
IsHome: bool
|
||||
|
||||
/// <summary>Whether the current page is a category archive page</summary>
|
||||
IsCategory: bool
|
||||
|
||||
/// <summary>Whether the current page is a category archive home page</summary>
|
||||
IsCategoryHome: bool
|
||||
|
||||
/// <summary>Whether the current page is a tag archive page</summary>
|
||||
IsTag: bool
|
||||
|
||||
/// <summary>Whether the current page is a tag archive home page</summary>
|
||||
IsTagHome: bool
|
||||
|
||||
/// <summary>Whether the current page is a single post</summary>
|
||||
IsPost: bool
|
||||
|
||||
/// <summary>Whether the current page is a static page</summary>
|
||||
IsPage: bool
|
||||
|
||||
/// <summary>The slug for a category or tag</summary>
|
||||
Slug: string option
|
||||
} with
|
||||
|
||||
/// <summary>Whether there is a user logged on</summary>
|
||||
member this.IsLoggedOn = Option.isSome this.UserId
|
||||
|
||||
/// <summary>The payload for this page as a <c>DisplayPage</c></summary>
|
||||
member this.Page =
|
||||
this.Payload :?> DisplayPage
|
||||
|
||||
/// <summary>The payload for this page as a <c>PostDisplay</c></summary>
|
||||
member this.Posts =
|
||||
this.Payload :?> PostDisplay
|
||||
|
||||
/// <summary>The model for this view (prior versions used <c>model</c> for the v3 <c>payload</c>)</summary>
|
||||
member this.Model =
|
||||
this.Payload
|
||||
|
||||
/// <summary>An empty view context</summary>
|
||||
static member Empty =
|
||||
{ WebLog = WebLog.Empty
|
||||
UserId = None
|
||||
PageTitle = ""
|
||||
Subtitle = None
|
||||
Csrf = None
|
||||
PageList = [||]
|
||||
Categories = [||]
|
||||
TagMappings = [||]
|
||||
CurrentPage = ""
|
||||
Messages = [||]
|
||||
Generator = ""
|
||||
Payload = obj ()
|
||||
Content = ""
|
||||
HtmxScript = ""
|
||||
IsAuthor = false
|
||||
IsEditor = false
|
||||
IsWebLogAdmin = false
|
||||
IsAdministrator = false
|
||||
IsHome = false
|
||||
IsCategory = false
|
||||
IsCategoryHome = false
|
||||
IsTag = false
|
||||
IsTagHome = false
|
||||
IsPost = false
|
||||
IsPage = false
|
||||
Slug = None }
|
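In practice a handler rarely fills every field of this record; it starts from `Empty`, overrides the handful that matter for the page being rendered, and relies on the typed wrappers (`Page`, `Posts`) to read the payload back. A minimal sketch with illustrative values:

```fsharp
// Build a view context for a single-post page; only the relevant fields change.
let postViewContext (webLog: WebLog) (title: string) (payload: PostDisplay) =
    { AppViewContext.Empty with
        WebLog = webLog
        PageTitle = title
        Payload = box payload
        IsPost = true }
```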
@ -6,9 +6,12 @@ open Giraffe.ViewEngine.Htmx
|
||||
open MyWebLog
|
||||
open MyWebLog.ViewModels
|
||||
|
||||
/// The administrator dashboard
|
||||
/// <summary>The administrator dashboard</summary>
|
||||
/// <param name="themes">The themes to display</param>
|
||||
/// <param name="app">The view context</param>
|
||||
/// <returns>The admin dashboard view</returns>
|
||||
let dashboard (themes: Theme list) app = [
|
||||
let templates = TemplateCache.allNames ()
|
||||
let templates = Template.Cache.allNames ()
|
||||
let cacheBaseUrl = relUrl app "admin/cache/"
|
||||
let webLogCacheUrl = $"{cacheBaseUrl}web-log/"
|
||||
let themeCacheUrl = $"{cacheBaseUrl}theme/"
|
||||
@ -55,7 +58,7 @@ let dashboard (themes: Theme list) app = [
|
||||
]
|
||||
div [ _class "row" ] [
|
||||
div [ _class "col-12 col-lg-6 pb-3" ] [
|
||||
div [ _class "card" ] [
|
||||
div [ _class "card" ] [
|
||||
header [ _class "card-header text-white bg-secondary" ] [ raw "Web Logs" ]
|
||||
div [ _class "card-body pb-0" ] [
|
||||
h6 [ _class "card-subtitle text-muted pb-3" ] [
|
||||
|
@ -1,7 +1,7 @@
|
||||
/// <summary>Helpers available for all myWebLog views</summary>
|
||||
[<AutoOpen>]
|
||||
module MyWebLog.Views.Helpers
|
||||
|
||||
open Microsoft.AspNetCore.Antiforgery
|
||||
open Giraffe.ViewEngine
|
||||
open Giraffe.ViewEngine.Accessibility
|
||||
open Giraffe.ViewEngine.Htmx
|
||||
@ -10,78 +10,35 @@ open MyWebLog.ViewModels
|
||||
open NodaTime
|
||||
open NodaTime.Text
|
||||
|
||||
/// The rendering context for this application
|
||||
[<NoComparison; NoEquality>]
|
||||
type AppViewContext = {
|
||||
/// The web log for this request
|
||||
WebLog: WebLog
|
||||
|
||||
/// The ID of the current user
|
||||
UserId: WebLogUserId option
|
||||
|
||||
/// The title of the page being rendered
|
||||
PageTitle: string
|
||||
|
||||
/// The anti-Cross Site Request Forgery (CSRF) token set to use when rendering a form
|
||||
Csrf: AntiforgeryTokenSet option
|
||||
|
||||
/// The page list for the web log
|
||||
PageList: DisplayPage array
|
||||
|
||||
/// Categories and post counts for the web log
|
||||
Categories: DisplayCategory array
|
||||
|
||||
/// The URL of the page being rendered
|
||||
CurrentPage: string
|
||||
|
||||
/// User messages
|
||||
Messages: UserMessage array
|
||||
|
||||
/// The generator string for the rendered page
|
||||
Generator: string
|
||||
|
||||
/// A string to load the minified htmx script
|
||||
HtmxScript: string
|
||||
|
||||
/// Whether the current user is an author
|
||||
IsAuthor: bool
|
||||
|
||||
/// Whether the current user is an editor (implies author)
|
||||
IsEditor: bool
|
||||
|
||||
/// Whether the current user is a web log administrator (implies author and editor)
|
||||
IsWebLogAdmin: bool
|
||||
|
||||
/// Whether the current user is an installation administrator (implies all web log rights)
|
||||
IsAdministrator: bool
|
||||
} with
|
||||
|
||||
/// Whether there is a user logged on
|
||||
member this.IsLoggedOn = Option.isSome this.UserId
|
||||
|
||||
|
||||
/// Create a relative URL for the current web log
|
||||
/// <summary>Create a relative URL for the current web log</summary>
|
||||
/// <param name="app">The app view context for the current view</param>
|
||||
/// <returns>A function that, given a string, will construct a relative URL</returns>
|
||||
let relUrl app =
|
||||
Permalink >> app.WebLog.RelativeUrl
|
||||
|
||||
/// Add a hidden input with the anti-Cross Site Request Forgery (CSRF) token
|
||||
/// <summary>Create a hidden input with the anti-Cross Site Request Forgery (CSRF) token</summary>
|
||||
/// <param name="app">The app view context for the current view</param>
|
||||
/// <returns>A hidden input with the CSRF token value</returns>
|
||||
let antiCsrf app =
|
||||
input [ _type "hidden"; _name app.Csrf.Value.FormFieldName; _value app.Csrf.Value.RequestToken ]
|
||||
|
||||
/// Shorthand for encoded text in a template
|
||||
/// <summary>Shorthand for encoded text in a template</summary>
|
||||
let txt = encodedText
|
||||
|
||||
/// Shorthand for raw text in a template
|
||||
/// <summary>Shorthand for raw text in a template</summary>
|
||||
let raw = rawText
|
||||
|
||||
/// Rel attribute to prevent opener information from being provided to the new window
|
||||
/// <summary><c>rel</c> attribute to prevent opener information from being provided to the new window</summary>
|
||||
let _relNoOpener = _rel "noopener"
|
||||
|
||||
/// The pattern for a long date
|
||||
/// <summary>The pattern for a long date</summary>
|
||||
let longDatePattern =
|
||||
ZonedDateTimePattern.CreateWithInvariantCulture("MMMM d, yyyy", DateTimeZoneProviders.Tzdb)
|
||||
|
||||
/// Create a long date
|
||||
/// <summary>Create a long date</summary>
|
||||
/// <param name="app">The app view context for the current view</param>
|
||||
/// <param name="instant">The instant from which a localized long date should be produced</param>
|
||||
/// <returns>A text node with the long date</returns>
|
||||
let longDate app (instant: Instant) =
|
||||
DateTimeZoneProviders.Tzdb[app.WebLog.TimeZone]
|
||||
|> Option.ofObj
|
||||
@ -89,11 +46,14 @@ let longDate app (instant: Instant) =
|
||||
|> Option.defaultValue "--"
|
||||
|> txt
|
||||
|
||||
/// The pattern for a short time
|
||||
/// <summary>The pattern for a short time</summary>
|
||||
let shortTimePattern =
|
||||
ZonedDateTimePattern.CreateWithInvariantCulture("h:mmtt", DateTimeZoneProviders.Tzdb)
|
||||
|
||||
/// Create a short time
|
||||
/// <summary>Create a short time</summary>
|
||||
/// <param name="app">The app view context for the current view</param>
|
||||
/// <param name="instant">The instant from which a localized short date should be produced</param>
|
||||
/// <returns>A text node with the short date</returns>
|
||||
let shortTime app (instant: Instant) =
|
||||
DateTimeZoneProviders.Tzdb[app.WebLog.TimeZone]
|
||||
|> Option.ofObj
|
||||
@ -101,11 +61,19 @@ let shortTime app (instant: Instant) =
|
||||
|> Option.defaultValue "--"
|
||||
|> txt
|
||||
|
||||
/// Display "Yes" or "No" based on the state of a boolean value
|
||||
/// <summary>Display "Yes" or "No" based on the state of a boolean value</summary>
|
||||
/// <param name="value">The true/false value</param>
|
||||
/// <returns>A text node with <c>Yes</c> if true, <c>No</c> if false</returns>
|
||||
let yesOrNo value =
|
||||
raw (if value then "Yes" else "No")
|
||||
|
||||
/// Extract an attribute value from a list of attributes, remove that attribute if it is found
|
||||
/// <summary>Extract an attribute value from a list of attributes, remove that attribute if it is found</summary>
|
||||
/// <param name="name">The name of the attribute to be extracted and removed</param>
|
||||
/// <param name="attrs">The list of attributes to be searched</param>
|
||||
/// <returns>
|
||||
/// A tuple with <c>fst</c> being <c>Some</c> with the attribute if found, <c>None</c> if not; and <c>snd</c>
|
||||
/// being the list of attributes with the extracted one removed
|
||||
/// </returns>
|
||||
let extractAttrValue name attrs =
|
||||
let valueAttr = attrs |> List.tryFind (fun x -> match x with KeyValue (key, _) when key = name -> true | _ -> false)
|
||||
match valueAttr with
|
||||
@ -113,8 +81,15 @@ let extractAttrValue name attrs =
|
||||
Some value,
|
||||
attrs |> List.filter (fun x -> match x with KeyValue (key, _) when key = name -> false | _ -> true)
|
||||
| Some _ | None -> None, attrs
|
||||
|
||||
/// Create a text input field
|
||||
|
||||
/// <summary>Create a text input field</summary>
|
||||
/// <param name="fieldType">The <c>input</c> field type</param>
|
||||
/// <param name="attrs">Attributes for the field</param>
|
||||
/// <param name="name">The name of the input field</param>
|
||||
/// <param name="labelText">The text of the <c>label</c> element associated with this <c>input</c></param>
|
||||
/// <param name="value">The value of the <c>input</c> field</param>
|
||||
/// <param name="extra">Any extra elements to include after the <c>input</c> and <c>label</c></param>
|
||||
/// <returns>A <c>div</c> element with the <c>input</c> field constructed</returns>
|
||||
let inputField fieldType attrs name labelText value extra =
|
||||
let fieldId, attrs = extractAttrValue "id" attrs
|
||||
let cssClass, attrs = extractAttrValue "class" attrs
|
||||
@ -127,23 +102,58 @@ let inputField fieldType attrs name labelText value extra =
|
||||
yield! extra
|
||||
]
|
||||
|
||||
/// Create a text input field
|
||||
/// <summary>Create a text input field</summary>
|
||||
/// <param name="attrs">Attributes for the field</param>
|
||||
/// <param name="name">The name of the input field</param>
|
||||
/// <param name="labelText">The text of the <c>label</c> element associated with this <c>input</c></param>
|
||||
/// <param name="value">The value of the <c>input</c> field</param>
|
||||
/// <param name="extra">Any extra elements to include after the <c>input</c> and <c>label</c></param>
|
||||
/// <returns>A <c>div</c> element with the <c>input type=text</c> field constructed</returns>
|
||||
let textField attrs name labelText value extra =
|
||||
inputField "text" attrs name labelText value extra
|
||||
|
||||
/// Create a number input field
|
||||
/// <summary>Create a number input field</summary>
|
||||
/// <param name="attrs">Attributes for the field</param>
|
||||
/// <param name="name">The name of the input field</param>
|
||||
/// <param name="labelText">The text of the <c>label</c> element associated with this <c>input</c></param>
|
||||
/// <param name="value">The value of the <c>input</c> field</param>
|
||||
/// <param name="extra">Any extra elements to include after the <c>input</c> and <c>label</c></param>
|
||||
/// <returns>A <c>div</c> element with the <c>input type=number</c> field constructed</returns>
|
||||
let numberField attrs name labelText value extra =
|
||||
inputField "number" attrs name labelText value extra
|
||||
|
||||
/// Create an e-mail input field
|
||||
/// <summary>Create an e-mail input field</summary>
|
||||
/// <param name="attrs">Attributes for the field</param>
|
||||
/// <param name="name">The name of the input field</param>
|
||||
/// <param name="labelText">The text of the <c>label</c> element associated with this <c>input</c></param>
|
||||
/// <param name="value">The value of the <c>input</c> field</param>
|
||||
/// <param name="extra">Any extra elements to include after the <c>input</c> and <c>label</c></param>
|
||||
/// <returns>A <c>div</c> element with the <c>input type=email</c> field constructed</returns>
|
||||
let emailField attrs name labelText value extra =
|
||||
inputField "email" attrs name labelText value extra
|
||||
|
||||
/// Create a password input field
|
||||
/// <summary>Create a password input field</summary>
|
||||
/// <param name="attrs">Attributes for the field</param>
|
||||
/// <param name="name">The name of the input field</param>
|
||||
/// <param name="labelText">The text of the <c>label</c> element associated with this <c>input</c></param>
|
||||
/// <param name="value">The value of the <c>input</c> field</param>
|
||||
/// <param name="extra">Any extra elements to include after the <c>input</c> and <c>label</c></param>
|
||||
/// <returns>A <c>div</c> element with the <c>input type=password</c> field constructed</returns>
|
||||
let passwordField attrs name labelText value extra =
|
||||
inputField "password" attrs name labelText value extra
|
||||
|
||||
/// Create a select (dropdown) field
|
||||
/// <summary>Create a select (dropdown) field</summary>
|
||||
/// <typeparam name="T">The type of value in the backing list</typeparam>
|
||||
/// <typeparam name="a">The type of the <c>value</c> attribute</typeparam>
|
||||
/// <param name="attrs">Attributes for the field</param>
|
||||
/// <param name="name">The name of the input field</param>
|
||||
/// <param name="labelText">The text of the <c>label</c> element associated with this <c>input</c></param>
|
||||
/// <param name="value">The value of the <c>input</c> field</param>
|
||||
/// <param name="values">The backing list for this dropdown</param>
|
||||
/// <param name="idFunc">The function to extract the ID (<c>value</c> attribute)</param>
|
||||
/// <param name="displayFunc">The function to extract the displayed version of the item</param>
|
||||
/// <param name="extra">Any extra elements to include after the <c>input</c> and <c>label</c></param>
|
||||
/// <returns>A <c>div</c> element with the <c>select</c> field constructed</returns>
|
||||
let selectField<'T, 'a>
|
||||
attrs name labelText value (values: 'T seq) (idFunc: 'T -> 'a) (displayFunc: 'T -> string) extra =
|
||||
let cssClass, attrs = extractAttrValue "class" attrs
|
||||
@ -157,7 +167,13 @@ let selectField<'T, 'a>
|
||||
yield! extra
|
||||
]
|
||||
|
||||
/// Create a checkbox input styled as a switch
|
||||
/// <summary>Create a checkbox input styled as a switch</summary>
|
||||
/// <param name="attrs">Attributes for the field</param>
|
||||
/// <param name="name">The name of the input field</param>
|
||||
/// <param name="labelText">The text of the <c>label</c> element associated with this <c>input</c></param>
|
||||
/// <param name="value">Whether the checkbox should be checked or not</param>
|
||||
/// <param name="extra">Any extra elements to include after the <c>input</c> and <c>label</c></param>
|
||||
/// <returns>A <c>div</c> element with the switch-style <c>input type=checkbox</c> field constructed</returns>
|
||||
let checkboxSwitch attrs name labelText (value: bool) extra =
|
||||
let cssClass, attrs = extractAttrValue "class" attrs
|
||||
div [ _class $"""form-check form-switch {defaultArg cssClass ""}""" ] [
|
||||
@ -168,17 +184,17 @@ let checkboxSwitch attrs name labelText (value: bool) extra =
|
||||
yield! extra
|
||||
]
|
||||
|
||||
/// A standard save button
|
||||
/// <summary>A standard save button</summary>
|
||||
let saveButton =
|
||||
button [ _type "submit"; _class "btn btn-sm btn-primary" ] [ raw "Save Changes" ]
|
||||
|
||||
/// A spacer bullet to use between action links
|
||||
/// <summary>A spacer bullet to use between action links</summary>
|
||||
let actionSpacer =
|
||||
span [ _class "text-muted" ] [ raw " • " ]
|
||||
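The field builders above are meant to be composed inside a Giraffe View Engine form. A minimal sketch of such a fragment, where the action URL and field names are illustrative and `antiCsrf` assumes `app.Csrf` is populated:

```fsharp
// A small admin form fragment built from the helpers defined above.
let exampleForm (app: AppViewContext) =
    form [ _method "post"; _action (relUrl app "admin/example") ] [
        antiCsrf app
        textField [ _class "mb-3"; _required ] "Title" "Title" "" []
        checkboxSwitch [ _class "mb-3" ] "IsPublished" "Publish this item" false []
        saveButton
    ]
```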
|
||||
/// Functions for generating content in varying layouts
|
||||
/// <summary>Functions for generating content in varying layouts</summary>
|
||||
module Layout =
|
||||
|
||||
|
||||
/// Generate the title tag for a page
|
||||
let private titleTag (app: AppViewContext) =
|
||||
title [] [ txt app.PageTitle; raw " « Admin « "; txt app.WebLog.Name ]
|
||||
@ -272,15 +288,21 @@ module Layout =
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
/// Render a page with a partial layout (htmx request)
|
||||
|
||||
/// <summary>Render a page with a partial layout (htmx request)</summary>
|
||||
/// <param name="content">A function that, when given a view context, will return a view</param>
|
||||
/// <param name="app">The app view context to use when rendering the view</param>
|
||||
/// <returns>A constructed Giraffe View Engine view</returns>
|
||||
let partial content app =
|
||||
html [ _lang "en" ] [
|
||||
titleTag app
|
||||
yield! pageView content app
|
||||
]
|
||||
|
||||
/// Render a page with a full layout
|
||||
|
||||
/// <summary>Render a page with a full layout</summary>
|
||||
/// <param name="content">A function that, when given a view context, will return a view</param>
|
||||
/// <param name="app">The app view context to use when rendering the view</param>
|
||||
/// <returns>A constructed Giraffe View Engine view</returns>
|
||||
let full content app =
|
||||
html [ _lang "en" ] [
|
||||
meta [ _name "viewport"; _content "width=device-width, initial-scale=1" ]
|
||||
@ -295,12 +317,15 @@ module Layout =
|
||||
script [ _src "https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/js/bootstrap.bundle.min.js"
|
||||
_integrity "sha384-ka7Sk0Gln4gmtz2MlQnikT1wXgYsOg+OMhuP+IlRH9sENBO0LRn5q+8nbTov4+1p"
|
||||
_crossorigin "anonymous" ] []
|
||||
Script.minified
|
||||
script [ _src (relUrl app "htmx.min.js") ] []
|
||||
script [ _src (relUrl app "themes/admin/admin.js") ] []
|
||||
]
|
||||
]
|
||||
|
||||
/// Render a bare layout
|
||||
|
||||
/// <summary>Render a bare layout</summary>
|
||||
/// <param name="content">A function that, when given a view context, will return a view</param>
|
||||
/// <param name="app">The app view context to use when rendering the view</param>
|
||||
/// <returns>A constructed Giraffe View Engine view</returns>
|
||||
let bare (content: AppViewContext -> XmlNode list) app =
|
||||
html [ _lang "en" ] [
|
||||
title [] []
|
||||
@ -311,14 +336,17 @@ module Layout =
|
||||
// ~~ SHARED TEMPLATES BETWEEN POSTS AND PAGES
|
||||
open Giraffe.Htmx.Common
|
||||
|
||||
/// The round-trip instant pattern
|
||||
/// <summary>The round-trip instant pattern</summary>
|
||||
let roundTrip = InstantPattern.CreateWithInvariantCulture "uuuu'-'MM'-'dd'T'HH':'mm':'ss'.'fffffff"
|
||||
|
/// Capitalize the first letter in the given string
let private capitalize (it: string) =
$"{(string it[0]).ToUpper()}{it[1..]}"

/// The common edit form shared by pages and posts
/// <summary>The common edit form shared by pages and posts</summary>
/// <param name="model">The model to use to render this view</param>
/// <param name="app">The app view context to use to render this view</param>
/// <returns>A common edit view</returns>
let commonEdit (model: EditCommonModel) app = [
textField [ _class "mb-3"; _required; _autofocus ] (nameof model.Title) "Title" model.Title []
textField [ _class "mb-3"; _required ] (nameof model.Permalink) "Permalink" model.Permalink [
@ -352,26 +380,201 @@ let commonEdit (model: EditCommonModel) app = [
]

/// Display a common template list
/// <summary>Display a common template list</summary>
/// <param name="model">The edit model</param>
/// <param name="templates">A list of available templates for this page or post</param>
/// <returns>A <select> element to allow a template to be selected</returns>
let commonTemplates (model: EditCommonModel) (templates: MetaItem seq) =
selectField [ _class "mb-3" ] (nameof model.Template) $"{capitalize model.Entity} Template" model.Template templates
(_.Name) (_.Value) []
_.Name _.Value []

/// Display the metadata item edit form
/// <summary>Display the OpenGraph data edit form</summary>
/// <param name="model">The edit model</param>
/// <returns>Fields for editing OpenGraph data for a page or post</returns>
let commonOpenGraph (model: EditCommonModel) =
fieldset [ _class "mb-3" ] [
legend [] [
span [ _class "form-check form-switch" ] [
small [] [
input [ _type "checkbox"; _name (nameof model.AssignOpenGraph)
_id (nameof model.AssignOpenGraph); _class "form-check-input"; _value "true"
_data "bs-toggle" "collapse"; _data "bs-target" "#og_props"
_onclick "Admin.toggleOpenGraphFields()"; if model.AssignOpenGraph then _checked ]
]
label [ _for (nameof model.AssignOpenGraph) ] [ raw "OpenGraph Properties" ]
]
]
div [ _id "og_props"; _class $"""container p-0 collapse{if model.AssignOpenGraph then " show" else ""}""" ] [
fieldset [ _id "og_item" ] [
legend [] [ raw "Item Details" ]
div [ _class "row p-0" ] [
div [ _class "mb-3 col-xs-6 col-md-3" ] [
selectField [ _required ] (nameof model.OpenGraphType) "Type" model.OpenGraphType
OpenGraphType.Selections fst snd []
]
div [ _class "mb-3 col-xs-6 col-md-3" ] [
textField [] (nameof model.OpenGraphLocale) "Locale" model.OpenGraphLocale
[ span [ _class "form-text" ] [ raw "ex. en-US" ] ]
]
div [ _class "mb-3 col-xs-6 col-md-4" ] [
textField [] (nameof model.OpenGraphAlternateLocales) "Alternate Locales"
model.OpenGraphAlternateLocales
[ span [ _class "form-text" ] [ raw "comma separated" ] ]
]
div [ _class "mb-3 col-xs-6 col-md-2" ] [
textField [] (nameof model.OpenGraphDeterminer) "Determiner" model.OpenGraphDeterminer
[ span [ _class "form-text" ] [ raw "a/an/the"; br []; raw "(blank = auto)" ] ]
]
div [ _class "mb-3 col-12" ] [
textField [] (nameof model.OpenGraphDescription) "Short Description" model.OpenGraphDescription
[]
]
]
]
fieldset [ _id "og_image" ] [
legend [] [ raw "Image" ]
div [ _class "row p-0" ] [
let syncJS = $"Admin.pathOrFile('{nameof model.OpenGraphImageUrl}', 'OpenGraphImageFile')"
div [ _class "mb-3 col-12" ] [
textField [ _onkeyup syncJS ] (nameof model.OpenGraphImageUrl) "Existing Image URL"
model.OpenGraphImageUrl []
]
div [ _class "mb-3 col-12" ] [
if model.OpenGraphImageUrl = "" then
div [ _class "form-floating" ] [
input [ _type "file"; _id "OpenGraphImageFile"; _name "OpenGraphImageFile"
_class "form-control"; _accept "image/*"; _oninput syncJS ]
label [ _for "OpenGraphImageFile" ] [ raw "Upload Image File" ]
]
else
input [ _type "hidden"; _id "OpenGraphImageFile"; _name "OpenGraphImageFile" ]
]
div [ _class "mb-3 col-12" ] [
textField [] (nameof model.OpenGraphImageAlt) "Alternate Text" model.OpenGraphImageAlt []
]
div [ _class "mb-3 col-6" ] [
textField [] (nameof model.OpenGraphImageType) "MIME Type" model.OpenGraphImageType
[ span [ _class "form-text" ] [ raw "Leave blank to derive type from extension" ] ]
]
div [ _class "mb-3 col-3" ] [
numberField [] (nameof model.OpenGraphImageWidth) "Width" model.OpenGraphImageWidth
[ span [ _class "form-text" ] [ raw "px" ] ]
]
div [ _class "mb-3 col-3" ] [
numberField [] (nameof model.OpenGraphImageHeight) "Height" model.OpenGraphImageHeight
[ span [ _class "form-text" ] [ raw "px" ] ]
]
]
]
fieldset [ _id "og_audio" ] [
legend [] [ raw "Audio" ]
div [ _class "row p-0" ] [
let syncJS = $"Admin.pathOrFile('{nameof model.OpenGraphAudioUrl}', 'OpenGraphAudioFile')"
div [ _class "mb-3 col-12" ] [
textField [ _onkeyup syncJS ] (nameof model.OpenGraphAudioUrl) "Existing Audio URL"
model.OpenGraphAudioUrl []
]
div [ _class "mb-3 col-12" ] [
if model.OpenGraphAudioUrl = "" then
div [ _class "form-floating" ] [
input [ _type "file"; _id "OpenGraphAudioFile"; _name "OpenGraphAudioFile"
_class "form-control"; _accept "image/*"; _oninput syncJS ]
label [ _for "OpenGraphAudioFile" ] [ raw "Upload Audio File" ]
]
else
input [ _type "hidden"; _id "OpenGraphAudioFile"; _name "OpenGraphAudioFile" ]
]
div [ _class "mb-3 col-6" ] [
textField [] (nameof model.OpenGraphAudioType) "MIME Type" model.OpenGraphAudioType
[ span [ _class "form-text" ] [ raw "Leave blank to derive type from extension" ] ]
]
]
]
fieldset [ _id "og_video" ] [
legend [] [ raw "Video" ]
div [ _class "row p-0" ] [
let syncJS = $"Admin.pathOrFile('{nameof model.OpenGraphVideoUrl}', 'OpenGraphVideoFile')"
div [ _class "mb-3 col-12" ] [
textField [ _onkeyup syncJS ] (nameof model.OpenGraphVideoUrl) "Existing Video URL"
model.OpenGraphVideoUrl []
]
div [ _class "mb-3 col-12" ] [
if model.OpenGraphVideoUrl = "" then
div [ _class "form-floating" ] [
input [ _type "file"; _id "OpenGraphVideoFile"; _name "OpenGraphVideoFile"
_class "form-control"; _accept "image/*"; _oninput syncJS ]
label [ _for "OpenGraphVideoFile" ] [ raw "Upload Video File" ]
]
else
input [ _type "hidden"; _id "OpenGraphVideoFile"; _name "OpenGraphVideoFile" ]
]
div [ _class "mb-3 col-6" ] [
textField [] (nameof model.OpenGraphVideoType) "MIME Type" model.OpenGraphVideoType
[ span [ _class "form-text" ] [ raw "Leave blank to derive type from extension" ] ]
]
div [ _class "mb-3 col-3" ] [
numberField [] (nameof model.OpenGraphVideoWidth) "Width" model.OpenGraphVideoWidth
[ span [ _class "form-text" ] [ raw "px" ] ]
]
div [ _class "mb-3 col-3" ] [
numberField [] (nameof model.OpenGraphVideoHeight) "Height" model.OpenGraphVideoHeight
[ span [ _class "form-text" ] [ raw "px" ] ]
]
]
]
fieldset [] [
legend [] [ raw "Extra Properties" ]
let items = Array.zip model.OpenGraphExtraNames model.OpenGraphExtraValues
let extraDetail idx (name, value) =
div [ _id $"og_extra_%i{idx}"; _class "row mb-3" ] [
div [ _class "col-1 text-center align-self-center" ] [
button [ _type "button"; _class "btn btn-sm btn-danger"
_onclick $"Admin.removeMetaItem('og_extra', {idx})" ] [
raw "−"
]
]
div [ _class "col-3" ] [
textField [ _id $"{nameof model.OpenGraphExtraNames}_{idx}" ]
(nameof model.OpenGraphExtraNames) "Name" name []
]
div [ _class "col-8" ] [
textField [ _id $"{nameof model.OpenGraphExtraValues}_{idx}" ]
(nameof model.OpenGraphExtraValues) "Value" value []
]
]
div [] [
div [ _id "og_extra_items"; _class "container p-0" ] (items |> Array.mapi extraDetail |> List.ofArray)
button [ _type "button"; _class "btn btn-sm btn-secondary"
_onclick "Admin.addMetaItem('og_extra')" ] [
raw "Add an Extra Property"
]
script [] [
raw """document.addEventListener("DOMContentLoaded", """
raw $"() => Admin.setNextExtraIndex({items.Length}))"
]
]
]
]
]
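(For reference, the script [] block above renders to a single line of JavaScript of roughly this shape; the argument comes from items.Length, shown here with an illustrative value of 2, and it assumes admin.js has already defined Admin:)

// Rendered output of the script [] block above, with an illustrative count of 2
document.addEventListener("DOMContentLoaded", () => Admin.setNextExtraIndex(2))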

/// <summary>Display the metadata item edit form</summary>
/// <param name="model">The edit model</param>
/// <returns>A form for editing metadata</returns>
let commonMetaItems (model: EditCommonModel) =
let items = Array.zip model.MetaNames model.MetaValues
let metaDetail idx (name, value) =
div [ _id $"meta_%i{idx}"; _class "row mb-3" ] [
div [ _class "col-1 text-center align-self-center" ] [
button [ _type "button"; _class "btn btn-sm btn-danger"; _onclick $"Admin.removeMetaItem({idx})" ] [
button [ _type "button"; _class "btn btn-sm btn-danger"
_onclick $"Admin.removeMetaItem('meta', {idx})" ] [
raw "−"
]
]
div [ _class "col-3" ] [ textField [ _id $"MetaNames_{idx}" ] (nameof model.MetaNames) "Name" name [] ]
div [ _class "col-8" ] [ textField [ _id $"MetaValues_{idx}" ] (nameof model.MetaValues) "Value" value [] ]
]

fieldset [] [
legend [] [
raw "Metadata "
@ -382,7 +585,7 @@ let commonMetaItems (model: EditCommonModel) =
]
div [ _id "meta_item_container"; _class "collapse" ] [
div [ _id "meta_items"; _class "container" ] (items |> Array.mapi metaDetail |> List.ofArray)
button [ _type "button"; _class "btn btn-sm btn-secondary"; _onclick "Admin.addMetaItem()" ] [
button [ _type "button"; _class "btn btn-sm btn-secondary"; _onclick "Admin.addMetaItem('meta')" ] [
raw "Add an Item"
]
script [] [
@ -393,7 +596,10 @@ let commonMetaItems (model: EditCommonModel) =
]

/// Revision preview template
/// <summary>Revision preview template</summary>
/// <param name="rev">The revision to preview</param>
/// <param name="app">The app view context to use when rendering the preview</param>
/// <returns>A view with a revision preview</returns>
let commonPreview (rev: Revision) app =
div [ _class "mwl-revision-preview mb-3" ] [
rev.Text.AsHtml() |> addBaseToRelativeUrls app.WebLog.ExtraPath |> raw
@ -401,7 +607,10 @@ let commonPreview (rev: Revision) app =
|> List.singleton

/// Form to manage permalinks for pages or posts
/// <summary>Form to manage permalinks for pages or posts</summary>
/// <param name="model">The manage permalinks model to be rendered</param>
/// <param name="app">The app view context to use when rendering this view</param>
/// <returns>A view for managing permalinks for a page or post</returns>
let managePermalinks (model: ManagePermalinksModel) app = [
let baseUrl = relUrl app $"admin/{model.Entity}/"
let linkDetail idx link =
@ -465,7 +674,10 @@ let managePermalinks (model: ManagePermalinksModel) app = [
]
]

/// Form to manage revisions for pages or posts
/// <summary>Form to manage revisions for pages or posts</summary>
/// <param name="model">The manage revisions model to be rendered</param>
/// <param name="app">The app view context to use when rendering this view</param>
/// <returns>A view for managing revisions for a page or post</returns>
let manageRevisions (model: ManageRevisionsModel) app = [
let revUrlBase = relUrl app $"admin/{model.Entity}/{model.Id}/revision"
let revDetail idx (rev: Revision) =
@ -485,7 +697,7 @@ let manageRevisions (model: ManageRevisionsModel) app = [
span [ _class "text-muted" ] [ raw " • " ]
a [ _href revRestore; _hxPost revRestore ] [ raw "Restore as Current" ]
span [ _class "text-muted" ] [ raw " • " ]
a [ _href revUrlPrefix; _hxDelete revUrlPrefix; _hxTarget $"#{asOfId}"
a [ _href revUrlPrefix; _hxDelete revUrlPrefix; _hxTarget $"#{asOfId}"; _hxPushUrl "false"
_hxSwap HxSwap.OuterHtml; _class "text-danger" ] [
raw "Delete"
]

@ -29,7 +29,7 @@ let pageEdit (model: EditPageModel) templates app = [
/// Display a list of pages for this web log
let pageList (pages: DisplayPage list) pageNbr hasNext app = [
h2 [ _class "my-3" ] [ raw app.PageTitle ]
article [] [
article [ _class "container mb-3" ] [
a [ _href (relUrl app "admin/page/new/edit"); _class "btn btn-primary btn-sm mb-3" ] [ raw "Create a New Page" ]
if pages.Length = 0 then
p [ _class "text-muted fst-italic text-center" ] [ raw "This web log has no pages" ]
@ -37,50 +37,47 @@ let pageList (pages: DisplayPage list) pageNbr hasNext app = [
let titleCol = "col-12 col-md-5"
let linkCol = "col-12 col-md-5"
let upd8Col = "col-12 col-md-2"
form [ _method "post"; _class "container mb-3"; _hxTarget "body" ] [
antiCsrf app
div [ _class "row mwl-table-heading" ] [
div [ _class titleCol ] [
span [ _class "d-none d-md-inline" ] [ raw "Title" ]; span [ _class "d-md-none" ] [ raw "Page" ]
]
div [ _class $"{linkCol} d-none d-md-inline-block" ] [ raw "Permalink" ]
div [ _class $"{upd8Col} d-none d-md-inline-block" ] [ raw "Updated" ]
div [ _class "row mwl-table-heading" ] [
div [ _class titleCol ] [
span [ _class "d-none d-md-inline" ] [ raw "Title" ]; span [ _class "d-md-none" ] [ raw "Page" ]
]
for pg in pages do
let pageLink = if pg.IsDefault then "" else pg.Permalink
div [ _class "row mwl-table-detail" ] [
div [ _class titleCol ] [
txt pg.Title
if pg.IsDefault then
raw " "; span [ _class "badge bg-success" ] [ raw "HOME PAGE" ]
if pg.IsInPageList then
raw " "; span [ _class "badge bg-primary" ] [ raw "IN PAGE LIST" ]
br [] ; small [] [
let adminUrl = relUrl app $"admin/page/{pg.Id}"
a [ _href (relUrl app pageLink); _target "_blank" ] [ raw "View Page" ]
if app.IsEditor || (app.IsAuthor && app.UserId.Value = WebLogUserId pg.AuthorId) then
span [ _class "text-muted" ] [ raw " • " ]
a [ _href $"{adminUrl}/edit" ] [ raw "Edit" ]
if app.IsWebLogAdmin then
span [ _class "text-muted" ] [ raw " • " ]
a [ _href adminUrl; _hxDelete adminUrl; _class "text-danger"
_hxConfirm $"Are you sure you want to delete the page “{pg.Title}”? This action cannot be undone." ] [
raw "Delete"
]
]
]
div [ _class linkCol ] [
small [ _class "d-md-none" ] [ txt pageLink ]
span [ _class "d-none d-md-inline" ] [ txt pageLink ]
]
div [ _class upd8Col ] [
small [ _class "d-md-none text-muted" ] [
raw "Updated "; txt (pg.UpdatedOn.ToString "MMMM d, yyyy")
]
span [ _class "d-none d-md-inline" ] [ txt (pg.UpdatedOn.ToString "MMMM d, yyyy") ]
div [ _class $"{linkCol} d-none d-md-inline-block" ] [ raw "Permalink" ]
div [ _class $"{upd8Col} d-none d-md-inline-block" ] [ raw "Updated" ]
]
for pg in pages do
let pageLink = if pg.IsDefault then "" else pg.Permalink
div [ _class "row mwl-table-detail" ] [
div [ _class titleCol ] [
txt pg.Title
if pg.IsDefault then
raw " "; span [ _class "badge bg-success" ] [ raw "HOME PAGE" ]
if pg.IsInPageList then
raw " "; span [ _class "badge bg-primary" ] [ raw "IN PAGE LIST" ]
br [] ; small [] [
let adminUrl = relUrl app $"admin/page/{pg.Id}"
a [ _href (relUrl app pageLink); _target "_blank" ] [ raw "View Page" ]
if app.IsEditor || (app.IsAuthor && app.UserId.Value = WebLogUserId pg.AuthorId) then
span [ _class "text-muted" ] [ raw " • " ]
a [ _href $"{adminUrl}/edit" ] [ raw "Edit" ]
if app.IsWebLogAdmin then
span [ _class "text-muted" ] [ raw " • " ]
a [ _href adminUrl; _hxDelete adminUrl; _hxTarget "body"; _class "text-danger"
_hxConfirm $"Are you sure you want to delete the page “{pg.Title}”? This action cannot be undone." ] [
raw "Delete"
]
]
]
]
div [ _class linkCol ] [
small [ _class "d-md-none" ] [ txt pageLink ]
span [ _class "d-none d-md-inline" ] [ txt pageLink ]
]
div [ _class upd8Col ] [
small [ _class "d-md-none text-muted" ] [
raw "Updated "; txt (pg.UpdatedOn.ToString "MMMM d, yyyy")
]
span [ _class "d-none d-md-inline" ] [ txt (pg.UpdatedOn.ToString "MMMM d, yyyy") ]
]
]
if pageNbr > 1 || hasNext then
div [ _class "d-flex justify-content-evenly mb-3" ] [
div [] [

@ -77,7 +77,7 @@ let chapterEdit (model: EditChapterModel) app = [
em [ _class "form-text" ] [
raw "Optional; "
a [ _href "https://www.openstreetmap.org/"; _target "_blank"; _relNoOpener ] [ raw "get ID" ]
raw ", "
raw ", "
a [ _href "https://github.com/Podcastindex-org/podcast-namespace/blob/main/location/location.md#osm-recommended"
_target "_blank"; _relNoOpener ] [
raw "see spec"
@ -102,9 +102,7 @@ let chapterEdit (model: EditChapterModel) app = [

/// Display a list of chapters
let chapterList withNew (model: ManageChaptersModel) app =
form [ _method "post"; _id "chapter_list"; _class "container mb-3"; _hxTarget "this"; _hxSwap HxSwap.OuterHtml ] [
antiCsrf app
input [ _type "hidden"; _name "Id"; _value model.Id ]
div [ _id "chapter_list"; _class "container mb-3"; _hxTarget "this"; _hxSwap HxSwap.OuterHtml ] [
div [ _class "row mwl-table-heading" ] [
div [ _class "col-3 col-md-2" ] [ raw "Start" ]
div [ _class "col-3 col-md-6 col-lg-8" ] [ raw "Title" ]
@ -293,6 +291,7 @@ let postEdit (model: EditPostModel) templates (ratings: MetaItem list) app = [
checkboxSwitch [ _class "mb-2" ] (nameof model.DoPublish) "Publish This Post" model.DoPublish []
saveButton
hr [ _class "mb-3" ]
commonOpenGraph model
fieldset [ _class "mb-3" ] [
legend [] [
span [ _class "form-check form-switch" ] [
@ -355,7 +354,7 @@ let postEdit (model: EditPostModel) templates (ratings: MetaItem list) app = [
]
div [ _class "col-12 col-md-4 pb-3" ] [
selectField [] (nameof model.Explicit) "Explicit Rating" model.Explicit ratings
(_.Name) (_.Value) [
_.Name _.Value [
div [ _class "form-text" ] [ raw "Optional; overrides podcast default" ]
]
]

@ -144,7 +144,7 @@ let userList (model: WebLogUser list) app =
]
if app.UserId.Value <> user.Id then
span [ _class "text-muted" ] [ raw " • " ]
a [ _href userUrl; _hxDelete userUrl; _class "text-danger"
a [ _href userUrl; _hxDelete userUrl; _hxPushUrl "false"; _class "text-danger"
_hxConfirm $"Are you sure you want to delete the user “{user.PreferredName}”? This action cannot be undone. (This action will not succeed if the user has authored any posts or pages.)" ] [
raw "Delete"
]
@ -186,11 +186,8 @@ let userList (model: WebLogUser list) app =
div [ _class "container g-0" ] [
div [ _class "row mwl-table-detail"; _id "user_new" ] []
]
form [ _method "post"; _class "container g-0"; _hxTarget "#user_panel"
_hxSwap $"{HxSwap.OuterHtml} show:window:top" ] [
antiCsrf app
yield! List.map userDetail model
]
List.map userDetail model
|> div [ _class "container g-0"; _hxTarget "#user_panel"; _hxSwap $"{HxSwap.OuterHtml} show:window:top" ]
]
]
|> List.singleton

@ -33,7 +33,7 @@ let categoryEdit (model: EditCategoryModel) app =
|> String.concat ""
{ Name = c.Id; Value = $"{parents}{c.Name}" })
|> Seq.append [ { Name = ""; Value = "– None –" } ]
selectField [] (nameof model.ParentId) "Parent Category" model.ParentId cats (_.Name) (_.Value) []
selectField [] (nameof model.ParentId) "Parent Category" model.ParentId cats _.Name _.Value []
]
div [ _class "col-12 col-xl-10 offset-xl-1 mb-3" ] [
textField [] (nameof model.Description) "Description" model.Description []
@ -107,9 +107,6 @@ let categoryList includeNew app = [
div [ _class catCol ] [ raw "Category"; span [ _class "d-md-none" ] [ raw "; Description" ] ]
div [ _class $"{descCol} d-none d-md-inline-block" ] [ raw "Description" ]
]
]
form [ _method "post"; _class "container" ] [
antiCsrf app
div [ _class "row mwl-table-detail"; _id "cat_new" ] [ if includeNew then loadNew ]
yield! app.Categories |> Seq.ofArray |> Seq.map categoryDetail |> List.ofSeq
]
@ -249,8 +246,8 @@ let feedEdit (model: EditCustomFeedModel) (ratings: MetaItem list) (mediums: Met
|> Seq.append [ { Name = ""; Value = "– Select Category –" } ]
selectField [ _id "SourceValueCat"; _required
if model.SourceType = "tag" then _disabled ]
(nameof model.SourceValue) "Category" model.SourceValue cats (_.Name)
(_.Value) []
(nameof model.SourceValue) "Category" model.SourceValue cats _.Name _.Value
[]
]
div [ _class "col-1 d-flex justify-content-end pb-3" ] [
div [ _class "form-check form-check-inline me-0" ] [
@ -305,7 +302,7 @@ let feedEdit (model: EditCustomFeedModel) (ratings: MetaItem list) (mediums: Met
]
div [ _class "col-12 col-md-3 col-lg-2 pb-3" ] [
selectField [ _required ] (nameof model.Explicit) "Explicit Rating" model.Explicit
ratings (_.Name) (_.Value) []
ratings _.Name _.Value []
]
]
div [ _class "row" ] [
@ -380,7 +377,7 @@ let feedEdit (model: EditCustomFeedModel) (ratings: MetaItem list) (mediums: Met
]
]
div [ _class "col-4 col-lg-3 offset-lg-2 pb-3" ] [
selectField [] (nameof model.Medium) "Medium" model.Medium mediums (_.Name) (_.Value) [
selectField [] (nameof model.Medium) "Medium" model.Medium mediums _.Name _.Value [
span [ _class "form-text fst-italic" ] [
raw "Optional; medium of the podcast content ("
a [ _href "https://github.com/Podcastindex-org/podcast-namespace/blob/main/docs/1.0.md#medium"
@ -465,7 +462,7 @@ let redirectList (model: RedirectRule list) app = [
if idx <> model.Length - 1 then
actionSpacer; a [ _href $"{ruleUrl}/down"; _hxPost $"{ruleUrl}/down" ] [ raw "Move Down" ]
actionSpacer
a [ _class "text-danger"; _href ruleUrl; _hxDelete ruleUrl
a [ _class "text-danger"; _href ruleUrl; _hxDelete ruleUrl; _hxPushUrl "false"
_hxConfirm "Are you sure you want to delete this redirect rule?" ] [
raw "Delete"
]
@ -502,7 +499,8 @@ let redirectList (model: RedirectRule list) app = [
]
div [ _class "row mwl-table-detail"; _id "rule_new" ] []
form [ _method "post"; _class "container g-0"; _hxTarget "body" ] [
antiCsrf app; yield! List.mapi ruleDetail model
antiCsrf app
yield! List.mapi ruleDetail model
]
]
p [ _class "mt-3 text-muted fst-italic text-center" ] [
@ -556,7 +554,8 @@ let tagMapList (model: TagMap list) app =
_hxSwap $"{HxSwap.InnerHtml} show:#tag_{map.Id}:top" ] [
raw "Edit"
]; actionSpacer
a [ _href url; _hxDelete url; _class "text-danger"
a [ _href url; _hxDelete url; _hxTarget "#tagList"; _hxPushUrl "false"; _hxSwap HxSwap.OuterHtml
_class "text-danger"
_hxConfirm $"Are you sure you want to delete the mapping for “{map.Tag}”? This action cannot be undone." ] [
raw "Delete"
]
@ -576,11 +575,8 @@ let tagMapList (model: TagMap list) app =
div [ _class "col" ] [ raw "URL Value" ]
]
]
form [ _method "post"; _class "container g-0"; _hxTarget "#tagList"; _hxSwap HxSwap.OuterHtml ] [
antiCsrf app
div [ _class "row mwl-table-detail"; _id "tag_new" ] []
yield! List.map tagMapDetail model
]
div [ _class "row mwl-table-detail"; _id "tag_new" ] []
yield! List.map tagMapDetail model
]
|> List.singleton

@ -640,8 +636,7 @@ let uploadList (model: DisplayUpload seq) app = [
h2 [ _class "my-3" ] [ raw app.PageTitle ]
article [] [
a [ _href (relUrl app "admin/upload/new"); _class "btn btn-primary btn-sm mb-3" ] [ raw "Upload a New File" ]
form [ _method "post"; _class "container"; _hxTarget "body" ] [
antiCsrf app
div [ _class "container"; _hxTarget "body" ] [
div [ _class "row" ] [
div [ _class "col text-center" ] [
em [ _class "text-muted" ] [ raw "Uploaded files served from" ]; br []; raw relativeBase
@ -729,7 +724,7 @@ let webLogSettings
a [ _href (relUrl app (string feed.Path)); _target "_blank" ] [ raw "View Feed" ]
actionSpacer
a [ _href $"{feedUrl}/edit" ] [ raw "Edit" ]; actionSpacer
a [ _href feedUrl; _hxDelete feedUrl; _class "text-danger"
a [ _href feedUrl; _hxDelete feedUrl; _hxPushUrl "false"; _class "text-danger"
_hxConfirm $"Are you sure you want to delete the custom RSS feed based on {feed.Source}? This action cannot be undone." ] [
raw "Delete"
]
@ -779,7 +774,7 @@ let webLogSettings
]
div [ _class "col-12 col-md-6 offset-md-1 col-xl-4 offset-xl-0 pb-3" ] [
selectField [ _required ] (nameof model.DefaultPage) "Default Page" model.DefaultPage pages
(fun p -> string p.Id) (_.Title) []
(fun p -> string p.Id) _.Title []
]
div [ _class "col-12 col-md-4 col-xl-2 pb-3" ] [
numberField [ _required; _min "0"; _max "50" ] (nameof model.PostsPerPage) "Posts per Page"
@ -800,6 +795,13 @@ let webLogSettings
selectField [] (nameof model.Uploads) "Default Upload Destination" model.Uploads uploads
string string []
]
div [ _class "col-12 col-md-6 offset-md-3 col-xl-4 offset-xl-4" ] [
checkboxSwitch [] (nameof model.AutoOpenGraph) "Auto-Add OpenGraph Properties"
model.AutoOpenGraph []
span [ _class "form-text fst-italic" ] [
raw "Adds title, site name, and permalink to all pages and posts"
]
]
]
div [ _class "row pb-3" ] [
div [ _class "col text-center" ] [

@ -1,5 +1,5 @@
{
"Generator": "myWebLog 2.2",
{
"Generator": "myWebLog 3",
"Logging": {
"LogLevel": {
"MyWebLog.Handlers": "Information"
@ -8,7 +8,7 @@
"Kestrel": {
"Endpoints": {
"Http": {
"Url": "http://0.0.0.0:80"
"Url": "http://0.0.0.0:5000"
}
}
}

1
src/MyWebLog/wwwroot/htmx.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
@ -1,2 +1,2 @@
myWebLog Admin
2.2
3
@ -2,6 +2,12 @@
* Support functions for the administrative UI
*/
this.Admin = {
/**
* The next index for an OpenGraph extra property item
* @type {number}
*/
nextExtraIndex : 0,

/**
* The next index for a metadata item
* @type {number}
@ -13,7 +19,15 @@ this.Admin = {
* @type {number}
*/
nextPermalink : 0,

/**
* Set the next OpenGraph extra property index
* @param {number} idx The index to set
*/
setNextExtraIndex(idx) {
this.nextExtraIndex = idx
},

/**
* Set the next meta item index
* @param {number} idx The index to set
@ -32,35 +46,39 @@ this.Admin = {

/**
* Create a metadata remove button
* @param {string} prefix The prefix of the row to be removed
* @returns {HTMLDivElement} The column with the remove button
*/
createMetaRemoveColumn() {
createMetaRemoveColumn(prefix) {
const removeBtn = document.createElement("button")
removeBtn.type = "button"
removeBtn.className = "btn btn-sm btn-danger"
removeBtn.innerHTML = "&minus;"
removeBtn.setAttribute("onclick", `Admin.removeMetaItem(${this.nextMetaIndex})`)
removeBtn.setAttribute("onclick",
`Admin.removeMetaItem('${prefix}', ${prefix === "og_extra" ? this.nextExtraIndex : this.nextMetaIndex})`)

const removeCol = document.createElement("div")
removeCol.className = "col-1 text-center align-self-center"
removeCol.appendChild(removeBtn)

return removeCol
},

/**
* Create a metadata name field
* @param {string} prefix The prefix for the element
* @returns {HTMLInputElement} The name input element
*/
createMetaNameField() {
createMetaNameField(prefix) {
const namePfx = prefix === "og_extra" ? "OpenGraphExtra" : "Meta"
const nameField = document.createElement("input")

nameField.type = "text"
nameField.name = "MetaNames"
nameField.id = `metaNames_${this.nextMetaIndex}`
nameField.name = `${namePfx}Names`
nameField.id = `${namePfx}Names_${prefix === "og_extra" ? this.nextExtraIndex : this.nextMetaIndex}`
nameField.className = "form-control"
nameField.placeholder = "Name"

return nameField
},

@ -82,23 +100,25 @@ this.Admin = {
const nameCol = document.createElement("div")
nameCol.className = "col-3"
nameCol.appendChild(nameFloat)

return nameCol
},

/**
* Create a metadata value field
* @param {string} prefix The prefix for the field being created
* @returns {HTMLInputElement} The metadata value field
*/
createMetaValueField() {
createMetaValueField(prefix) {
const namePfx = prefix === "og_extra" ? "OpenGraphExtra" : "Meta"
const valueField = document.createElement("input")

valueField.type = "text"
valueField.name = "MetaValues"
valueField.id = `metaValues_${this.nextMetaIndex}`
valueField.name = `${namePfx}Values`
valueField.id = `${namePfx}Values_${prefix === "og_extra" ? this.nextExtraIndex : this.nextMetaIndex}`
valueField.className = "form-control"
valueField.placeholder = "Value"

return valueField
},

@ -124,43 +144,50 @@ this.Admin = {
valueHint.innerText = hintText
valueFloat.appendChild(valueHint)
}

const valueCol = document.createElement("div")
valueCol.className = "col-8"
valueCol.appendChild(valueFloat)

return valueCol
},

/**
* Construct and add a metadata item row
* @param {string} prefix The prefix of the row being added
* @param {HTMLDivElement} removeCol The column with the remove button
* @param {HTMLDivElement} nameCol The column with the name field
* @param {HTMLDivElement} valueCol The column with the value field
*/
createMetaRow(removeCol, nameCol, valueCol) {
createMetaRow(prefix, removeCol, nameCol, valueCol) {
const newRow = document.createElement("div")
newRow.className = "row mb-3"
newRow.id = `meta_${this.nextMetaIndex}`
newRow.id = `${prefix}_${prefix === "og_extra" ? this.nextExtraIndex : this.nextMetaIndex}`
newRow.appendChild(removeCol)
newRow.appendChild(nameCol)
newRow.appendChild(valueCol)

document.getElementById("meta_items").appendChild(newRow)
this.nextMetaIndex++
document.getElementById(`${prefix}_items`).appendChild(newRow)
if (prefix === "og_extra") {
this.nextExtraIndex++
} else {
this.nextMetaIndex++
}
},

/**
* Add a new row for metadata entry
* @param {string} prefix The prefix for the field being created
*/
addMetaItem() {
const nameField = this.createMetaNameField()

addMetaItem(prefix) {
const nameField = this.createMetaNameField(prefix)

this.createMetaRow(
this.createMetaRemoveColumn(),
prefix,
this.createMetaRemoveColumn(prefix),
this.createMetaNameColumn(nameField),
this.createMetaValueColumn(this.createMetaValueField(), undefined))

this.createMetaValueColumn(this.createMetaValueField(prefix), undefined))

document.getElementById(nameField.id).focus()
},

@ -226,7 +253,35 @@ this.Admin = {
const link = document.getElementById("ChapterEditLink")
if (link) link.style.display = src === "none" || src === "external" ? "none" : ""
},

/**
* Enable or disable OpenGraph fields
*/
toggleOpenGraphFields() {
const disabled = !document.getElementById("AssignOpenGraph").checked
let fieldsets = ["og_item", "og_image", "og_audio", "og_video"]
fieldsets.forEach(it => document.getElementById(it).disabled = disabled)
},

/**
* Disable the file upload or path field if the other is entered
* @param {string} pathElt The path element to check
* @param {string} fileElt The file element to check
*/
pathOrFile(pathElt, fileElt) {
/** @type {HTMLInputElement} */
const path = document.getElementById(pathElt)
/** @type {HTMLInputElement} */
const file = document.getElementById(fileElt)
if (path.value.length > 0) {
file.disabled = true
} else if (file.value.length > 0) {
path.disabled = true
} else {
file.disabled = path.disabled = false
}
},
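(A minimal sketch of exercising Admin.pathOrFile from the browser console, assuming admin.js is loaded; the DemoUrl/DemoFile IDs are made up for illustration, while the views above pass pairs such as OpenGraphImageUrl and OpenGraphImageFile:)

// Create a throw-away path input and file input, then let pathOrFile reconcile them
const url  = Object.assign(document.createElement("input"), { type: "text", id: "DemoUrl" })
const file = Object.assign(document.createElement("input"), { type: "file", id: "DemoFile" })
document.body.append(url, file)

url.value = "/uploads/cover.png"
Admin.pathOrFile("DemoUrl", "DemoFile")
console.log(file.disabled)  // true - a path is present, so the upload field is disabled

url.value = ""
Admin.pathOrFile("DemoUrl", "DemoFile")
console.log(file.disabled)  // false - both fields are blank, so both are enabled again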

/**
* Enable or disable podcast fields
*/
@ -244,7 +299,7 @@ this.Admin = {
}
fields.forEach(it => document.getElementById(it).disabled = disabled)
},

/**
* Check to enable or disable podcast fields
*/
@ -263,7 +318,7 @@ this.Admin = {
elt.innerText = "Copied"
return false
},

/**
* Toggle the source of a custom RSS feed
* @param {string} source The source that was selected
@ -281,13 +336,14 @@ this.Admin = {
tagInput.disabled = false
}
},

/**
* Remove a metadata item
* @param {number} idx The index of the metadata item to remove
* @param {string} id The ID prefix of the item to remove
* @param {number} idx The index of the item to remove
*/
removeMetaItem(idx) {
document.getElementById(`meta_${idx}`).remove()
removeMetaItem(id, idx) {
document.getElementById(`${id}_${idx}`).remove()
},
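(A short sketch of how the new prefix argument ties the F# views above to these helpers, assuming admin.js is loaded and the page contains the #meta_items and #og_extra_items containers those views render:)

Admin.setNextExtraIndex(0)
Admin.addMetaItem("og_extra")        // appends a row with id "og_extra_0" to #og_extra_items
Admin.removeMetaItem("og_extra", 0)  // removes that same row by its prefixed id
Admin.addMetaItem("meta")            // metadata rows keep the "meta_" prefix and #meta_items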

/**
@ -326,12 +382,12 @@ this.Admin = {
msgs.forEach(msg => {
const parts = msg.split("|||")
if (parts.length < 2) return

// Create the toast header
const toastType = document.createElement("strong")
toastType.className = "me-auto text-uppercase"
toastType.innerText = parts[0] === "danger" ? "error" : parts[0]

const closeBtn = document.createElement("button")
closeBtn.type = "button"
closeBtn.className = "btn-close"
@ -350,7 +406,7 @@ this.Admin = {
if (parts.length === 3) {
toastBody.innerHTML += `<hr>${parts[2]}`
}

// Assemble the toast
const toast = document.createElement("div")
toast.className = "toast"
@ -361,10 +417,10 @@ this.Admin = {
toast.appendChild(toastBody)

document.getElementById("toasts").appendChild(toast)

let options = { delay: 4000 }
if (parts[0] !== "success") options.autohide = false

const theToast = new bootstrap.Toast(toast, options)
theToast.show()
})