v2 (#36)

* Use PostgreSQL JSON-based data implementation
* Fix back link on RSS settings page (#34)
* Show theme upload messages (#28)
* Fix admin page list paging (#35)
* Add db migrations for all stores
* Support both .NET 6 and 7
parent 5f3daa1de9
commit 7b325dc19e
@ -1,12 +0,0 @@
{
  "version": 1,
  "isRoot": true,
  "tools": {
    "fake-cli": {
      "version": "5.22.0",
      "commands": [
        "fake"
      ]
    }
  }
}
166 build.fs Normal file
@ -0,0 +1,166 @@
open System.IO
open Fake.Core
open Fake.DotNet
open Fake.IO
open Fake.IO.Globbing.Operators

let execContext = Context.FakeExecutionContext.Create false "build.fsx" []
Context.setExecutionContext (Context.RuntimeContext.Fake execContext)

/// The output directory for release ZIPs
let releasePath = "releases"

/// The path to the main project
let projectPath = "src/MyWebLog"

/// The path and name of the main project
let projName = $"{projectPath}/MyWebLog.fsproj"

/// The version being packaged (extracted from appsettings.json)
let version =
    let settings   = File.ReadAllText $"{projectPath}/appsettings.json"
    let generator  = settings.Substring (settings.IndexOf "\"Generator\":")
    let appVersion = generator.Replace("\"Generator\": \"", "")
    let appVersion = appVersion.Substring (0, appVersion.IndexOf "\"")
    appVersion.Split ' ' |> Array.last

/// Zip a theme distributed with myWebLog
let zipTheme (name : string) (_ : TargetParameter) =
    let path = $"src/{name}-theme"
    !! $"{path}/**/*"
    |> Zip.filesAsSpecs path
    |> Seq.filter (fun (_, name) -> not (name.EndsWith ".zip"))
    |> Zip.zipSpec $"{releasePath}/{name}-theme.zip"

/// Frameworks supported by this build
let frameworks = [ "net6.0"; "net7.0" ]

/// Publish the project for the given runtime ID
let publishFor rid (_ : TargetParameter) =
    frameworks
    |> List.iter (fun fwk ->
        DotNet.publish
            (fun opts ->
                { opts with Runtime = Some rid; SelfContained = Some false; NoLogo = true; Framework = Some fwk })
            projName)

/// Package published output for the given runtime ID
let packageFor rid (_ : TargetParameter) =
    frameworks
    |> List.iter (fun fwk ->
        let path         = $"{projectPath}/bin/Release/{fwk}/%s{rid}/publish"
        let prodSettings = $"{path}/appsettings.Production.json"
        if File.exists prodSettings then File.delete prodSettings
        [ !! $"{path}/**/*"
          |> Zip.filesAsSpecs path
          |> Seq.map (fun (orig, dest) ->
              orig, if dest.StartsWith "MyWebLog" then dest.Replace ("MyWebLog", "myWebLog") else dest)
          Seq.singleton ($"{releasePath}/admin-theme.zip", "admin-theme.zip")
          Seq.singleton ($"{releasePath}/default-theme.zip", "default-theme.zip")
        ]
        |> Seq.concat
        |> Zip.zipSpec $"{releasePath}/myWebLog-{version}.{fwk}.{rid}.zip")


Target.create "Clean" (fun _ ->
    !! "src/**/bin"
    ++ "src/**/obj"
    |> Shell.cleanDirs
    Shell.cleanDir releasePath
)

Target.create "Build" (fun _ ->
    DotNet.build (fun opts -> { opts with NoLogo = true }) projName
)

Target.create "ZipAdminTheme"   (zipTheme "admin")
Target.create "ZipDefaultTheme" (zipTheme "default")

Target.create "PublishWindows" (publishFor "win-x64")
Target.create "PackageWindows" (packageFor "win-x64")

Target.create "PublishLinux" (publishFor "linux-x64")
Target.create "PackageLinux" (packageFor "linux-x64")

Target.create "RepackageLinux" (fun _ ->
    let workDir = $"{releasePath}/linux"
    frameworks
    |> List.iter (fun fwk ->
        let zipArchive = $"{releasePath}/myWebLog-{version}.{fwk}.linux-x64.zip"
        let sh command args =
            CreateProcess.fromRawCommand command args
            |> CreateProcess.redirectOutput
            |> Proc.run
            |> ignore
        Shell.mkdir workDir
        Zip.unzip workDir zipArchive
        Shell.cd workDir
        sh "chmod" [ "+x"; "./myWebLog" ]
        sh "tar" [ "cfj"; $"../myWebLog-{version}.{fwk}.linux-x64.tar.bz2"; "." ]
        Shell.cd "../.."
        Shell.rm zipArchive)
    Shell.rm_rf workDir
)

Target.create "All" ignore

Target.create "RemoveThemeArchives" (fun _ ->
    Shell.rm $"{releasePath}/admin-theme.zip"
    Shell.rm $"{releasePath}/default-theme.zip"
)

Target.create "CI" ignore

open Fake.Core.TargetOperators

let dependencies = [
    "Clean"
        ==> "All"

    "Clean"
        ?=> "Build"
        ==> "All"

    "Clean"
        ?=> "ZipDefaultTheme"
        ==> "All"

    "Clean"
        ?=> "ZipAdminTheme"
        ==> "All"

    "Build"
        ==> "PublishWindows"
        ==> "All"

    "Build"
        ==> "PublishLinux"
        ==> "All"

    "PublishWindows"
        ==> "PackageWindows"
        ==> "All"

    "PublishLinux"
        ==> "PackageLinux"
        ==> "All"

    "PackageLinux"
        ==> "RepackageLinux"
        ==> "All"

    "All"
        ==> "RemoveThemeArchives"
        ==> "CI"
]

[<EntryPoint>]
let main args =
    try
        match args with
        | [| target |] -> Target.runOrDefault target
        | _ -> Target.runOrDefault "All"
        0
    with e ->
        printfn "%A" e
        1
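Note: with build.fsx and fake.sh removed below, this compiled build project replaces the dotnet fake CLI. As a hedged illustration only (the exact command is not shown in this commit), the build would presumably be started through the .NET CLI, passing a single FAKE target name to the entry point defined above:

# Assumed invocation; any target accepted by Target.runOrDefault (e.g. Build, PackageLinux, CI)
# may be passed, and omitting the argument runs the default All target.
dotnet run --project build.fsproj -- CI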
20 build.fsproj Normal file
@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net7.0</TargetFramework>
  </PropertyGroup>

  <ItemGroup>
    <Compile Include="build.fs" />
  </ItemGroup>

  <ItemGroup>
    <PackageReference Include="Fake.Core.Target" Version="5.23.1" />
    <PackageReference Include="Fake.DotNet.Cli" Version="5.23.1" />
    <PackageReference Include="Fake.IO.FileSystem" Version="5.23.1" />
    <PackageReference Include="Fake.IO.Zip" Version="5.23.1" />
    <PackageReference Include="MSBuild.StructuredLogger" Version="2.1.768" />
  </ItemGroup>

</Project>
147 build.fsx
@ -1,147 +0,0 @@
|
||||
#r "paket:
|
||||
nuget Fake.DotNet.Cli
|
||||
nuget Fake.IO.FileSystem
|
||||
nuget Fake.IO.Zip
|
||||
nuget Fake.Core.Target //"
|
||||
#load ".fake/build.fsx/intellisense.fsx"
|
||||
open System.IO
|
||||
open Fake.Core
|
||||
open Fake.DotNet
|
||||
open Fake.IO
|
||||
open Fake.IO.Globbing.Operators
|
||||
open Fake.Core.TargetOperators
|
||||
|
||||
Target.initEnvironment ()
|
||||
|
||||
/// The output directory for release ZIPs
|
||||
let releasePath = "releases"
|
||||
|
||||
/// The path to the main project
|
||||
let projectPath = "src/MyWebLog"
|
||||
|
||||
/// The path and name of the main project
|
||||
let projName = $"{projectPath}/MyWebLog.fsproj"
|
||||
|
||||
/// The version being packaged (extracted from appsettings.json)
|
||||
let version =
|
||||
let settings = File.ReadAllText $"{projectPath}/appsettings.json"
|
||||
let generator = settings.Substring (settings.IndexOf "\"Generator\":")
|
||||
let appVersion = generator.Replace("\"Generator\": \"", "")
|
||||
let appVersion = appVersion.Substring (0, appVersion.IndexOf "\"")
|
||||
appVersion.Split ' ' |> Array.last
|
||||
|
||||
/// Zip a theme distributed with myWebLog
|
||||
let zipTheme (name : string) (_ : TargetParameter) =
|
||||
let path = $"src/{name}-theme"
|
||||
!! $"{path}/**/*"
|
||||
|> Zip.filesAsSpecs path
|
||||
|> Seq.filter (fun (_, name) -> not (name.EndsWith ".zip"))
|
||||
|> Zip.zipSpec $"{releasePath}/{name}-theme.zip"
|
||||
|
||||
/// Publish the project for the given runtime ID
|
||||
let publishFor rid (_ : TargetParameter) =
|
||||
DotNet.publish (fun opts -> { opts with Runtime = Some rid; SelfContained = Some false; NoLogo = true }) projName
|
||||
|
||||
/// Package published output for the given runtime ID
|
||||
let packageFor (rid : string) (_ : TargetParameter) =
|
||||
let path = $"{projectPath}/bin/Release/net6.0/{rid}/publish"
|
||||
let prodSettings = $"{path}/appsettings.Production.json"
|
||||
if File.exists prodSettings then File.delete prodSettings
|
||||
[ !! $"{path}/**/*"
|
||||
|> Zip.filesAsSpecs path
|
||||
|> Seq.map (fun (orig, dest) ->
|
||||
orig, if dest.StartsWith "MyWebLog" then dest.Replace ("MyWebLog", "myWebLog") else dest)
|
||||
Seq.singleton ($"{releasePath}/admin-theme.zip", "admin-theme.zip")
|
||||
Seq.singleton ($"{releasePath}/default-theme.zip", "default-theme.zip")
|
||||
]
|
||||
|> Seq.concat
|
||||
|> Zip.zipSpec $"{releasePath}/myWebLog-{version}.{rid}.zip"
|
||||
|
||||
|
||||
Target.create "Clean" (fun _ ->
|
||||
!! "src/**/bin"
|
||||
++ "src/**/obj"
|
||||
|> Shell.cleanDirs
|
||||
Shell.cleanDir releasePath
|
||||
)
|
||||
|
||||
Target.create "Build" (fun _ ->
|
||||
DotNet.build (fun opts -> { opts with NoLogo = true }) projName
|
||||
)
|
||||
|
||||
Target.create "ZipAdminTheme" (zipTheme "admin")
|
||||
Target.create "ZipDefaultTheme" (zipTheme "default")
|
||||
|
||||
Target.create "PublishWindows" (publishFor "win-x64")
|
||||
Target.create "PackageWindows" (packageFor "win-x64")
|
||||
|
||||
Target.create "PublishLinux" (publishFor "linux-x64")
|
||||
Target.create "PackageLinux" (packageFor "linux-x64")
|
||||
|
||||
Target.create "RepackageLinux" (fun _ ->
|
||||
let workDir = $"{releasePath}/linux"
|
||||
let zipArchive = $"{releasePath}/myWebLog-{version}.linux-x64.zip"
|
||||
let sh command args =
|
||||
CreateProcess.fromRawCommand command args
|
||||
|> CreateProcess.redirectOutput
|
||||
|> Proc.run
|
||||
|> ignore
|
||||
Shell.mkdir workDir
|
||||
Zip.unzip workDir zipArchive
|
||||
Shell.cd workDir
|
||||
sh "chmod" [ "+x"; "./myWebLog" ]
|
||||
sh "tar" [ "cfj"; $"../myWebLog-{version}.linux-x64.tar.bz2"; "." ]
|
||||
Shell.cd "../.."
|
||||
Shell.rm zipArchive
|
||||
Shell.rm_rf workDir
|
||||
)
|
||||
|
||||
Target.create "All" ignore
|
||||
|
||||
Target.create "RemoveThemeArchives" (fun _ ->
|
||||
Shell.rm $"{releasePath}/admin-theme.zip"
|
||||
Shell.rm $"{releasePath}/default-theme.zip"
|
||||
)
|
||||
|
||||
Target.create "CI" ignore
|
||||
|
||||
"Clean"
|
||||
==> "All"
|
||||
|
||||
"Clean"
|
||||
?=> "Build"
|
||||
==> "All"
|
||||
|
||||
"Clean"
|
||||
?=> "ZipDefaultTheme"
|
||||
==> "All"
|
||||
|
||||
"Clean"
|
||||
?=> "ZipAdminTheme"
|
||||
==> "All"
|
||||
|
||||
"Build"
|
||||
==> "PublishWindows"
|
||||
==> "All"
|
||||
|
||||
"Build"
|
||||
==> "PublishLinux"
|
||||
==> "All"
|
||||
|
||||
"PublishWindows"
|
||||
==> "PackageWindows"
|
||||
==> "All"
|
||||
|
||||
"PublishLinux"
|
||||
==> "PackageLinux"
|
||||
==> "All"
|
||||
|
||||
"PackageLinux"
|
||||
==> "RepackageLinux"
|
||||
==> "All"
|
||||
|
||||
"All"
|
||||
==> "RemoveThemeArchives"
|
||||
==> "CI"
|
||||
|
||||
Target.runOrDefault "All"
|
227 build.fsx.lock
@ -1,227 +0,0 @@
|
||||
STORAGE: NONE
|
||||
RESTRICTION: || (== net6.0) (== netstandard2.0)
|
||||
NUGET
|
||||
remote: https://api.nuget.org/v3/index.json
|
||||
BlackFox.VsWhere (1.1)
|
||||
FSharp.Core (>= 4.2.3)
|
||||
Microsoft.Win32.Registry (>= 4.7)
|
||||
Fake.Core.CommandLineParsing (5.22)
|
||||
FParsec (>= 1.1.1)
|
||||
FSharp.Core (>= 6.0)
|
||||
Fake.Core.Context (5.22)
|
||||
FSharp.Core (>= 6.0)
|
||||
Fake.Core.Environment (5.22)
|
||||
FSharp.Core (>= 6.0)
|
||||
Fake.Core.FakeVar (5.22)
|
||||
Fake.Core.Context (>= 5.22)
|
||||
FSharp.Core (>= 6.0)
|
||||
Fake.Core.Process (5.22)
|
||||
Fake.Core.Environment (>= 5.22)
|
||||
Fake.Core.FakeVar (>= 5.22)
|
||||
Fake.Core.String (>= 5.22)
|
||||
Fake.Core.Trace (>= 5.22)
|
||||
Fake.IO.FileSystem (>= 5.22)
|
||||
FSharp.Core (>= 6.0)
|
||||
System.Collections.Immutable (>= 5.0)
|
||||
Fake.Core.SemVer (5.22)
|
||||
FSharp.Core (>= 6.0)
|
||||
Fake.Core.String (5.22)
|
||||
FSharp.Core (>= 6.0)
|
||||
Fake.Core.Target (5.22)
|
||||
Fake.Core.CommandLineParsing (>= 5.22)
|
||||
Fake.Core.Context (>= 5.22)
|
||||
Fake.Core.Environment (>= 5.22)
|
||||
Fake.Core.FakeVar (>= 5.22)
|
||||
Fake.Core.Process (>= 5.22)
|
||||
Fake.Core.String (>= 5.22)
|
||||
Fake.Core.Trace (>= 5.22)
|
||||
FSharp.Control.Reactive (>= 5.0.2)
|
||||
FSharp.Core (>= 6.0)
|
||||
Fake.Core.Tasks (5.22)
|
||||
Fake.Core.Trace (>= 5.22)
|
||||
FSharp.Core (>= 6.0)
|
||||
Fake.Core.Trace (5.22)
|
||||
Fake.Core.Environment (>= 5.22)
|
||||
Fake.Core.FakeVar (>= 5.22)
|
||||
FSharp.Core (>= 6.0)
|
||||
Fake.Core.Xml (5.22)
|
||||
Fake.Core.String (>= 5.22)
|
||||
FSharp.Core (>= 6.0)
|
||||
Fake.DotNet.Cli (5.22)
|
||||
Fake.Core.Environment (>= 5.22)
|
||||
Fake.Core.Process (>= 5.22)
|
||||
Fake.Core.String (>= 5.22)
|
||||
Fake.Core.Trace (>= 5.22)
|
||||
Fake.DotNet.MSBuild (>= 5.22)
|
||||
Fake.DotNet.NuGet (>= 5.22)
|
||||
Fake.IO.FileSystem (>= 5.22)
|
||||
FSharp.Core (>= 6.0)
|
||||
Mono.Posix.NETStandard (>= 1.0)
|
||||
Newtonsoft.Json (>= 13.0.1)
|
||||
Fake.DotNet.MSBuild (5.22)
|
||||
BlackFox.VsWhere (>= 1.1)
|
||||
Fake.Core.Environment (>= 5.22)
|
||||
Fake.Core.Process (>= 5.22)
|
||||
Fake.Core.String (>= 5.22)
|
||||
Fake.Core.Trace (>= 5.22)
|
||||
Fake.IO.FileSystem (>= 5.22)
|
||||
FSharp.Core (>= 6.0)
|
||||
MSBuild.StructuredLogger (>= 2.1.545)
|
||||
Fake.DotNet.NuGet (5.22)
|
||||
Fake.Core.Environment (>= 5.22)
|
||||
Fake.Core.Process (>= 5.22)
|
||||
Fake.Core.SemVer (>= 5.22)
|
||||
Fake.Core.String (>= 5.22)
|
||||
Fake.Core.Tasks (>= 5.22)
|
||||
Fake.Core.Trace (>= 5.22)
|
||||
Fake.Core.Xml (>= 5.22)
|
||||
Fake.IO.FileSystem (>= 5.22)
|
||||
Fake.Net.Http (>= 5.22)
|
||||
FSharp.Core (>= 6.0)
|
||||
Newtonsoft.Json (>= 13.0.1)
|
||||
NuGet.Protocol (>= 5.11)
|
||||
Fake.IO.FileSystem (5.22)
|
||||
Fake.Core.String (>= 5.22)
|
||||
FSharp.Core (>= 6.0)
|
||||
Fake.IO.Zip (5.22)
|
||||
Fake.Core.String (>= 5.22)
|
||||
Fake.IO.FileSystem (>= 5.22)
|
||||
FSharp.Core (>= 6.0)
|
||||
Fake.Net.Http (5.22)
|
||||
Fake.Core.Trace (>= 5.22)
|
||||
FSharp.Core (>= 6.0)
|
||||
FParsec (1.1.1)
|
||||
FSharp.Core (>= 4.3.4)
|
||||
FSharp.Control.Reactive (5.0.5)
|
||||
FSharp.Core (>= 4.7.2)
|
||||
System.Reactive (>= 5.0 < 6.0)
|
||||
FSharp.Core (6.0.5)
|
||||
Microsoft.Build (17.2)
|
||||
Microsoft.Build.Framework (>= 17.2) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
|
||||
Microsoft.NET.StringTools (>= 1.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
|
||||
Microsoft.Win32.Registry (>= 4.3) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net6.0))
|
||||
System.Collections.Immutable (>= 5.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
|
||||
System.Configuration.ConfigurationManager (>= 4.7) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
|
||||
System.Reflection.Metadata (>= 1.6) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net6.0))
|
||||
System.Security.Principal.Windows (>= 4.7) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net6.0))
|
||||
System.Text.Encoding.CodePages (>= 4.0.1) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net6.0))
|
||||
System.Text.Json (>= 6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
|
||||
System.Threading.Tasks.Dataflow (>= 6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
|
||||
Microsoft.Build.Framework (17.2)
|
||||
Microsoft.Win32.Registry (>= 4.3)
|
||||
System.Security.Permissions (>= 4.7)
|
||||
Microsoft.Build.Tasks.Core (17.2)
|
||||
Microsoft.Build.Framework (>= 17.2)
|
||||
Microsoft.Build.Utilities.Core (>= 17.2)
|
||||
Microsoft.NET.StringTools (>= 1.0)
|
||||
Microsoft.Win32.Registry (>= 4.3)
|
||||
System.CodeDom (>= 4.4)
|
||||
System.Collections.Immutable (>= 5.0)
|
||||
System.Reflection.Metadata (>= 1.6)
|
||||
System.Resources.Extensions (>= 4.6)
|
||||
System.Security.Cryptography.Pkcs (>= 4.7)
|
||||
System.Security.Cryptography.Xml (>= 4.7)
|
||||
System.Security.Permissions (>= 4.7)
|
||||
System.Threading.Tasks.Dataflow (>= 6.0)
|
||||
Microsoft.Build.Utilities.Core (17.2)
|
||||
Microsoft.Build.Framework (>= 17.2)
|
||||
Microsoft.NET.StringTools (>= 1.0)
|
||||
Microsoft.Win32.Registry (>= 4.3)
|
||||
System.Collections.Immutable (>= 5.0)
|
||||
System.Configuration.ConfigurationManager (>= 4.7)
|
||||
System.Security.Permissions (>= 4.7) - restriction: == netstandard2.0
|
||||
System.Text.Encoding.CodePages (>= 4.0.1) - restriction: == netstandard2.0
|
||||
Microsoft.NET.StringTools (1.0)
|
||||
System.Memory (>= 4.5.4)
|
||||
System.Runtime.CompilerServices.Unsafe (>= 5.0)
|
||||
Microsoft.NETCore.Platforms (6.0.4) - restriction: || (&& (== net6.0) (< netcoreapp3.1)) (&& (== net6.0) (< netstandard1.2)) (&& (== net6.0) (< netstandard1.3)) (&& (== net6.0) (< netstandard1.5)) (== netstandard2.0)
|
||||
Microsoft.NETCore.Targets (5.0) - restriction: || (&& (== net6.0) (< netcoreapp3.1)) (&& (== net6.0) (< netstandard1.2)) (&& (== net6.0) (< netstandard1.3)) (&& (== net6.0) (< netstandard1.5)) (== netstandard2.0)
|
||||
Microsoft.Win32.Registry (5.0)
|
||||
System.Buffers (>= 4.5.1) - restriction: || (&& (== net6.0) (>= monoandroid) (< netstandard1.3)) (&& (== net6.0) (>= monotouch)) (&& (== net6.0) (< netcoreapp2.0)) (&& (== net6.0) (>= xamarinios)) (&& (== net6.0) (>= xamarinmac)) (&& (== net6.0) (>= xamarintvos)) (&& (== net6.0) (>= xamarinwatchos)) (== netstandard2.0)
|
||||
System.Memory (>= 4.5.4) - restriction: || (&& (== net6.0) (< netcoreapp2.0)) (&& (== net6.0) (< netcoreapp2.1)) (&& (== net6.0) (>= uap10.1)) (== netstandard2.0)
|
||||
System.Security.AccessControl (>= 5.0)
|
||||
System.Security.Principal.Windows (>= 5.0)
|
||||
Microsoft.Win32.SystemEvents (6.0.1) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.1))
|
||||
Mono.Posix.NETStandard (1.0)
|
||||
MSBuild.StructuredLogger (2.1.669)
|
||||
Microsoft.Build (>= 16.10)
|
||||
Microsoft.Build.Framework (>= 16.10)
|
||||
Microsoft.Build.Tasks.Core (>= 16.10)
|
||||
Microsoft.Build.Utilities.Core (>= 16.10)
|
||||
Newtonsoft.Json (13.0.1)
|
||||
NuGet.Common (6.2.1)
|
||||
NuGet.Frameworks (>= 6.2.1)
|
||||
NuGet.Configuration (6.2.1)
|
||||
NuGet.Common (>= 6.2.1)
|
||||
System.Security.Cryptography.ProtectedData (>= 4.4)
|
||||
NuGet.Frameworks (6.2.1)
|
||||
NuGet.Packaging (6.2.1)
|
||||
Newtonsoft.Json (>= 13.0.1)
|
||||
NuGet.Configuration (>= 6.2.1)
|
||||
NuGet.Versioning (>= 6.2.1)
|
||||
System.Security.Cryptography.Cng (>= 5.0)
|
||||
System.Security.Cryptography.Pkcs (>= 5.0)
|
||||
NuGet.Protocol (6.2.1)
|
||||
NuGet.Packaging (>= 6.2.1)
|
||||
NuGet.Versioning (6.2.1)
|
||||
System.Buffers (4.5.1) - restriction: || (&& (== net6.0) (>= monoandroid) (< netstandard1.3)) (&& (== net6.0) (>= monotouch)) (&& (== net6.0) (< netcoreapp2.0)) (&& (== net6.0) (>= xamarinios)) (&& (== net6.0) (>= xamarinmac)) (&& (== net6.0) (>= xamarintvos)) (&& (== net6.0) (>= xamarinwatchos)) (== netstandard2.0)
|
||||
System.CodeDom (6.0)
|
||||
System.Collections.Immutable (6.0)
|
||||
System.Memory (>= 4.5.4) - restriction: || (&& (== net6.0) (>= net461)) (== netstandard2.0)
|
||||
System.Runtime.CompilerServices.Unsafe (>= 6.0)
|
||||
System.Configuration.ConfigurationManager (6.0)
|
||||
System.Security.Cryptography.ProtectedData (>= 6.0)
|
||||
System.Security.Permissions (>= 6.0)
|
||||
System.Drawing.Common (6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.1))
|
||||
Microsoft.Win32.SystemEvents (>= 6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.1))
|
||||
System.Formats.Asn1 (6.0)
|
||||
System.Buffers (>= 4.5.1) - restriction: || (&& (== net6.0) (>= net461)) (== netstandard2.0)
|
||||
System.Memory (>= 4.5.4) - restriction: || (&& (== net6.0) (>= net461)) (== netstandard2.0)
|
||||
System.Memory (4.5.5)
|
||||
System.Buffers (>= 4.5.1) - restriction: || (&& (== net6.0) (>= monotouch)) (&& (== net6.0) (>= net461)) (&& (== net6.0) (< netcoreapp2.0)) (&& (== net6.0) (< netstandard1.1)) (&& (== net6.0) (< netstandard2.0)) (&& (== net6.0) (>= xamarinios)) (&& (== net6.0) (>= xamarinmac)) (&& (== net6.0) (>= xamarintvos)) (&& (== net6.0) (>= xamarinwatchos)) (== netstandard2.0)
|
||||
System.Numerics.Vectors (>= 4.4) - restriction: || (&& (== net6.0) (< netcoreapp2.0)) (== netstandard2.0)
|
||||
System.Runtime.CompilerServices.Unsafe (>= 4.5.3) - restriction: || (&& (== net6.0) (>= monotouch)) (&& (== net6.0) (>= net461)) (&& (== net6.0) (< netcoreapp2.0)) (&& (== net6.0) (< netcoreapp2.1)) (&& (== net6.0) (< netstandard1.1)) (&& (== net6.0) (< netstandard2.0)) (&& (== net6.0) (>= uap10.1)) (&& (== net6.0) (>= xamarinios)) (&& (== net6.0) (>= xamarinmac)) (&& (== net6.0) (>= xamarintvos)) (&& (== net6.0) (>= xamarinwatchos)) (== netstandard2.0)
|
||||
System.Numerics.Vectors (4.5) - restriction: || (&& (== net6.0) (>= net461)) (== netstandard2.0)
|
||||
System.Reactive (5.0)
|
||||
System.Runtime.InteropServices.WindowsRuntime (>= 4.3) - restriction: || (&& (== net6.0) (< netcoreapp3.1)) (== netstandard2.0)
|
||||
System.Threading.Tasks.Extensions (>= 4.5.4) - restriction: || (&& (== net6.0) (>= net472)) (&& (== net6.0) (< netcoreapp3.1)) (&& (== net6.0) (>= uap10.1)) (== netstandard2.0)
|
||||
System.Reflection.Metadata (6.0.1)
|
||||
System.Collections.Immutable (>= 6.0)
|
||||
System.Resources.Extensions (6.0)
|
||||
System.Memory (>= 4.5.4) - restriction: || (&& (== net6.0) (>= net461)) (== netstandard2.0)
|
||||
System.Runtime (4.3.1) - restriction: || (&& (== net6.0) (< netcoreapp3.1)) (== netstandard2.0)
|
||||
Microsoft.NETCore.Platforms (>= 1.1.1)
|
||||
Microsoft.NETCore.Targets (>= 1.1.3)
|
||||
System.Runtime.CompilerServices.Unsafe (6.0)
|
||||
System.Runtime.InteropServices.WindowsRuntime (4.3) - restriction: || (&& (== net6.0) (< netcoreapp3.1)) (== netstandard2.0)
|
||||
System.Runtime (>= 4.3)
|
||||
System.Security.AccessControl (6.0)
|
||||
System.Security.Principal.Windows (>= 5.0) - restriction: || (&& (== net6.0) (>= net461)) (== netstandard2.0)
|
||||
System.Security.Cryptography.Cng (5.0)
|
||||
System.Formats.Asn1 (>= 5.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.0))
|
||||
System.Security.Cryptography.Pkcs (6.0.1)
|
||||
System.Buffers (>= 4.5.1) - restriction: || (&& (== net6.0) (< netstandard2.1)) (== netstandard2.0)
|
||||
System.Formats.Asn1 (>= 6.0)
|
||||
System.Memory (>= 4.5.4) - restriction: || (&& (== net6.0) (< netstandard2.1)) (== netstandard2.0)
|
||||
System.Security.Cryptography.Cng (>= 5.0) - restriction: || (&& (== net6.0) (< netcoreapp3.1)) (&& (== net6.0) (< netstandard2.1)) (== netstandard2.0)
|
||||
System.Security.Cryptography.ProtectedData (6.0)
|
||||
System.Security.Cryptography.Xml (6.0)
|
||||
System.Memory (>= 4.5.4) - restriction: == netstandard2.0
|
||||
System.Security.AccessControl (>= 6.0)
|
||||
System.Security.Cryptography.Pkcs (>= 6.0)
|
||||
System.Security.Permissions (6.0)
|
||||
System.Security.AccessControl (>= 6.0)
|
||||
System.Windows.Extensions (>= 6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.1))
|
||||
System.Security.Principal.Windows (5.0)
|
||||
System.Text.Encoding.CodePages (6.0)
|
||||
System.Runtime.CompilerServices.Unsafe (>= 6.0)
|
||||
System.Text.Encodings.Web (6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
|
||||
System.Runtime.CompilerServices.Unsafe (>= 6.0)
|
||||
System.Text.Json (6.0.5) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
|
||||
System.Runtime.CompilerServices.Unsafe (>= 6.0)
|
||||
System.Text.Encodings.Web (>= 6.0)
|
||||
System.Threading.Tasks.Dataflow (6.0)
|
||||
System.Threading.Tasks.Extensions (4.5.4) - restriction: || (&& (== net6.0) (>= net472)) (&& (== net6.0) (< netcoreapp3.1)) (&& (== net6.0) (>= uap10.1)) (== netstandard2.0)
|
||||
System.Runtime.CompilerServices.Unsafe (>= 4.5.3) - restriction: || (&& (== net6.0) (>= net461)) (&& (== net6.0) (< netcoreapp2.1)) (&& (== net6.0) (< netstandard1.0)) (&& (== net6.0) (< netstandard2.0)) (&& (== net6.0) (>= wp8)) (== netstandard2.0)
|
||||
System.Windows.Extensions (6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.1))
|
||||
System.Drawing.Common (>= 6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.1))
|
7 fake.sh
@ -1,7 +0,0 @@
#!/usr/bin/env bash

set -eu
set -o pipefail

dotnet tool restore
dotnet fake "$@"
@ -1,10 +1,9 @@
<Project>
  <PropertyGroup>
    <TargetFramework>net6.0</TargetFramework>
    <TargetFrameworks>net6.0;net7.0</TargetFrameworks>
    <DebugType>embedded</DebugType>
    <AssemblyVersion>2.0.0.0</AssemblyVersion>
    <FileVersion>2.0.0.0</FileVersion>
    <Version>2.0.0</Version>
    <VersionSuffix>rc2</VersionSuffix>
  </PropertyGroup>
</Project>
@ -165,6 +165,7 @@ module Json =
            Converters               = ser.Converters,
            DefaultValueHandling     = ser.DefaultValueHandling,
            DateFormatHandling       = ser.DateFormatHandling,
            DateParseHandling        = ser.DateParseHandling,
            MetadataPropertyHandling = ser.MetadataPropertyHandling,
            MissingMemberHandling    = ser.MissingMemberHandling,
            NullValueHandling        = ser.NullValueHandling,
@ -5,19 +5,16 @@
  </ItemGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.Data.Sqlite" Version="6.0.8" />
    <PackageReference Include="Microsoft.Extensions.Caching.Abstractions" Version="6.0.0" />
    <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="6.0.0" />
    <PackageReference Include="BitBadger.Npgsql.FSharp.Documents" Version="1.0.0-beta2" />
    <PackageReference Include="Microsoft.Data.Sqlite" Version="7.0.3" />
    <PackageReference Include="Microsoft.Extensions.Caching.Abstractions" Version="7.0.0" />
    <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="7.0.0" />
    <PackageReference Include="Microsoft.FSharpLu.Json" Version="0.11.7" />
    <PackageReference Include="Newtonsoft.Json" Version="13.0.1" />
    <PackageReference Include="NodaTime" Version="3.1.2" />
    <PackageReference Include="NodaTime.Serialization.JsonNet" Version="3.0.0" />
    <PackageReference Include="Npgsql" Version="6.0.6" />
    <PackageReference Include="Npgsql.FSharp" Version="5.3.0" />
    <PackageReference Include="Npgsql.NodaTime" Version="6.0.6" />
    <PackageReference Include="Newtonsoft.Json" Version="13.0.2" />
    <PackageReference Include="NodaTime.Serialization.JsonNet" Version="3.0.1" />
    <PackageReference Include="Npgsql.NodaTime" Version="7.0.2" />
    <PackageReference Include="RethinkDb.Driver" Version="2.3.150" />
    <PackageReference Include="RethinkDb.Driver.FSharp" Version="0.9.0-beta-07" />
    <PackageReference Update="FSharp.Core" Version="6.0.5" />
  </ItemGroup>

  <ItemGroup>
@ -45,7 +42,7 @@
    <Compile Include="Postgres\PostgresUploadData.fs" />
    <Compile Include="Postgres\PostgresWebLogData.fs" />
    <Compile Include="Postgres\PostgresWebLogUserData.fs" />
    <Compile Include="PostgresData.fs" />
    <Compile Include="PostgresData.fs" />
  </ItemGroup>

</Project>
@ -2,6 +2,7 @@ namespace MyWebLog.Data.Postgres
|
||||
|
||||
open System.Threading
|
||||
open System.Threading.Tasks
|
||||
open BitBadger.Npgsql.FSharp.Documents
|
||||
open Microsoft.Extensions.Caching.Distributed
|
||||
open NodaTime
|
||||
open Npgsql.FSharp
|
||||
@ -40,32 +41,26 @@ module private Helpers =
|
||||
|
||||
|
||||
/// A distributed cache implementation in PostgreSQL used to handle sessions for myWebLog
|
||||
type DistributedCache (connStr : string) =
|
||||
type DistributedCache () =
|
||||
|
||||
// ~~~ INITIALIZATION ~~~
|
||||
|
||||
do
|
||||
task {
|
||||
let! exists =
|
||||
Sql.connect connStr
|
||||
|> Sql.query $"
|
||||
SELECT EXISTS
|
||||
Custom.scalar
|
||||
$"SELECT EXISTS
|
||||
(SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = 'session')
|
||||
AS {existsName}"
|
||||
|> Sql.executeRowAsync Map.toExists
|
||||
AS {existsName}" [] Map.toExists
|
||||
if not exists then
|
||||
let! _ =
|
||||
Sql.connect connStr
|
||||
|> Sql.query
|
||||
do! Custom.nonQuery
|
||||
"CREATE TABLE session (
|
||||
id TEXT NOT NULL PRIMARY KEY,
|
||||
payload BYTEA NOT NULL,
|
||||
expire_at TIMESTAMPTZ NOT NULL,
|
||||
sliding_expiration INTERVAL,
|
||||
absolute_expiration TIMESTAMPTZ);
|
||||
CREATE INDEX idx_session_expiration ON session (expire_at)"
|
||||
|> Sql.executeNonQueryAsync
|
||||
()
|
||||
CREATE INDEX idx_session_expiration ON session (expire_at)" []
|
||||
} |> sync
|
||||
|
||||
// ~~~ SUPPORT FUNCTIONS ~~~
|
||||
@ -74,16 +69,13 @@ type DistributedCache (connStr : string) =
|
||||
let getEntry key = backgroundTask {
|
||||
let idParam = "@id", Sql.string key
|
||||
let! tryEntry =
|
||||
Sql.connect connStr
|
||||
|> Sql.query "SELECT * FROM session WHERE id = @id"
|
||||
|> Sql.parameters [ idParam ]
|
||||
|> Sql.executeAsync (fun row ->
|
||||
{ Id = row.string "id"
|
||||
Payload = row.bytea "payload"
|
||||
ExpireAt = row.fieldValue<Instant> "expire_at"
|
||||
SlidingExpiration = row.fieldValueOrNone<Duration> "sliding_expiration"
|
||||
AbsoluteExpiration = row.fieldValueOrNone<Instant> "absolute_expiration" })
|
||||
|> tryHead
|
||||
Custom.single "SELECT * FROM session WHERE id = @id" [ idParam ]
|
||||
(fun row ->
|
||||
{ Id = row.string "id"
|
||||
Payload = row.bytea "payload"
|
||||
ExpireAt = row.fieldValue<Instant> "expire_at"
|
||||
SlidingExpiration = row.fieldValueOrNone<Duration> "sliding_expiration"
|
||||
AbsoluteExpiration = row.fieldValueOrNone<Instant> "absolute_expiration" })
|
||||
match tryEntry with
|
||||
| Some entry ->
|
||||
let now = getNow ()
|
||||
@ -96,11 +88,8 @@ type DistributedCache (connStr : string) =
|
||||
true, { entry with ExpireAt = absExp }
|
||||
else true, { entry with ExpireAt = now.Plus slideExp }
|
||||
if needsRefresh then
|
||||
let! _ =
|
||||
Sql.connect connStr
|
||||
|> Sql.query "UPDATE session SET expire_at = @expireAt WHERE id = @id"
|
||||
|> Sql.parameters [ expireParam item.ExpireAt; idParam ]
|
||||
|> Sql.executeNonQueryAsync
|
||||
do! Custom.nonQuery "UPDATE session SET expire_at = @expireAt WHERE id = @id"
|
||||
[ expireParam item.ExpireAt; idParam ]
|
||||
()
|
||||
return if item.ExpireAt > now then Some entry else None
|
||||
| None -> return None
|
||||
@ -113,26 +102,16 @@ type DistributedCache (connStr : string) =
|
||||
let purge () = backgroundTask {
|
||||
let now = getNow ()
|
||||
if lastPurge.Plus (Duration.FromMinutes 30L) < now then
|
||||
let! _ =
|
||||
Sql.connect connStr
|
||||
|> Sql.query "DELETE FROM session WHERE expire_at < @expireAt"
|
||||
|> Sql.parameters [ expireParam now ]
|
||||
|> Sql.executeNonQueryAsync
|
||||
do! Custom.nonQuery "DELETE FROM session WHERE expire_at < @expireAt" [ expireParam now ]
|
||||
lastPurge <- now
|
||||
}
|
||||
|
||||
/// Remove a cache entry
|
||||
let removeEntry key = backgroundTask {
|
||||
let! _ =
|
||||
Sql.connect connStr
|
||||
|> Sql.query "DELETE FROM session WHERE id = @id"
|
||||
|> Sql.parameters [ "@id", Sql.string key ]
|
||||
|> Sql.executeNonQueryAsync
|
||||
()
|
||||
}
|
||||
let removeEntry key =
|
||||
Delete.byId "session" key
|
||||
|
||||
/// Save an entry
|
||||
let saveEntry (opts : DistributedCacheEntryOptions) key payload = backgroundTask {
|
||||
let saveEntry (opts : DistributedCacheEntryOptions) key payload =
|
||||
let now = getNow ()
|
||||
let expireAt, slideExp, absExp =
|
||||
if opts.SlidingExpiration.HasValue then
|
||||
@ -148,27 +127,21 @@ type DistributedCache (connStr : string) =
|
||||
// Default to 1 hour sliding expiration
|
||||
let slide = Duration.FromHours 1
|
||||
now.Plus slide, Some slide, None
|
||||
let! _ =
|
||||
Sql.connect connStr
|
||||
|> Sql.query
|
||||
"INSERT INTO session (
|
||||
id, payload, expire_at, sliding_expiration, absolute_expiration
|
||||
) VALUES (
|
||||
@id, @payload, @expireAt, @slideExp, @absExp
|
||||
) ON CONFLICT (id) DO UPDATE
|
||||
SET payload = EXCLUDED.payload,
|
||||
expire_at = EXCLUDED.expire_at,
|
||||
sliding_expiration = EXCLUDED.sliding_expiration,
|
||||
absolute_expiration = EXCLUDED.absolute_expiration"
|
||||
|> Sql.parameters
|
||||
[ "@id", Sql.string key
|
||||
"@payload", Sql.bytea payload
|
||||
expireParam expireAt
|
||||
optParam "slideExp" slideExp
|
||||
optParam "absExp" absExp ]
|
||||
|> Sql.executeNonQueryAsync
|
||||
()
|
||||
}
|
||||
Custom.nonQuery
|
||||
"INSERT INTO session (
|
||||
id, payload, expire_at, sliding_expiration, absolute_expiration
|
||||
) VALUES (
|
||||
@id, @payload, @expireAt, @slideExp, @absExp
|
||||
) ON CONFLICT (id) DO UPDATE
|
||||
SET payload = EXCLUDED.payload,
|
||||
expire_at = EXCLUDED.expire_at,
|
||||
sliding_expiration = EXCLUDED.sliding_expiration,
|
||||
absolute_expiration = EXCLUDED.absolute_expiration"
|
||||
[ "@id", Sql.string key
|
||||
"@payload", Sql.bytea payload
|
||||
expireParam expireAt
|
||||
optParam "slideExp" slideExp
|
||||
optParam "absExp" absExp ]
|
||||
|
||||
// ~~~ IMPLEMENTATION FUNCTIONS ~~~
|
||||
|
||||
@ -200,11 +173,11 @@ type DistributedCache (connStr : string) =
|
||||
}
|
||||
|
||||
interface IDistributedCache with
|
||||
member this.Get key = get key CancellationToken.None |> sync
|
||||
member this.GetAsync (key, token) = get key token
|
||||
member this.Refresh key = refresh key CancellationToken.None |> sync
|
||||
member this.RefreshAsync (key, token) = refresh key token
|
||||
member this.Remove key = remove key CancellationToken.None |> sync
|
||||
member this.RemoveAsync (key, token) = remove key token
|
||||
member this.Set (key, value, options) = set key value options CancellationToken.None |> sync
|
||||
member this.SetAsync (key, value, options, token) = set key value options token
|
||||
member _.Get key = get key CancellationToken.None |> sync
|
||||
member _.GetAsync (key, token) = get key token
|
||||
member _.Refresh key = refresh key CancellationToken.None |> sync
|
||||
member _.RefreshAsync (key, token) = refresh key token
|
||||
member _.Remove key = remove key CancellationToken.None |> sync
|
||||
member _.RemoveAsync (key, token) = remove key token
|
||||
member _.Set (key, value, options) = set key value options CancellationToken.None |> sync
|
||||
member _.SetAsync (key, value, options, token) = set key value options token
|
||||
|
@ -1,34 +1,30 @@
|
||||
namespace MyWebLog.Data.Postgres
|
||||
|
||||
open BitBadger.Npgsql.FSharp.Documents
|
||||
open Microsoft.Extensions.Logging
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
open Npgsql
|
||||
open Npgsql.FSharp
|
||||
|
||||
/// PostgreSQL myWebLog category data implementation
|
||||
type PostgresCategoryData (conn : NpgsqlConnection) =
|
||||
type PostgresCategoryData (log : ILogger) =
|
||||
|
||||
/// Count all categories for the given web log
|
||||
let countAll webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"SELECT COUNT(id) AS {countName} FROM category WHERE web_log_id = @webLogId"
|
||||
|> Sql.parameters [ webLogIdParam webLogId ]
|
||||
|> Sql.executeRowAsync Map.toCount
|
||||
log.LogTrace "Category.countAll"
|
||||
Count.byContains Table.Category (webLogDoc webLogId)
|
||||
|
||||
/// Count all top-level categories for the given web log
|
||||
let countTopLevel webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"SELECT COUNT(id) AS {countName} FROM category WHERE web_log_id = @webLogId AND parent_id IS NULL"
|
||||
|> Sql.parameters [ webLogIdParam webLogId ]
|
||||
|> Sql.executeRowAsync Map.toCount
|
||||
log.LogTrace "Category.countTopLevel"
|
||||
Count.byContains Table.Category {| webLogDoc webLogId with ParentId = None |}
|
||||
|
||||
/// Retrieve all categories for the given web log in a DotLiquid-friendly format
|
||||
let findAllForView webLogId = backgroundTask {
|
||||
log.LogTrace "Category.findAllForView"
|
||||
let! cats =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM category WHERE web_log_id = @webLogId ORDER BY LOWER(name)"
|
||||
|> Sql.parameters [ webLogIdParam webLogId ]
|
||||
|> Sql.executeAsync Map.toCategory
|
||||
Custom.list $"{selectWithCriteria Table.Category} ORDER BY LOWER(data ->> '{nameof Category.empty.Name}')"
|
||||
[ webLogContains webLogId ] fromData<Category>
|
||||
let ordered = Utils.orderByHierarchy cats None None []
|
||||
let counts =
|
||||
ordered
|
||||
@ -40,18 +36,17 @@ type PostgresCategoryData (conn : NpgsqlConnection) =
|
||||
|> Seq.map (fun cat -> cat.Id)
|
||||
|> Seq.append (Seq.singleton it.Id)
|
||||
|> List.ofSeq
|
||||
|> inClause "AND pc.category_id" "id" id
|
||||
|> arrayContains (nameof Post.empty.CategoryIds) id
|
||||
let postCount =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"
|
||||
SELECT COUNT(DISTINCT p.id) AS {countName}
|
||||
FROM post p
|
||||
INNER JOIN post_category pc ON pc.post_id = p.id
|
||||
WHERE p.web_log_id = @webLogId
|
||||
AND p.status = 'Published'
|
||||
{catIdSql}"
|
||||
|> Sql.parameters (webLogIdParam webLogId :: catIdParams)
|
||||
|> Sql.executeRowAsync Map.toCount
|
||||
Custom.scalar
|
||||
$"""SELECT COUNT(DISTINCT id) AS {countName}
|
||||
FROM {Table.Post}
|
||||
WHERE {Query.whereDataContains "@criteria"}
|
||||
AND {catIdSql}"""
|
||||
[ "@criteria",
|
||||
Query.jsonbDocParam {| webLogDoc webLogId with Status = PostStatus.toString Published |}
|
||||
catIdParams
|
||||
] Map.toCount
|
||||
|> Async.AwaitTask
|
||||
|> Async.RunSynchronously
|
||||
it.Id, postCount)
|
||||
@ -69,93 +64,75 @@ type PostgresCategoryData (conn : NpgsqlConnection) =
|
||||
}
|
||||
/// Find a category by its ID for the given web log
|
||||
let findById catId webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM category WHERE id = @id AND web_log_id = @webLogId"
|
||||
|> Sql.parameters [ "@id", Sql.string (CategoryId.toString catId); webLogIdParam webLogId ]
|
||||
|> Sql.executeAsync Map.toCategory
|
||||
|> tryHead
|
||||
log.LogTrace "Category.findById"
|
||||
Document.findByIdAndWebLog<CategoryId, Category> Table.Category catId CategoryId.toString webLogId
|
||||
|
||||
/// Find all categories for the given web log
|
||||
let findByWebLog webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM category WHERE web_log_id = @webLogId"
|
||||
|> Sql.parameters [ webLogIdParam webLogId ]
|
||||
|> Sql.executeAsync Map.toCategory
|
||||
log.LogTrace "Category.findByWebLog"
|
||||
Document.findByWebLog<Category> Table.Category webLogId
|
||||
|
||||
/// Create parameters for a category insert / update
|
||||
let catParameters (cat : Category) =
|
||||
Query.docParameters (CategoryId.toString cat.Id) cat
|
||||
|
||||
/// Delete a category
|
||||
let delete catId webLogId = backgroundTask {
|
||||
log.LogTrace "Category.delete"
|
||||
match! findById catId webLogId with
|
||||
| Some cat ->
|
||||
// Reassign any children to the category's parent category
|
||||
let parentParam = "@parentId", Sql.string (CategoryId.toString catId)
|
||||
let! hasChildren =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"SELECT EXISTS (SELECT 1 FROM category WHERE parent_id = @parentId) AS {existsName}"
|
||||
|> Sql.parameters [ parentParam ]
|
||||
|> Sql.executeRowAsync Map.toExists
|
||||
let! children = Find.byContains<Category> Table.Category {| ParentId = CategoryId.toString catId |}
|
||||
let hasChildren = not (List.isEmpty children)
|
||||
if hasChildren then
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "UPDATE category SET parent_id = @newParentId WHERE parent_id = @parentId"
|
||||
|> Sql.parameters
|
||||
[ parentParam
|
||||
"@newParentId", Sql.stringOrNone (cat.ParentId |> Option.map CategoryId.toString) ]
|
||||
|> Sql.executeNonQueryAsync
|
||||
Configuration.dataSource ()
|
||||
|> Sql.fromDataSource
|
||||
|> Sql.executeTransactionAsync [
|
||||
Query.Update.partialById Table.Category,
|
||||
children |> List.map (fun child -> [
|
||||
"@id", Sql.string (CategoryId.toString child.Id)
|
||||
"@data", Query.jsonbDocParam {| ParentId = cat.ParentId |}
|
||||
])
|
||||
]
|
||||
()
|
||||
// Delete the category off all posts where it is assigned, and the category itself
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query
|
||||
"DELETE FROM post_category
|
||||
WHERE category_id = @id
|
||||
AND post_id IN (SELECT id FROM post WHERE web_log_id = @webLogId);
|
||||
DELETE FROM category WHERE id = @id"
|
||||
|> Sql.parameters [ "@id", Sql.string (CategoryId.toString catId); webLogIdParam webLogId ]
|
||||
|> Sql.executeNonQueryAsync
|
||||
// Delete the category off all posts where it is assigned
|
||||
let! posts =
|
||||
Custom.list $"SELECT data FROM {Table.Post} WHERE data -> '{nameof Post.empty.CategoryIds}' @> @id"
|
||||
[ "@id", Query.jsonbDocParam [| CategoryId.toString catId |] ] fromData<Post>
|
||||
if not (List.isEmpty posts) then
|
||||
let! _ =
|
||||
Configuration.dataSource ()
|
||||
|> Sql.fromDataSource
|
||||
|> Sql.executeTransactionAsync [
|
||||
Query.Update.partialById Table.Post,
|
||||
posts |> List.map (fun post -> [
|
||||
"@id", Sql.string (PostId.toString post.Id)
|
||||
"@data", Query.jsonbDocParam
|
||||
{| CategoryIds = post.CategoryIds |> List.filter (fun cat -> cat <> catId) |}
|
||||
])
|
||||
]
|
||||
()
|
||||
// Delete the category itself
|
||||
do! Delete.byId Table.Category (CategoryId.toString catId)
|
||||
return if hasChildren then ReassignedChildCategories else CategoryDeleted
|
||||
| None -> return CategoryNotFound
|
||||
}
|
||||
|
||||
/// The INSERT statement for a category
|
||||
let catInsert =
|
||||
"INSERT INTO category (
|
||||
id, web_log_id, name, slug, description, parent_id
|
||||
) VALUES (
|
||||
@id, @webLogId, @name, @slug, @description, @parentId
|
||||
)"
|
||||
|
||||
/// Create parameters for a category insert / update
|
||||
let catParameters (cat : Category) = [
|
||||
webLogIdParam cat.WebLogId
|
||||
"@id", Sql.string (CategoryId.toString cat.Id)
|
||||
"@name", Sql.string cat.Name
|
||||
"@slug", Sql.string cat.Slug
|
||||
"@description", Sql.stringOrNone cat.Description
|
||||
"@parentId", Sql.stringOrNone (cat.ParentId |> Option.map CategoryId.toString)
|
||||
]
|
||||
|
||||
/// Save a category
|
||||
let save cat = backgroundTask {
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"
|
||||
{catInsert} ON CONFLICT (id) DO UPDATE
|
||||
SET name = EXCLUDED.name,
|
||||
slug = EXCLUDED.slug,
|
||||
description = EXCLUDED.description,
|
||||
parent_id = EXCLUDED.parent_id"
|
||||
|> Sql.parameters (catParameters cat)
|
||||
|> Sql.executeNonQueryAsync
|
||||
()
|
||||
let save (cat : Category) = backgroundTask {
|
||||
log.LogTrace "Category.save"
|
||||
do! save Table.Category (CategoryId.toString cat.Id) cat
|
||||
}
|
||||
|
||||
/// Restore categories from a backup
|
||||
let restore cats = backgroundTask {
|
||||
log.LogTrace "Category.restore"
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
Configuration.dataSource ()
|
||||
|> Sql.fromDataSource
|
||||
|> Sql.executeTransactionAsync [
|
||||
catInsert, cats |> List.map catParameters
|
||||
Query.insert Table.Category, cats |> List.map catParameters
|
||||
]
|
||||
()
|
||||
}
|
||||
|
@ -2,11 +2,68 @@
|
||||
[<AutoOpen>]
|
||||
module MyWebLog.Data.Postgres.PostgresHelpers
|
||||
|
||||
/// The table names used in the PostgreSQL implementation
|
||||
[<RequireQualifiedAccess>]
|
||||
module Table =
|
||||
|
||||
/// Categories
|
||||
[<Literal>]
|
||||
let Category = "category"
|
||||
|
||||
/// Database Version
|
||||
[<Literal>]
|
||||
let DbVersion = "db_version"
|
||||
|
||||
/// Pages
|
||||
[<Literal>]
|
||||
let Page = "page"
|
||||
|
||||
/// Page Revisions
|
||||
[<Literal>]
|
||||
let PageRevision = "page_revision"
|
||||
|
||||
/// Posts
|
||||
[<Literal>]
|
||||
let Post = "post"
|
||||
|
||||
/// Post Comments
|
||||
[<Literal>]
|
||||
let PostComment = "post_comment"
|
||||
|
||||
/// Post Revisions
|
||||
[<Literal>]
|
||||
let PostRevision = "post_revision"
|
||||
|
||||
/// Tag/URL Mappings
|
||||
[<Literal>]
|
||||
let TagMap = "tag_map"
|
||||
|
||||
/// Themes
|
||||
[<Literal>]
|
||||
let Theme = "theme"
|
||||
|
||||
/// Theme Assets
|
||||
[<Literal>]
|
||||
let ThemeAsset = "theme_asset"
|
||||
|
||||
/// Uploads
|
||||
[<Literal>]
|
||||
let Upload = "upload"
|
||||
|
||||
/// Web Logs
|
||||
[<Literal>]
|
||||
let WebLog = "web_log"
|
||||
|
||||
/// Users
|
||||
[<Literal>]
|
||||
let WebLogUser = "web_log_user"
|
||||
|
||||
|
||||
open System
|
||||
open System.Threading.Tasks
|
||||
open BitBadger.Npgsql.FSharp.Documents
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
open Newtonsoft.Json
|
||||
open NodaTime
|
||||
open Npgsql
|
||||
open Npgsql.FSharp
|
||||
@ -15,12 +72,24 @@ open Npgsql.FSharp
|
||||
let webLogIdParam webLogId =
|
||||
"@webLogId", Sql.string (WebLogId.toString webLogId)
|
||||
|
||||
/// Create an anonymous record with the given web log ID
|
||||
let webLogDoc (webLogId : WebLogId) =
|
||||
{| WebLogId = webLogId |}
|
||||
|
||||
/// Create a parameter for a web log document-contains query
|
||||
let webLogContains webLogId =
|
||||
"@criteria", Query.jsonbDocParam (webLogDoc webLogId)
|
||||
|
||||
/// The name of the field to select to be able to use Map.toCount
|
||||
let countName = "the_count"
|
||||
|
||||
/// The name of the field to select to be able to use Map.toExists
|
||||
let existsName = "does_exist"
|
||||
|
||||
/// A SQL string to select data from a table with the given JSON document contains criteria
|
||||
let selectWithCriteria tableName =
|
||||
$"""{Query.selectFromTable tableName} WHERE {Query.whereDataContains "@criteria"}"""
|
||||
|
||||
/// Create the SQL and parameters for an IN clause
|
||||
let inClause<'T> colNameAndPrefix paramName (valueFunc: 'T -> string) (items : 'T list) =
|
||||
if List.isEmpty items then "", []
|
||||
@ -37,22 +106,11 @@ let inClause<'T> colNameAndPrefix paramName (valueFunc: 'T -> string) (items : '
|
||||
|> Seq.head)
|
||||
|> function sql, ps -> $"{sql})", ps
|
||||
|
||||
/// Create the SQL and parameters for the array equivalent of an IN clause
|
||||
let arrayInClause<'T> name (valueFunc : 'T -> string) (items : 'T list) =
|
||||
if List.isEmpty items then "TRUE = FALSE", []
|
||||
else
|
||||
let mutable idx = 0
|
||||
items
|
||||
|> List.skip 1
|
||||
|> List.fold (fun (itemS, itemP) it ->
|
||||
idx <- idx + 1
|
||||
$"{itemS} OR %s{name} && ARRAY[@{name}{idx}]",
|
||||
($"@{name}{idx}", Sql.string (valueFunc it)) :: itemP)
|
||||
(Seq.ofList items
|
||||
|> Seq.map (fun it ->
|
||||
$"{name} && ARRAY[@{name}0]", [ $"@{name}0", Sql.string (valueFunc it) ])
|
||||
|> Seq.head)
|
||||
|
||||
/// Create the SQL and parameters for match-any array query
|
||||
let arrayContains<'T> name (valueFunc : 'T -> string) (items : 'T list) =
|
||||
$"data['{name}'] ?| @{name}Values",
|
||||
($"@{name}Values", Sql.stringArray (items |> List.map valueFunc |> Array.ofList))
|
||||
|
||||
/// Get the first result of the given query
|
||||
let tryHead<'T> (query : Task<'T list>) = backgroundTask {
|
||||
let! results = query
|
||||
@ -71,113 +129,24 @@ let optParam<'T> name (it : 'T option) =
|
||||
/// Mapping functions for SQL queries
|
||||
module Map =
|
||||
|
||||
/// Map an id field to a category ID
|
||||
let toCategoryId (row : RowReader) =
|
||||
CategoryId (row.string "id")
|
||||
|
||||
/// Create a category from the current row
|
||||
let toCategory (row : RowReader) : Category =
|
||||
{ Id = toCategoryId row
|
||||
WebLogId = row.string "web_log_id" |> WebLogId
|
||||
Name = row.string "name"
|
||||
Slug = row.string "slug"
|
||||
Description = row.stringOrNone "description"
|
||||
ParentId = row.stringOrNone "parent_id" |> Option.map CategoryId
|
||||
}
|
||||
|
||||
/// Get a count from a row
|
||||
let toCount (row : RowReader) =
|
||||
row.int countName
|
||||
|
||||
/// Create a custom feed from the current row
|
||||
let toCustomFeed (ser : JsonSerializer) (row : RowReader) : CustomFeed =
|
||||
{ Id = row.string "id" |> CustomFeedId
|
||||
Source = row.string "source" |> CustomFeedSource.parse
|
||||
Path = row.string "path" |> Permalink
|
||||
Podcast = row.stringOrNone "podcast" |> Option.map (Utils.deserialize ser)
|
||||
}
|
||||
|
||||
/// Get a true/false value as to whether an item exists
|
||||
let toExists (row : RowReader) =
|
||||
row.bool existsName
|
||||
|
||||
/// Create a meta item from the current row
|
||||
let toMetaItem (row : RowReader) : MetaItem =
|
||||
{ Name = row.string "name"
|
||||
Value = row.string "value"
|
||||
}
|
||||
|
||||
/// Create a permalink from the current row
|
||||
let toPermalink (row : RowReader) =
|
||||
Permalink (row.string "permalink")
|
||||
|
||||
/// Create a page from the current row
|
||||
let toPage (ser : JsonSerializer) (row : RowReader) : Page =
|
||||
{ Page.empty with
|
||||
Id = row.string "id" |> PageId
|
||||
WebLogId = row.string "web_log_id" |> WebLogId
|
||||
AuthorId = row.string "author_id" |> WebLogUserId
|
||||
Title = row.string "title"
|
||||
Permalink = toPermalink row
|
||||
PriorPermalinks = row.stringArray "prior_permalinks" |> Array.map Permalink |> List.ofArray
|
||||
PublishedOn = row.fieldValue<Instant> "published_on"
|
||||
UpdatedOn = row.fieldValue<Instant> "updated_on"
|
||||
IsInPageList = row.bool "is_in_page_list"
|
||||
Template = row.stringOrNone "template"
|
||||
Text = row.string "page_text"
|
||||
Metadata = row.stringOrNone "meta_items"
|
||||
|> Option.map (Utils.deserialize ser)
|
||||
|> Option.defaultValue []
|
||||
}
|
||||
|
||||
/// Create a post from the current row
|
||||
let toPost (ser : JsonSerializer) (row : RowReader) : Post =
|
||||
{ Post.empty with
|
||||
Id = row.string "id" |> PostId
|
||||
WebLogId = row.string "web_log_id" |> WebLogId
|
||||
AuthorId = row.string "author_id" |> WebLogUserId
|
||||
Status = row.string "status" |> PostStatus.parse
|
||||
Title = row.string "title"
|
||||
Permalink = toPermalink row
|
||||
PriorPermalinks = row.stringArray "prior_permalinks" |> Array.map Permalink |> List.ofArray
|
||||
PublishedOn = row.fieldValueOrNone<Instant> "published_on"
|
||||
UpdatedOn = row.fieldValue<Instant> "updated_on"
|
||||
Template = row.stringOrNone "template"
|
||||
Text = row.string "post_text"
|
||||
Episode = row.stringOrNone "episode" |> Option.map (Utils.deserialize ser)
|
||||
CategoryIds = row.stringArrayOrNone "category_ids"
|
||||
|> Option.map (Array.map CategoryId >> List.ofArray)
|
||||
|> Option.defaultValue []
|
||||
Tags = row.stringArrayOrNone "tags"
|
||||
|> Option.map List.ofArray
|
||||
|> Option.defaultValue []
|
||||
Metadata = row.stringOrNone "meta_items"
|
||||
|> Option.map (Utils.deserialize ser)
|
||||
|> Option.defaultValue []
|
||||
}
|
||||
|
||||
/// Create a revision from the current row
|
||||
let toRevision (row : RowReader) : Revision =
|
||||
{ AsOf = row.fieldValue<Instant> "as_of"
|
||||
Text = row.string "revision_text" |> MarkupText.parse
|
||||
}
|
||||
|
||||
/// Create a tag mapping from the current row
|
||||
let toTagMap (row : RowReader) : TagMap =
|
||||
{ Id = row.string "id" |> TagMapId
|
||||
WebLogId = row.string "web_log_id" |> WebLogId
|
||||
Tag = row.string "tag"
|
||||
UrlValue = row.string "url_value"
|
||||
}
|
||||
|
||||
/// Create a theme from the current row (excludes templates)
|
||||
let toTheme (row : RowReader) : Theme =
|
||||
{ Theme.empty with
|
||||
Id = row.string "id" |> ThemeId
|
||||
Name = row.string "name"
|
||||
Version = row.string "version"
|
||||
}
|
||||
|
||||
/// Create a theme asset from the current row
|
||||
let toThemeAsset includeData (row : RowReader) : ThemeAsset =
|
||||
{ Id = ThemeAssetId (ThemeId (row.string "theme_id"), row.string "path")
|
||||
@ -185,12 +154,6 @@ module Map =
|
||||
Data = if includeData then row.bytea "data" else [||]
|
||||
}
|
||||
|
||||
/// Create a theme template from the current row
|
||||
let toThemeTemplate includeText (row : RowReader) : ThemeTemplate =
|
||||
{ Name = row.string "name"
|
||||
Text = if includeText then row.string "template" else ""
|
||||
}
|
||||
|
||||
/// Create an uploaded file from the current row
|
||||
let toUpload includeData (row : RowReader) : Upload =
|
||||
{ Id = row.string "id" |> UploadId
|
||||
@ -199,42 +162,75 @@ module Map =
|
||||
UpdatedOn = row.fieldValue<Instant> "updated_on"
|
||||
Data = if includeData then row.bytea "data" else [||]
|
||||
}
|
||||
|
||||
/// Document manipulation functions
|
||||
module Document =
|
||||
|
||||
/// Create a web log from the current row
|
||||
let toWebLog (row : RowReader) : WebLog =
|
||||
{ Id = row.string "id" |> WebLogId
|
||||
Name = row.string "name"
|
||||
Slug = row.string "slug"
|
||||
Subtitle = row.stringOrNone "subtitle"
|
||||
DefaultPage = row.string "default_page"
|
||||
PostsPerPage = row.int "posts_per_page"
|
||||
ThemeId = row.string "theme_id" |> ThemeId
|
||||
UrlBase = row.string "url_base"
|
||||
TimeZone = row.string "time_zone"
|
||||
AutoHtmx = row.bool "auto_htmx"
|
||||
Uploads = row.string "uploads" |> UploadDestination.parse
|
||||
Rss = {
|
||||
IsFeedEnabled = row.bool "is_feed_enabled"
|
||||
FeedName = row.string "feed_name"
|
||||
ItemsInFeed = row.intOrNone "items_in_feed"
|
||||
IsCategoryEnabled = row.bool "is_category_enabled"
|
||||
IsTagEnabled = row.bool "is_tag_enabled"
|
||||
Copyright = row.stringOrNone "copyright"
|
||||
CustomFeeds = []
|
||||
}
|
||||
}
|
||||
/// Determine whether a document exists with the given key for the given web log
|
||||
let existsByWebLog<'TKey> table (key : 'TKey) (keyFunc : 'TKey -> string) webLogId =
|
||||
Custom.scalar
|
||||
$""" SELECT EXISTS (
|
||||
SELECT 1 FROM %s{table} WHERE id = @id AND {Query.whereDataContains "@criteria"}
|
||||
) AS {existsName}"""
|
||||
[ "@id", Sql.string (keyFunc key); webLogContains webLogId ] Map.toExists
|
||||
|
||||
/// Create a web log user from the current row
|
||||
let toWebLogUser (row : RowReader) : WebLogUser =
|
||||
{ Id = row.string "id" |> WebLogUserId
|
||||
WebLogId = row.string "web_log_id" |> WebLogId
|
||||
Email = row.string "email"
|
||||
FirstName = row.string "first_name"
|
||||
LastName = row.string "last_name"
|
||||
PreferredName = row.string "preferred_name"
|
||||
PasswordHash = row.string "password_hash"
|
||||
Url = row.stringOrNone "url"
|
||||
AccessLevel = row.string "access_level" |> AccessLevel.parse
|
||||
CreatedOn = row.fieldValue<Instant> "created_on"
|
||||
LastSeenOn = row.fieldValueOrNone<Instant> "last_seen_on"
|
||||
}
|
||||
/// Find a document by its ID for the given web log
|
||||
let findByIdAndWebLog<'TKey, 'TDoc> table (key : 'TKey) (keyFunc : 'TKey -> string) webLogId =
|
||||
Custom.single $"""{Query.selectFromTable table} WHERE id = @id AND {Query.whereDataContains "@criteria"}"""
|
||||
[ "@id", Sql.string (keyFunc key); webLogContains webLogId ] fromData<'TDoc>
|
||||
|
||||
/// Find a document by its ID for the given web log
|
||||
let findByWebLog<'TDoc> table webLogId : Task<'TDoc list> =
|
||||
Find.byContains table (webLogDoc webLogId)
|
||||
|
||||
|
||||
/// Functions to support revisions
|
||||
module Revisions =
|
||||
|
||||
/// Find all revisions for the given entity
|
||||
let findByEntityId<'TKey> revTable entityTable (key : 'TKey) (keyFunc : 'TKey -> string) =
|
||||
Custom.list $"SELECT as_of, revision_text FROM %s{revTable} WHERE %s{entityTable}_id = @id ORDER BY as_of DESC"
|
||||
[ "@id", Sql.string (keyFunc key) ] Map.toRevision
|
||||
|
||||
/// Find all revisions for all posts for the given web log
|
||||
let findByWebLog<'TKey> revTable entityTable (keyFunc : string -> 'TKey) webLogId =
|
||||
Custom.list
|
||||
$"""SELECT pr.*
|
||||
FROM %s{revTable} pr
|
||||
INNER JOIN %s{entityTable} p ON p.id = pr.{entityTable}_id
|
||||
WHERE p.{Query.whereDataContains "@criteria"}
|
||||
ORDER BY as_of DESC"""
|
||||
[ webLogContains webLogId ] (fun row -> keyFunc (row.string $"{entityTable}_id"), Map.toRevision row)
|
||||
|
||||
/// Parameters for a revision INSERT statement
|
||||
let revParams<'TKey> (key : 'TKey) (keyFunc : 'TKey -> string) rev = [
|
||||
typedParam "asOf" rev.AsOf
|
||||
"@id", Sql.string (keyFunc key)
|
||||
"@text", Sql.string (MarkupText.toString rev.Text)
|
||||
]
|
||||
|
||||
/// The SQL statement to insert a revision
|
||||
let insertSql table =
|
||||
$"INSERT INTO %s{table} VALUES (@id, @asOf, @text)"
|
||||
|
||||
/// Update an entity's revisions
|
||||
let update<'TKey> revTable entityTable (key : 'TKey) (keyFunc : 'TKey -> string) oldRevs newRevs = backgroundTask {
|
||||
let toDelete, toAdd = Utils.diffRevisions oldRevs newRevs
|
||||
if not (List.isEmpty toDelete) || not (List.isEmpty toAdd) then
|
||||
let! _ =
|
||||
Configuration.dataSource ()
|
||||
|> Sql.fromDataSource
|
||||
|> Sql.executeTransactionAsync [
|
||||
if not (List.isEmpty toDelete) then
|
||||
$"DELETE FROM %s{revTable} WHERE %s{entityTable}_id = @id AND as_of = @asOf",
|
||||
toDelete
|
||||
|> List.map (fun it -> [
|
||||
"@id", Sql.string (keyFunc key)
|
||||
typedParam "asOf" it.AsOf
|
||||
])
|
||||
if not (List.isEmpty toAdd) then
|
||||
insertSql revTable, toAdd |> List.map (revParams key keyFunc)
|
||||
]
|
||||
()
|
||||
}
|
||||
|
||||
|
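For orientation, a minimal sketch (illustrative only, not part of this commit) of how a concrete store composes the generic Document and Revisions helpers above; the table names and key functions mirror the page implementation that follows.

/// Illustrative only: look up a page document scoped to its web log
let findPageSketch (pageId : PageId) webLogId =
    Document.findByIdAndWebLog<PageId, Page> Table.Page pageId PageId.toString webLogId

/// Illustrative only: reconcile stored page revisions with those on an updated page
let updatePageRevisionsSketch (pageId : PageId) oldRevs newRevs =
    Revisions.update Table.PageRevision Table.Page pageId PageId.toString oldRevs newRevs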
@@ -1,107 +1,63 @@
|
||||
namespace MyWebLog.Data.Postgres
|
||||
|
||||
open BitBadger.Npgsql.FSharp.Documents
|
||||
open Microsoft.Extensions.Logging
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
open Newtonsoft.Json
|
||||
open Npgsql
|
||||
open Npgsql.FSharp
|
||||
|
||||
/// PostgreSQL myWebLog page data implementation
|
||||
type PostgresPageData (conn : NpgsqlConnection, ser : JsonSerializer) =
|
||||
type PostgresPageData (log : ILogger) =
|
||||
|
||||
// SUPPORT FUNCTIONS
|
||||
|
||||
/// Append revisions and permalinks to a page
|
||||
/// Append revisions to a page
|
||||
let appendPageRevisions (page : Page) = backgroundTask {
|
||||
let! revisions =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT as_of, revision_text FROM page_revision WHERE page_id = @pageId ORDER BY as_of DESC"
|
||||
|> Sql.parameters [ "@pageId", Sql.string (PageId.toString page.Id) ]
|
||||
|> Sql.executeAsync Map.toRevision
|
||||
log.LogTrace "Page.appendPageRevisions"
|
||||
let! revisions = Revisions.findByEntityId Table.PageRevision Table.Page page.Id PageId.toString
|
||||
return { page with Revisions = revisions }
|
||||
}
|
||||
|
||||
/// Shorthand to map to a page
|
||||
let toPage = Map.toPage ser
|
||||
|
||||
/// Return a page with no text or revisions
|
||||
let pageWithoutText row =
|
||||
{ toPage row with Text = "" }
|
||||
|
||||
/// The INSERT statement for a page revision
|
||||
let revInsert = "INSERT INTO page_revision VALUES (@pageId, @asOf, @text)"
|
||||
|
||||
/// Parameters for a revision INSERT statement
|
||||
let revParams pageId rev = [
|
||||
typedParam "asOf" rev.AsOf
|
||||
"@pageId", Sql.string (PageId.toString pageId)
|
||||
"@text", Sql.string (MarkupText.toString rev.Text)
|
||||
]
|
||||
let pageWithoutText (row : RowReader) =
|
||||
{ fromData<Page> row with Text = "" }
|
||||
|
||||
/// Update a page's revisions
|
||||
let updatePageRevisions pageId oldRevs newRevs = backgroundTask {
|
||||
let toDelete, toAdd = Utils.diffRevisions oldRevs newRevs
|
||||
if not (List.isEmpty toDelete) || not (List.isEmpty toAdd) then
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.executeTransactionAsync [
|
||||
if not (List.isEmpty toDelete) then
|
||||
"DELETE FROM page_revision WHERE page_id = @pageId AND as_of = @asOf",
|
||||
toDelete
|
||||
|> List.map (fun it -> [
|
||||
"@pageId", Sql.string (PageId.toString pageId)
|
||||
typedParam "asOf" it.AsOf
|
||||
])
|
||||
if not (List.isEmpty toAdd) then
|
||||
revInsert, toAdd |> List.map (revParams pageId)
|
||||
]
|
||||
()
|
||||
}
|
||||
let updatePageRevisions pageId oldRevs newRevs =
|
||||
log.LogTrace "Page.updatePageRevisions"
|
||||
Revisions.update Table.PageRevision Table.Page pageId PageId.toString oldRevs newRevs
|
||||
|
||||
/// Does the given page exist?
|
||||
let pageExists pageId webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"SELECT EXISTS (SELECT 1 FROM page WHERE id = @id AND web_log_id = @webLogId) AS {existsName}"
|
||||
|> Sql.parameters [ "@id", Sql.string (PageId.toString pageId); webLogIdParam webLogId ]
|
||||
|> Sql.executeRowAsync Map.toExists
|
||||
log.LogTrace "Page.pageExists"
|
||||
Document.existsByWebLog Table.Page pageId PageId.toString webLogId
|
||||
|
||||
// IMPLEMENTATION FUNCTIONS
|
||||
|
||||
/// Get all pages for a web log (without text, revisions, prior permalinks, or metadata)
|
||||
/// Get all pages for a web log (without text or revisions)
|
||||
let all webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM page WHERE web_log_id = @webLogId ORDER BY LOWER(title)"
|
||||
|> Sql.parameters [ webLogIdParam webLogId ]
|
||||
|> Sql.executeAsync pageWithoutText
|
||||
log.LogTrace "Page.all"
|
||||
Custom.list $"{selectWithCriteria Table.Page} ORDER BY LOWER(data ->> '{nameof Page.empty.Title}')"
|
||||
[ webLogContains webLogId ] fromData<Page>
|
||||
|
||||
/// Count all pages for the given web log
|
||||
let countAll webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"SELECT COUNT(id) AS {countName} FROM page WHERE web_log_id = @webLogId"
|
||||
|> Sql.parameters [ webLogIdParam webLogId ]
|
||||
|> Sql.executeRowAsync Map.toCount
|
||||
log.LogTrace "Page.countAll"
|
||||
Count.byContains Table.Page (webLogDoc webLogId)
|
||||
|
||||
/// Count all pages shown in the page list for the given web log
|
||||
let countListed webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"
|
||||
SELECT COUNT(id) AS {countName}
|
||||
FROM page
|
||||
WHERE web_log_id = @webLogId
|
||||
AND is_in_page_list = TRUE"
|
||||
|> Sql.parameters [ webLogIdParam webLogId ]
|
||||
|> Sql.executeRowAsync Map.toCount
|
||||
log.LogTrace "Page.countListed"
|
||||
Count.byContains Table.Page {| webLogDoc webLogId with IsInPageList = true |}
|
||||
|
||||
/// Find a page by its ID (without revisions)
|
||||
let findById pageId webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM page WHERE id = @id AND web_log_id = @webLogId"
|
||||
|> Sql.parameters [ "@id", Sql.string (PageId.toString pageId); webLogIdParam webLogId ]
|
||||
|> Sql.executeAsync toPage
|
||||
|> tryHead
|
||||
log.LogTrace "Page.findById"
|
||||
Document.findByIdAndWebLog<PageId, Page> Table.Page pageId PageId.toString webLogId
|
||||
|
||||
/// Find a complete page by its ID
|
||||
let findFullById pageId webLogId = backgroundTask {
|
||||
log.LogTrace "Page.findFullById"
|
||||
match! findById pageId webLogId with
|
||||
| Some page ->
|
||||
let! withMore = appendPageRevisions page
|
||||
@@ -111,57 +67,40 @@ type PostgresPageData (conn : NpgsqlConnection, ser : JsonSerializer) =
|
||||
|
||||
/// Delete a page by its ID
|
||||
let delete pageId webLogId = backgroundTask {
|
||||
log.LogTrace "Page.delete"
|
||||
match! pageExists pageId webLogId with
|
||||
| true ->
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query
|
||||
"DELETE FROM page_revision WHERE page_id = @id;
|
||||
DELETE FROM page WHERE id = @id"
|
||||
|> Sql.parameters [ "@id", Sql.string (PageId.toString pageId) ]
|
||||
|> Sql.executeNonQueryAsync
|
||||
do! Delete.byId Table.Page (PageId.toString pageId)
|
||||
return true
|
||||
| false -> return false
|
||||
}
|
||||
|
||||
/// Find a page by its permalink for the given web log
|
||||
let findByPermalink permalink webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM page WHERE web_log_id = @webLogId AND permalink = @link"
|
||||
|> Sql.parameters [ webLogIdParam webLogId; "@link", Sql.string (Permalink.toString permalink) ]
|
||||
|> Sql.executeAsync toPage
|
||||
log.LogTrace "Page.findByPermalink"
|
||||
Find.byContains<Page> Table.Page {| webLogDoc webLogId with Permalink = Permalink.toString permalink |}
|
||||
|> tryHead
|
||||
|
||||
/// Find the current permalink within a set of potential prior permalinks for the given web log
|
||||
let findCurrentPermalink permalinks webLogId = backgroundTask {
|
||||
log.LogTrace "Page.findCurrentPermalink"
|
||||
if List.isEmpty permalinks then return None
|
||||
else
|
||||
let linkSql, linkParams = arrayInClause "prior_permalinks" Permalink.toString permalinks
|
||||
let linkSql, linkParam =
|
||||
arrayContains (nameof Page.empty.PriorPermalinks) Permalink.toString permalinks
|
||||
return!
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"SELECT permalink FROM page WHERE web_log_id = @webLogId AND ({linkSql})"
|
||||
|> Sql.parameters (webLogIdParam webLogId :: linkParams)
|
||||
|> Sql.executeAsync Map.toPermalink
|
||||
|> tryHead
|
||||
Custom.single
|
||||
$"""SELECT data ->> '{nameof Page.empty.Permalink}' AS permalink
|
||||
FROM page
|
||||
WHERE {Query.whereDataContains "@criteria"}
|
||||
AND {linkSql}""" [ webLogContains webLogId; linkParam ] Map.toPermalink
|
||||
}
|
||||
|
||||
/// Get all complete pages for the given web log
|
||||
let findFullByWebLog webLogId = backgroundTask {
|
||||
let! pages =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM page WHERE web_log_id = @webLogId"
|
||||
|> Sql.parameters [ webLogIdParam webLogId ]
|
||||
|> Sql.executeAsync toPage
|
||||
let! revisions =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query
|
||||
"SELECT *
|
||||
FROM page_revision pr
|
||||
INNER JOIN page p ON p.id = pr.page_id
|
||||
WHERE p.web_log_id = @webLogId
|
||||
ORDER BY pr.as_of DESC"
|
||||
|> Sql.parameters [ webLogIdParam webLogId ]
|
||||
|> Sql.executeAsync (fun row -> PageId (row.string "page_id"), Map.toRevision row)
|
||||
log.LogTrace "Page.findFullByWebLog"
|
||||
let! pages = Document.findByWebLog<Page> Table.Page webLogId
|
||||
let! revisions = Revisions.findByWebLog Table.PageRevision Table.Page PageId webLogId
|
||||
return
|
||||
pages
|
||||
|> List.map (fun it ->
|
||||
@@ -170,95 +109,53 @@ type PostgresPageData (conn : NpgsqlConnection, ser : JsonSerializer) =
|
||||
|
||||
/// Get all listed pages for the given web log (without revisions or text)
|
||||
let findListed webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM page WHERE web_log_id = @webLogId AND is_in_page_list = TRUE ORDER BY LOWER(title)"
|
||||
|> Sql.parameters [ webLogIdParam webLogId ]
|
||||
|> Sql.executeAsync pageWithoutText
|
||||
log.LogTrace "Page.findListed"
|
||||
Custom.list $"{selectWithCriteria Table.Page} ORDER BY LOWER(data ->> '{nameof Page.empty.Title}')"
|
||||
[ "@criteria", Query.jsonbDocParam {| webLogDoc webLogId with IsInPageList = true |} ]
|
||||
pageWithoutText
|
||||
|
||||
/// Get a page of pages for the given web log (without revisions)
|
||||
let findPageOfPages webLogId pageNbr =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query
|
||||
"SELECT *
|
||||
FROM page
|
||||
WHERE web_log_id = @webLogId
|
||||
ORDER BY LOWER(title)
|
||||
LIMIT @pageSize OFFSET @toSkip"
|
||||
|> Sql.parameters [ webLogIdParam webLogId; "@pageSize", Sql.int 26; "@toSkip", Sql.int ((pageNbr - 1) * 25) ]
|
||||
|> Sql.executeAsync toPage
|
||||
log.LogTrace "Page.findPageOfPages"
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Page}
|
||||
ORDER BY LOWER(data->>'{nameof Page.empty.Title}')
|
||||
LIMIT @pageSize OFFSET @toSkip"
|
||||
[ webLogContains webLogId; "@pageSize", Sql.int 26; "@toSkip", Sql.int ((pageNbr - 1) * 25) ]
|
||||
fromData<Page>
|
||||
|
||||
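A note on the paging arithmetic above (illustrative, not part of this commit): the admin page list is paged 25 items at a time, and the query asks for one extra row, presumably so the caller can tell whether a further page of results exists.

/// Illustrative only: the LIMIT/OFFSET values findPageOfPages passes for a given page number
let pageListLimits pageNbr =
    let perPage = 25
    {| PageSize = perPage + 1; ToSkip = (pageNbr - 1) * perPage |}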
/// The INSERT statement for a page
|
||||
let pageInsert =
|
||||
"INSERT INTO page (
|
||||
id, web_log_id, author_id, title, permalink, prior_permalinks, published_on, updated_on, is_in_page_list,
|
||||
template, page_text, meta_items
|
||||
) VALUES (
|
||||
@id, @webLogId, @authorId, @title, @permalink, @priorPermalinks, @publishedOn, @updatedOn, @isInPageList,
|
||||
@template, @text, @metaItems
|
||||
)"
|
||||
|
||||
/// The parameters for saving a page
|
||||
let pageParams (page : Page) = [
|
||||
webLogIdParam page.WebLogId
|
||||
"@id", Sql.string (PageId.toString page.Id)
|
||||
"@authorId", Sql.string (WebLogUserId.toString page.AuthorId)
|
||||
"@title", Sql.string page.Title
|
||||
"@permalink", Sql.string (Permalink.toString page.Permalink)
|
||||
"@isInPageList", Sql.bool page.IsInPageList
|
||||
"@template", Sql.stringOrNone page.Template
|
||||
"@text", Sql.string page.Text
|
||||
"@metaItems", Sql.jsonb (Utils.serialize ser page.Metadata)
|
||||
"@priorPermalinks", Sql.stringArray (page.PriorPermalinks |> List.map Permalink.toString |> Array.ofList)
|
||||
typedParam "publishedOn" page.PublishedOn
|
||||
typedParam "updatedOn" page.UpdatedOn
|
||||
]
|
||||
|
||||
/// Restore pages from a backup
|
||||
let restore (pages : Page list) = backgroundTask {
|
||||
log.LogTrace "Page.restore"
|
||||
let revisions = pages |> List.collect (fun p -> p.Revisions |> List.map (fun r -> p.Id, r))
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
Configuration.dataSource ()
|
||||
|> Sql.fromDataSource
|
||||
|> Sql.executeTransactionAsync [
|
||||
pageInsert, pages |> List.map pageParams
|
||||
revInsert, revisions |> List.map (fun (pageId, rev) -> revParams pageId rev)
|
||||
Query.insert Table.Page,
|
||||
pages
|
||||
|> List.map (fun page -> Query.docParameters (PageId.toString page.Id) { page with Revisions = [] })
|
||||
Revisions.insertSql Table.PageRevision,
|
||||
revisions |> List.map (fun (pageId, rev) -> Revisions.revParams pageId PageId.toString rev)
|
||||
]
|
||||
()
|
||||
}
|
||||
|
||||
/// Save a page
|
||||
let save (page : Page) = backgroundTask {
|
||||
log.LogTrace "Page.save"
|
||||
let! oldPage = findFullById page.Id page.WebLogId
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"
|
||||
{pageInsert} ON CONFLICT (id) DO UPDATE
|
||||
SET author_id = EXCLUDED.author_id,
|
||||
title = EXCLUDED.title,
|
||||
permalink = EXCLUDED.permalink,
|
||||
prior_permalinks = EXCLUDED.prior_permalinks,
|
||||
published_on = EXCLUDED.published_on,
|
||||
updated_on = EXCLUDED.updated_on,
|
||||
is_in_page_list = EXCLUDED.is_in_page_list,
|
||||
template = EXCLUDED.template,
|
||||
page_text = EXCLUDED.page_text,
|
||||
meta_items = EXCLUDED.meta_items"
|
||||
|> Sql.parameters (pageParams page)
|
||||
|> Sql.executeNonQueryAsync
|
||||
do! save Table.Page (PageId.toString page.Id) { page with Revisions = [] }
|
||||
do! updatePageRevisions page.Id (match oldPage with Some p -> p.Revisions | None -> []) page.Revisions
|
||||
()
|
||||
}
|
||||
|
||||
/// Update a page's prior permalinks
|
||||
let updatePriorPermalinks pageId webLogId permalinks = backgroundTask {
|
||||
log.LogTrace "Page.updatePriorPermalinks"
|
||||
match! pageExists pageId webLogId with
|
||||
| true ->
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "UPDATE page SET prior_permalinks = @prior WHERE id = @id"
|
||||
|> Sql.parameters
|
||||
[ "@id", Sql.string (PageId.toString pageId)
|
||||
"@prior", Sql.stringArray (permalinks |> List.map Permalink.toString |> Array.ofList) ]
|
||||
|> Sql.executeNonQueryAsync
|
||||
do! Update.partialById Table.Page (PageId.toString pageId) {| PriorPermalinks = permalinks |}
|
||||
return true
|
||||
| false -> return false
|
||||
}
|
||||
|
@@ -1,128 +1,61 @@
|
||||
namespace MyWebLog.Data.Postgres
|
||||
|
||||
open BitBadger.Npgsql.FSharp.Documents
|
||||
open Microsoft.Extensions.Logging
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
open Newtonsoft.Json
|
||||
open NodaTime
|
||||
open Npgsql
|
||||
open NodaTime.Text
|
||||
open Npgsql.FSharp
|
||||
|
||||
/// PostgreSQL myWebLog post data implementation
|
||||
type PostgresPostData (conn : NpgsqlConnection, ser : JsonSerializer) =
|
||||
type PostgresPostData (log : ILogger) =
|
||||
|
||||
// SUPPORT FUNCTIONS
|
||||
|
||||
/// Append revisions to a post
|
||||
let appendPostRevisions (post : Post) = backgroundTask {
|
||||
let! revisions =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT as_of, revision_text FROM post_revision WHERE post_id = @id ORDER BY as_of DESC"
|
||||
|> Sql.parameters [ "@id", Sql.string (PostId.toString post.Id) ]
|
||||
|> Sql.executeAsync Map.toRevision
|
||||
log.LogTrace "Post.appendPostRevisions"
|
||||
let! revisions = Revisions.findByEntityId Table.PostRevision Table.Post post.Id PostId.toString
|
||||
return { post with Revisions = revisions }
|
||||
}
|
||||
|
||||
/// The SELECT statement for a post that will include category IDs
|
||||
let selectPost =
|
||||
"SELECT *, ARRAY(SELECT cat.category_id FROM post_category cat WHERE cat.post_id = p.id) AS category_ids
|
||||
FROM post p"
|
||||
|
||||
/// Shorthand for mapping to a post
|
||||
let toPost = Map.toPost ser
|
||||
|
||||
/// Return a post with no revisions, prior permalinks, or text
|
||||
let postWithoutText row =
|
||||
{ toPost row with Text = "" }
|
||||
|
||||
/// The INSERT statement for a post/category cross-reference
|
||||
let catInsert = "INSERT INTO post_category VALUES (@postId, @categoryId)"
|
||||
|
||||
/// Parameters for adding or updating a post/category cross-reference
|
||||
let catParams postId cat = [
|
||||
"@postId", Sql.string (PostId.toString postId)
|
||||
"categoryId", Sql.string (CategoryId.toString cat)
|
||||
]
|
||||
|
||||
/// Update a post's assigned categories
|
||||
let updatePostCategories postId oldCats newCats = backgroundTask {
|
||||
let toDelete, toAdd = Utils.diffLists oldCats newCats CategoryId.toString
|
||||
if not (List.isEmpty toDelete) || not (List.isEmpty toAdd) then
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.executeTransactionAsync [
|
||||
if not (List.isEmpty toDelete) then
|
||||
"DELETE FROM post_category WHERE post_id = @postId AND category_id = @categoryId",
|
||||
toDelete |> List.map (catParams postId)
|
||||
if not (List.isEmpty toAdd) then
|
||||
catInsert, toAdd |> List.map (catParams postId)
|
||||
]
|
||||
()
|
||||
}
|
||||
|
||||
/// The INSERT statement for a post revision
|
||||
let revInsert = "INSERT INTO post_revision VALUES (@postId, @asOf, @text)"
|
||||
|
||||
/// The parameters for adding a post revision
|
||||
let revParams postId rev = [
|
||||
typedParam "asOf" rev.AsOf
|
||||
"@postId", Sql.string (PostId.toString postId)
|
||||
"@text", Sql.string (MarkupText.toString rev.Text)
|
||||
]
|
||||
{ fromData<Post> row with Text = "" }
|
||||
|
||||
/// Update a post's revisions
|
||||
let updatePostRevisions postId oldRevs newRevs = backgroundTask {
|
||||
let toDelete, toAdd = Utils.diffRevisions oldRevs newRevs
|
||||
if not (List.isEmpty toDelete) || not (List.isEmpty toAdd) then
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.executeTransactionAsync [
|
||||
if not (List.isEmpty toDelete) then
|
||||
"DELETE FROM post_revision WHERE post_id = @postId AND as_of = @asOf",
|
||||
toDelete
|
||||
|> List.map (fun it -> [
|
||||
"@postId", Sql.string (PostId.toString postId)
|
||||
typedParam "asOf" it.AsOf
|
||||
])
|
||||
if not (List.isEmpty toAdd) then
|
||||
revInsert, toAdd |> List.map (revParams postId)
|
||||
]
|
||||
()
|
||||
}
|
||||
let updatePostRevisions postId oldRevs newRevs =
|
||||
log.LogTrace "Post.updatePostRevisions"
|
||||
Revisions.update Table.PostRevision Table.Post postId PostId.toString oldRevs newRevs
|
||||
|
||||
/// Does the given post exist?
|
||||
let postExists postId webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"SELECT EXISTS (SELECT 1 FROM post WHERE id = @id AND web_log_id = @webLogId) AS {existsName}"
|
||||
|> Sql.parameters [ "@id", Sql.string (PostId.toString postId); webLogIdParam webLogId ]
|
||||
|> Sql.executeRowAsync Map.toExists
|
||||
log.LogTrace "Post.postExists"
|
||||
Document.existsByWebLog Table.Post postId PostId.toString webLogId
|
||||
|
||||
// IMPLEMENTATION FUNCTIONS
|
||||
|
||||
/// Count posts in a status for the given web log
|
||||
let countByStatus status webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"SELECT COUNT(id) AS {countName} FROM post WHERE web_log_id = @webLogId AND status = @status"
|
||||
|> Sql.parameters [ webLogIdParam webLogId; "@status", Sql.string (PostStatus.toString status) ]
|
||||
|> Sql.executeRowAsync Map.toCount
|
||||
log.LogTrace "Post.countByStatus"
|
||||
Count.byContains Table.Post {| webLogDoc webLogId with Status = PostStatus.toString status |}
|
||||
|
||||
/// Find a post by its ID for the given web log (excluding revisions)
|
||||
let findById postId webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"{selectPost} WHERE id = @id AND web_log_id = @webLogId"
|
||||
|> Sql.parameters [ "@id", Sql.string (PostId.toString postId); webLogIdParam webLogId ]
|
||||
|> Sql.executeAsync toPost
|
||||
|> tryHead
|
||||
let findById postId webLogId =
|
||||
log.LogTrace "Post.findById"
|
||||
Document.findByIdAndWebLog<PostId, Post> Table.Post postId PostId.toString webLogId
|
||||
|
||||
/// Find a post by its permalink for the given web log (excluding revisions and prior permalinks)
|
||||
let findByPermalink permalink webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"{selectPost} WHERE web_log_id = @webLogId AND permalink = @link"
|
||||
|> Sql.parameters [ webLogIdParam webLogId; "@link", Sql.string (Permalink.toString permalink) ]
|
||||
|> Sql.executeAsync toPost
|
||||
|> tryHead
|
||||
log.LogTrace "Post.findByPermalink"
|
||||
Custom.single (selectWithCriteria Table.Post)
|
||||
[ "@criteria",
|
||||
Query.jsonbDocParam {| webLogDoc webLogId with Permalink = Permalink.toString permalink |}
|
||||
] fromData<Post>
|
||||
|
||||
/// Find a complete post by its ID for the given web log
|
||||
let findFullById postId webLogId = backgroundTask {
|
||||
log.LogTrace "Post.findFullById"
|
||||
match! findById postId webLogId with
|
||||
| Some post ->
|
||||
let! withRevisions = appendPostRevisions post
|
||||
@@ -132,50 +65,38 @@ type PostgresPostData (conn : NpgsqlConnection, ser : JsonSerializer) =
|
||||
|
||||
/// Delete a post by its ID for the given web log
|
||||
let delete postId webLogId = backgroundTask {
|
||||
log.LogTrace "Post.delete"
|
||||
match! postExists postId webLogId with
|
||||
| true ->
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query
|
||||
"DELETE FROM post_revision WHERE post_id = @id;
|
||||
DELETE FROM post_category WHERE post_id = @id;
|
||||
DELETE FROM post WHERE id = @id"
|
||||
|> Sql.parameters [ "@id", Sql.string (PostId.toString postId) ]
|
||||
|> Sql.executeNonQueryAsync
|
||||
let theId = PostId.toString postId
|
||||
do! Custom.nonQuery
|
||||
$"""DELETE FROM {Table.PostComment} WHERE {Query.whereDataContains "@criteria"};
|
||||
DELETE FROM {Table.Post} WHERE id = @id"""
|
||||
[ "@id", Sql.string theId; "@criteria", Query.jsonbDocParam {| PostId = theId |} ]
|
||||
return true
|
||||
| false -> return false
|
||||
}
|
||||
|
||||
/// Find the current permalink from a list of potential prior permalinks for the given web log
|
||||
let findCurrentPermalink permalinks webLogId = backgroundTask {
|
||||
log.LogTrace "Post.findCurrentPermalink"
|
||||
if List.isEmpty permalinks then return None
|
||||
else
|
||||
let linkSql, linkParams = arrayInClause "prior_permalinks" Permalink.toString permalinks
|
||||
let linkSql, linkParam =
|
||||
arrayContains (nameof Post.empty.PriorPermalinks) Permalink.toString permalinks
|
||||
return!
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"SELECT permalink FROM post WHERE web_log_id = @webLogId AND ({linkSql})"
|
||||
|> Sql.parameters (webLogIdParam webLogId :: linkParams)
|
||||
|> Sql.executeAsync Map.toPermalink
|
||||
|> tryHead
|
||||
Custom.single
|
||||
$"""SELECT data ->> '{nameof Post.empty.Permalink}' AS permalink
|
||||
FROM {Table.Post}
|
||||
WHERE {Query.whereDataContains "@criteria"}
|
||||
AND {linkSql}""" [ webLogContains webLogId; linkParam ] Map.toPermalink
|
||||
}
|
||||
|
||||
/// Get all complete posts for the given web log
|
||||
let findFullByWebLog webLogId = backgroundTask {
|
||||
let! posts =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"{selectPost} WHERE web_log_id = @webLogId"
|
||||
|> Sql.parameters [ webLogIdParam webLogId ]
|
||||
|> Sql.executeAsync toPost
|
||||
let! revisions =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query
|
||||
"SELECT *
|
||||
FROM post_revision pr
|
||||
INNER JOIN post p ON p.id = pr.post_id
|
||||
WHERE p.web_log_id = @webLogId
|
||||
ORDER BY as_of DESC"
|
||||
|> Sql.parameters [ webLogIdParam webLogId ]
|
||||
|> Sql.executeAsync (fun row -> PostId (row.string "post_id"), Map.toRevision row)
|
||||
log.LogTrace "Post.findFullByWebLog"
|
||||
let! posts = Document.findByWebLog<Post> Table.Post webLogId
|
||||
let! revisions = Revisions.findByWebLog Table.PostRevision Table.Post PostId webLogId
|
||||
return
|
||||
posts
|
||||
|> List.map (fun it ->
|
||||
@@ -184,174 +105,103 @@ type PostgresPostData (conn : NpgsqlConnection, ser : JsonSerializer) =
|
||||
|
||||
/// Get a page of categorized posts for the given web log (excludes revisions)
|
||||
let findPageOfCategorizedPosts webLogId categoryIds pageNbr postsPerPage =
|
||||
let catSql, catParams = inClause "AND pc.category_id" "catId" CategoryId.toString categoryIds
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"
|
||||
{selectPost}
|
||||
INNER JOIN post_category pc ON pc.post_id = p.id
|
||||
WHERE p.web_log_id = @webLogId
|
||||
AND p.status = @status
|
||||
{catSql}
|
||||
ORDER BY published_on DESC
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
|> Sql.parameters
|
||||
[ webLogIdParam webLogId
|
||||
"@status", Sql.string (PostStatus.toString Published)
|
||||
yield! catParams ]
|
||||
|> Sql.executeAsync toPost
|
||||
log.LogTrace "Post.findPageOfCategorizedPosts"
|
||||
let catSql, catParam = arrayContains (nameof Post.empty.CategoryIds) CategoryId.toString categoryIds
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Post}
|
||||
AND {catSql}
|
||||
ORDER BY data ->> '{nameof Post.empty.PublishedOn}' DESC
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
[ "@criteria", Query.jsonbDocParam {| webLogDoc webLogId with Status = PostStatus.toString Published |}
|
||||
catParam
|
||||
] fromData<Post>
|
||||
|
||||
/// Get a page of posts for the given web log (excludes text and revisions)
|
||||
let findPageOfPosts webLogId pageNbr postsPerPage =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"
|
||||
{selectPost}
|
||||
WHERE web_log_id = @webLogId
|
||||
ORDER BY published_on DESC NULLS FIRST, updated_on
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
|> Sql.parameters [ webLogIdParam webLogId ]
|
||||
|> Sql.executeAsync postWithoutText
|
||||
log.LogTrace "Post.findPageOfPosts"
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Post}
|
||||
ORDER BY data ->> '{nameof Post.empty.PublishedOn}' DESC NULLS FIRST,
|
||||
data ->> '{nameof Post.empty.UpdatedOn}'
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
[ webLogContains webLogId ] postWithoutText
|
||||
|
||||
/// Get a page of published posts for the given web log (excludes revisions)
|
||||
let findPageOfPublishedPosts webLogId pageNbr postsPerPage =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"
|
||||
{selectPost}
|
||||
WHERE web_log_id = @webLogId
|
||||
AND status = @status
|
||||
ORDER BY published_on DESC
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
|> Sql.parameters [ webLogIdParam webLogId; "@status", Sql.string (PostStatus.toString Published) ]
|
||||
|> Sql.executeAsync toPost
|
||||
log.LogTrace "Post.findPageOfPublishedPosts"
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Post}
|
||||
ORDER BY data ->> '{nameof Post.empty.PublishedOn}' DESC
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
[ "@criteria", Query.jsonbDocParam {| webLogDoc webLogId with Status = PostStatus.toString Published |} ]
|
||||
fromData<Post>
|
||||
|
||||
/// Get a page of tagged posts for the given web log (excludes revisions and prior permalinks)
|
||||
let findPageOfTaggedPosts webLogId (tag : string) pageNbr postsPerPage =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"
|
||||
{selectPost}
|
||||
WHERE web_log_id = @webLogId
|
||||
AND status = @status
|
||||
AND tags && ARRAY[@tag]
|
||||
ORDER BY published_on DESC
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
|> Sql.parameters
|
||||
[ webLogIdParam webLogId
|
||||
"@status", Sql.string (PostStatus.toString Published)
|
||||
"@tag", Sql.string tag
|
||||
]
|
||||
|> Sql.executeAsync toPost
|
||||
log.LogTrace "Post.findPageOfTaggedPosts"
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Post}
|
||||
AND data['{nameof Post.empty.Tags}'] @> @tag
|
||||
ORDER BY data ->> '{nameof Post.empty.PublishedOn}' DESC
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
[ "@criteria", Query.jsonbDocParam {| webLogDoc webLogId with Status = PostStatus.toString Published |}
|
||||
"@tag", Query.jsonbDocParam [| tag |]
|
||||
] fromData<Post>
|
||||
|
||||
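A brief note on the tag filter above (illustrative, not part of this commit): the single tag is wrapped in an array so that JSONB containment performs the membership test; data['Tags'] @> '["some-tag"]' is true whenever the stored Tags array includes that element.

/// Illustrative only: the parameter shape findPageOfTaggedPosts uses for the tag filter
let tagParam (tag : string) =
    "@tag", Query.jsonbDocParam [| tag |]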
/// Find the next newest and oldest post from a publish date for the given web log
|
||||
let findSurroundingPosts webLogId (publishedOn : Instant) = backgroundTask {
|
||||
let queryParams () = Sql.parameters [
|
||||
webLogIdParam webLogId
|
||||
typedParam "publishedOn" publishedOn
|
||||
"@status", Sql.string (PostStatus.toString Published)
|
||||
let findSurroundingPosts webLogId publishedOn = backgroundTask {
|
||||
log.LogTrace "Post.findSurroundingPosts"
|
||||
let queryParams () = [
|
||||
"@criteria", Query.jsonbDocParam {| webLogDoc webLogId with Status = PostStatus.toString Published |}
|
||||
"@publishedOn", Sql.string ((InstantPattern.General.Format publishedOn).Substring (0, 19))
|
||||
]
|
||||
let pubField = nameof Post.empty.PublishedOn
|
||||
let! older =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"
|
||||
{selectPost}
|
||||
WHERE web_log_id = @webLogId
|
||||
AND status = @status
|
||||
AND published_on < @publishedOn
|
||||
ORDER BY published_on DESC
|
||||
LIMIT 1"
|
||||
|> queryParams ()
|
||||
|> Sql.executeAsync toPost
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Post}
|
||||
AND SUBSTR(data ->> '{pubField}', 1, 19) < @publishedOn
|
||||
ORDER BY data ->> '{pubField}' DESC
|
||||
LIMIT 1" (queryParams ()) fromData<Post>
|
||||
let! newer =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"
|
||||
{selectPost}
|
||||
WHERE web_log_id = @webLogId
|
||||
AND status = @status
|
||||
AND published_on > @publishedOn
|
||||
ORDER BY published_on
|
||||
LIMIT 1"
|
||||
|> queryParams ()
|
||||
|> Sql.executeAsync toPost
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Post}
|
||||
AND SUBSTR(data ->> '{pubField}', 1, 19) > @publishedOn
|
||||
ORDER BY data ->> '{pubField}'
|
||||
LIMIT 1" (queryParams ()) fromData<Post>
|
||||
return List.tryHead older, List.tryHead newer
|
||||
}
|
||||
|
||||
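For context on the 19-character comparison above (illustrative, not part of this commit): NodaTime's InstantPattern.General renders an Instant as an ISO-8601 string such as 2023-01-15T08:30:00Z, so the first 19 characters form a lexically sortable, second-precision key; the SQL takes the same prefix of the stored PublishedOn value, so the comparison ignores any fractional seconds in the serialized date.

open NodaTime
open NodaTime.Text

/// Illustrative only: the text key findSurroundingPosts compares against the stored dates
let publishedKey (publishedOn : Instant) =
    (InstantPattern.General.Format publishedOn).Substring (0, 19)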
/// The INSERT statement for a post
|
||||
let postInsert =
|
||||
"INSERT INTO post (
|
||||
id, web_log_id, author_id, status, title, permalink, prior_permalinks, published_on, updated_on,
|
||||
template, post_text, tags, meta_items, episode
|
||||
) VALUES (
|
||||
@id, @webLogId, @authorId, @status, @title, @permalink, @priorPermalinks, @publishedOn, @updatedOn,
|
||||
@template, @text, @tags, @metaItems, @episode
|
||||
)"
|
||||
|
||||
/// The parameters for saving a post
|
||||
let postParams (post : Post) = [
|
||||
webLogIdParam post.WebLogId
|
||||
"@id", Sql.string (PostId.toString post.Id)
|
||||
"@authorId", Sql.string (WebLogUserId.toString post.AuthorId)
|
||||
"@status", Sql.string (PostStatus.toString post.Status)
|
||||
"@title", Sql.string post.Title
|
||||
"@permalink", Sql.string (Permalink.toString post.Permalink)
|
||||
"@template", Sql.stringOrNone post.Template
|
||||
"@text", Sql.string post.Text
|
||||
"@priorPermalinks", Sql.stringArray (post.PriorPermalinks |> List.map Permalink.toString |> Array.ofList)
|
||||
"@episode", Sql.jsonbOrNone (post.Episode |> Option.map (Utils.serialize ser))
|
||||
"@tags", Sql.stringArrayOrNone (if List.isEmpty post.Tags then None else Some (Array.ofList post.Tags))
|
||||
"@metaItems",
|
||||
if List.isEmpty post.Metadata then None else Some (Utils.serialize ser post.Metadata)
|
||||
|> Sql.jsonbOrNone
|
||||
optParam "publishedOn" post.PublishedOn
|
||||
typedParam "updatedOn" post.UpdatedOn
|
||||
]
|
||||
|
||||
/// Save a post
|
||||
let save (post : Post) = backgroundTask {
|
||||
log.LogTrace "Post.save"
|
||||
let! oldPost = findFullById post.Id post.WebLogId
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"
|
||||
{postInsert} ON CONFLICT (id) DO UPDATE
|
||||
SET author_id = EXCLUDED.author_id,
|
||||
status = EXCLUDED.status,
|
||||
title = EXCLUDED.title,
|
||||
permalink = EXCLUDED.permalink,
|
||||
prior_permalinks = EXCLUDED.prior_permalinks,
|
||||
published_on = EXCLUDED.published_on,
|
||||
updated_on = EXCLUDED.updated_on,
|
||||
template = EXCLUDED.template,
|
||||
post_text = EXCLUDED.post_text,
|
||||
tags = EXCLUDED.tags,
|
||||
meta_items = EXCLUDED.meta_items,
|
||||
episode = EXCLUDED.episode"
|
||||
|> Sql.parameters (postParams post)
|
||||
|> Sql.executeNonQueryAsync
|
||||
do! updatePostCategories post.Id (match oldPost with Some p -> p.CategoryIds | None -> []) post.CategoryIds
|
||||
do! updatePostRevisions post.Id (match oldPost with Some p -> p.Revisions | None -> []) post.Revisions
|
||||
do! save Table.Post (PostId.toString post.Id) { post with Revisions = [] }
|
||||
do! updatePostRevisions post.Id (match oldPost with Some p -> p.Revisions | None -> []) post.Revisions
|
||||
}
|
||||
|
||||
/// Restore posts from a backup
|
||||
let restore posts = backgroundTask {
|
||||
let cats = posts |> List.collect (fun p -> p.CategoryIds |> List.map (fun c -> p.Id, c))
|
||||
let revisions = posts |> List.collect (fun p -> p.Revisions |> List.map (fun r -> p.Id, r))
|
||||
log.LogTrace "Post.restore"
|
||||
let revisions = posts |> List.collect (fun p -> p.Revisions |> List.map (fun r -> p.Id, r))
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
Configuration.dataSource ()
|
||||
|> Sql.fromDataSource
|
||||
|> Sql.executeTransactionAsync [
|
||||
postInsert, posts |> List.map postParams
|
||||
catInsert, cats |> List.map (fun (postId, catId) -> catParams postId catId)
|
||||
revInsert, revisions |> List.map (fun (postId, rev) -> revParams postId rev)
|
||||
Query.insert Table.Post,
|
||||
posts
|
||||
|> List.map (fun post -> Query.docParameters (PostId.toString post.Id) { post with Revisions = [] })
|
||||
Revisions.insertSql Table.PostRevision,
|
||||
revisions |> List.map (fun (postId, rev) -> Revisions.revParams postId PostId.toString rev)
|
||||
]
|
||||
()
|
||||
}
|
||||
|
||||
/// Update prior permalinks for a post
|
||||
let updatePriorPermalinks postId webLogId permalinks = backgroundTask {
|
||||
log.LogTrace "Post.updatePriorPermalinks"
|
||||
match! postExists postId webLogId with
|
||||
| true ->
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "UPDATE post SET prior_permalinks = @prior WHERE id = @id"
|
||||
|> Sql.parameters
|
||||
[ "@id", Sql.string (PostId.toString postId)
|
||||
"@prior", Sql.stringArray (permalinks |> List.map Permalink.toString |> Array.ofList) ]
|
||||
|> Sql.executeNonQueryAsync
|
||||
do! Update.partialById Table.Post (PostId.toString postId) {| PriorPermalinks = permalinks |}
|
||||
return true
|
||||
| false -> return false
|
||||
}
|
||||
|
@@ -1,100 +1,61 @@
|
||||
namespace MyWebLog.Data.Postgres
|
||||
|
||||
open BitBadger.Npgsql.FSharp.Documents
|
||||
open Microsoft.Extensions.Logging
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
open Npgsql
|
||||
open Npgsql.FSharp
|
||||
|
||||
/// PostgreSQL myWebLog tag mapping data implementation
|
||||
type PostgresTagMapData (conn : NpgsqlConnection) =
|
||||
|
||||
type PostgresTagMapData (log : ILogger) =
|
||||
|
||||
/// Find a tag mapping by its ID for the given web log
|
||||
let findById tagMapId webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM tag_map WHERE id = @id AND web_log_id = @webLogId"
|
||||
|> Sql.parameters [ "@id", Sql.string (TagMapId.toString tagMapId); webLogIdParam webLogId ]
|
||||
|> Sql.executeAsync Map.toTagMap
|
||||
|> tryHead
|
||||
log.LogTrace "TagMap.findById"
|
||||
Document.findByIdAndWebLog<TagMapId, TagMap> Table.TagMap tagMapId TagMapId.toString webLogId
|
||||
|
||||
/// Delete a tag mapping for the given web log
|
||||
let delete tagMapId webLogId = backgroundTask {
|
||||
let idParams = [ "@id", Sql.string (TagMapId.toString tagMapId) ]
|
||||
let! exists =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"
|
||||
SELECT EXISTS
|
||||
(SELECT 1 FROM tag_map WHERE id = @id AND web_log_id = @webLogId)
|
||||
AS {existsName}"
|
||||
|> Sql.parameters (webLogIdParam webLogId :: idParams)
|
||||
|> Sql.executeRowAsync Map.toExists
|
||||
log.LogTrace "TagMap.delete"
|
||||
let! exists = Document.existsByWebLog Table.TagMap tagMapId TagMapId.toString webLogId
|
||||
if exists then
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "DELETE FROM tag_map WHERE id = @id"
|
||||
|> Sql.parameters idParams
|
||||
|> Sql.executeNonQueryAsync
|
||||
do! Delete.byId Table.TagMap (TagMapId.toString tagMapId)
|
||||
return true
|
||||
else return false
|
||||
}
|
||||
|
||||
/// Find a tag mapping by its URL value for the given web log
|
||||
let findByUrlValue urlValue webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM tag_map WHERE web_log_id = @webLogId AND url_value = @urlValue"
|
||||
|> Sql.parameters [ webLogIdParam webLogId; "@urlValue", Sql.string urlValue ]
|
||||
|> Sql.executeAsync Map.toTagMap
|
||||
|> tryHead
|
||||
|
||||
let findByUrlValue (urlValue : string) webLogId =
|
||||
log.LogTrace "TagMap.findByUrlValue"
|
||||
Custom.single (selectWithCriteria Table.TagMap)
|
||||
[ "@criteria", Query.jsonbDocParam {| webLogDoc webLogId with UrlValue = urlValue |} ]
|
||||
fromData<TagMap>
|
||||
|
||||
/// Get all tag mappings for the given web log
|
||||
let findByWebLog webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM tag_map WHERE web_log_id = @webLogId ORDER BY tag"
|
||||
|> Sql.parameters [ webLogIdParam webLogId ]
|
||||
|> Sql.executeAsync Map.toTagMap
|
||||
log.LogTrace "TagMap.findByWebLog"
|
||||
Custom.list $"{selectWithCriteria Table.TagMap} ORDER BY data ->> 'tag'" [ webLogContains webLogId ]
|
||||
fromData<TagMap>
|
||||
|
||||
/// Find any tag mappings in a list of tags for the given web log
|
||||
let findMappingForTags tags webLogId =
|
||||
let tagSql, tagParams = inClause "AND tag" "tag" id tags
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"SELECT * FROM tag_map WHERE web_log_id = @webLogId {tagSql}"
|
||||
|> Sql.parameters (webLogIdParam webLogId :: tagParams)
|
||||
|> Sql.executeAsync Map.toTagMap
|
||||
|
||||
/// The INSERT statement for a tag mapping
|
||||
let tagMapInsert =
|
||||
"INSERT INTO tag_map (
|
||||
id, web_log_id, tag, url_value
|
||||
) VALUES (
|
||||
@id, @webLogId, @tag, @urlValue
|
||||
)"
|
||||
|
||||
/// The parameters for saving a tag mapping
|
||||
let tagMapParams (tagMap : TagMap) = [
|
||||
webLogIdParam tagMap.WebLogId
|
||||
"@id", Sql.string (TagMapId.toString tagMap.Id)
|
||||
"@tag", Sql.string tagMap.Tag
|
||||
"@urlValue", Sql.string tagMap.UrlValue
|
||||
]
|
||||
log.LogTrace "TagMap.findMappingForTags"
|
||||
let tagSql, tagParam = arrayContains (nameof TagMap.empty.Tag) id tags
|
||||
Custom.list $"{selectWithCriteria Table.TagMap} AND {tagSql}" [ webLogContains webLogId; tagParam ]
|
||||
fromData<TagMap>
|
||||
|
||||
/// Save a tag mapping
|
||||
let save tagMap = backgroundTask {
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"
|
||||
{tagMapInsert} ON CONFLICT (id) DO UPDATE
|
||||
SET tag = EXCLUDED.tag,
|
||||
url_value = EXCLUDED.url_value"
|
||||
|> Sql.parameters (tagMapParams tagMap)
|
||||
|> Sql.executeNonQueryAsync
|
||||
()
|
||||
}
|
||||
let save (tagMap : TagMap) =
|
||||
save Table.TagMap (TagMapId.toString tagMap.Id) tagMap
|
||||
|
||||
/// Restore tag mappings from a backup
|
||||
let restore tagMaps = backgroundTask {
|
||||
let restore (tagMaps : TagMap list) = backgroundTask {
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
Configuration.dataSource ()
|
||||
|> Sql.fromDataSource
|
||||
|> Sql.executeTransactionAsync [
|
||||
tagMapInsert, tagMaps |> List.map tagMapParams
|
||||
Query.insert Table.TagMap,
|
||||
tagMaps |> List.map (fun tagMap -> Query.docParameters (TagMapId.toString tagMap.Id) tagMap)
|
||||
]
|
||||
()
|
||||
}
|
||||
|
@@ -1,129 +1,53 @@
|
||||
namespace MyWebLog.Data.Postgres
|
||||
|
||||
open BitBadger.Npgsql.FSharp.Documents
|
||||
open Microsoft.Extensions.Logging
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
open Npgsql
|
||||
open Npgsql.FSharp
|
||||
|
||||
/// PostgreSQL myWebLog theme data implementation
|
||||
type PostgresThemeData (conn : NpgsqlConnection) =
|
||||
type PostgresThemeData (log : ILogger) =
|
||||
|
||||
/// Clear out the template text from a theme
|
||||
let withoutTemplateText row =
|
||||
let theme = fromData<Theme> row
|
||||
{ theme with Templates = theme.Templates |> List.map (fun template -> { template with Text = "" }) }
|
||||
|
||||
/// Retrieve all themes (except 'admin'; excludes template text)
|
||||
let all () = backgroundTask {
|
||||
let! themes =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM theme WHERE id <> 'admin' ORDER BY id"
|
||||
|> Sql.executeAsync Map.toTheme
|
||||
let! templates =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT name, theme_id FROM theme_template WHERE theme_id <> 'admin' ORDER BY name"
|
||||
|> Sql.executeAsync (fun row -> ThemeId (row.string "theme_id"), Map.toThemeTemplate false row)
|
||||
return
|
||||
themes
|
||||
|> List.map (fun t ->
|
||||
{ t with Templates = templates |> List.filter (fun tt -> fst tt = t.Id) |> List.map snd })
|
||||
}
|
||||
let all () =
|
||||
log.LogTrace "Theme.all"
|
||||
Custom.list $"{Query.selectFromTable Table.Theme} WHERE id <> 'admin' ORDER BY id" [] withoutTemplateText
|
||||
|
||||
/// Does a given theme exist?
|
||||
let exists themeId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT EXISTS (SELECT 1 FROM theme WHERE id = @id) AS does_exist"
|
||||
|> Sql.parameters [ "@id", Sql.string (ThemeId.toString themeId) ]
|
||||
|> Sql.executeRowAsync Map.toExists
|
||||
log.LogTrace "Theme.exists"
|
||||
Exists.byId Table.Theme (ThemeId.toString themeId)
|
||||
|
||||
/// Find a theme by its ID
|
||||
let findById themeId = backgroundTask {
|
||||
let themeIdParam = [ "@id", Sql.string (ThemeId.toString themeId) ]
|
||||
let! theme =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM theme WHERE id = @id"
|
||||
|> Sql.parameters themeIdParam
|
||||
|> Sql.executeAsync Map.toTheme
|
||||
|> tryHead
|
||||
if Option.isSome theme then
|
||||
let! templates =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM theme_template WHERE theme_id = @id"
|
||||
|> Sql.parameters themeIdParam
|
||||
|> Sql.executeAsync (Map.toThemeTemplate true)
|
||||
return Some { theme.Value with Templates = templates }
|
||||
else return None
|
||||
}
|
||||
let findById themeId =
|
||||
log.LogTrace "Theme.findById"
|
||||
Find.byId<Theme> Table.Theme (ThemeId.toString themeId)
|
||||
|
||||
/// Find a theme by its ID (excludes the text of templates)
|
||||
let findByIdWithoutText themeId = backgroundTask {
|
||||
match! findById themeId with
|
||||
| Some theme ->
|
||||
return Some {
|
||||
theme with Templates = theme.Templates |> List.map (fun t -> { t with Text = "" })
|
||||
}
|
||||
| None -> return None
|
||||
}
|
||||
let findByIdWithoutText themeId =
|
||||
log.LogTrace "Theme.findByIdWithoutText"
|
||||
Custom.single (Query.Find.byId Table.Theme) [ "@id", Sql.string (ThemeId.toString themeId) ] withoutTemplateText
|
||||
|
||||
/// Delete a theme by its ID
|
||||
let delete themeId = backgroundTask {
|
||||
let idParams = [ "@id", Sql.string (ThemeId.toString themeId) ]
|
||||
let! exists =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"SELECT EXISTS (SELECT 1 FROM theme WHERE id = @id) AS {existsName}"
|
||||
|> Sql.parameters idParams
|
||||
|> Sql.executeRowAsync Map.toExists
|
||||
if exists then
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query
|
||||
"DELETE FROM theme_asset WHERE theme_id = @id;
|
||||
DELETE FROM theme_template WHERE theme_id = @id;
|
||||
DELETE FROM theme WHERE id = @id"
|
||||
|> Sql.parameters idParams
|
||||
|> Sql.executeNonQueryAsync
|
||||
log.LogTrace "Theme.delete"
|
||||
match! exists themeId with
|
||||
| true ->
|
||||
do! Delete.byId Table.Theme (ThemeId.toString themeId)
|
||||
return true
|
||||
else return false
|
||||
| false -> return false
|
||||
}
|
||||
|
||||
/// Save a theme
|
||||
let save (theme : Theme) = backgroundTask {
|
||||
let! oldTheme = findById theme.Id
|
||||
let themeIdParam = Sql.string (ThemeId.toString theme.Id)
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query
|
||||
"INSERT INTO theme VALUES (@id, @name, @version)
|
||||
ON CONFLICT (id) DO UPDATE
|
||||
SET name = EXCLUDED.name,
|
||||
version = EXCLUDED.version"
|
||||
|> Sql.parameters
|
||||
[ "@id", themeIdParam
|
||||
"@name", Sql.string theme.Name
|
||||
"@version", Sql.string theme.Version ]
|
||||
|> Sql.executeNonQueryAsync
|
||||
|
||||
let toDelete, _ =
|
||||
Utils.diffLists (oldTheme |> Option.map (fun t -> t.Templates) |> Option.defaultValue [])
|
||||
theme.Templates (fun t -> t.Name)
|
||||
let toAddOrUpdate =
|
||||
theme.Templates
|
||||
|> List.filter (fun t -> not (toDelete |> List.exists (fun d -> d.Name = t.Name)))
|
||||
|
||||
if not (List.isEmpty toDelete) || not (List.isEmpty toAddOrUpdate) then
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.executeTransactionAsync [
|
||||
if not (List.isEmpty toDelete) then
|
||||
"DELETE FROM theme_template WHERE theme_id = @themeId AND name = @name",
|
||||
toDelete |> List.map (fun tmpl -> [ "@themeId", themeIdParam; "@name", Sql.string tmpl.Name ])
|
||||
if not (List.isEmpty toAddOrUpdate) then
|
||||
"INSERT INTO theme_template VALUES (@themeId, @name, @template)
|
||||
ON CONFLICT (theme_id, name) DO UPDATE
|
||||
SET template = EXCLUDED.template",
|
||||
toAddOrUpdate |> List.map (fun tmpl -> [
|
||||
"@themeId", themeIdParam
|
||||
"@name", Sql.string tmpl.Name
|
||||
"@template", Sql.string tmpl.Text
|
||||
])
|
||||
]
|
||||
()
|
||||
}
|
||||
let save (theme : Theme) =
|
||||
log.LogTrace "Theme.save"
|
||||
save Table.Theme (ThemeId.toString theme.Id) theme
|
||||
|
||||
interface IThemeData with
|
||||
member _.All () = all ()
|
||||
@@ -135,68 +59,54 @@ type PostgresThemeData (conn : NpgsqlConnection) =
|
||||
|
||||
|
||||
/// PostgreSQL myWebLog theme asset data implementation
|
||||
type PostgresThemeAssetData (conn : NpgsqlConnection) =
|
||||
type PostgresThemeAssetData (log : ILogger) =
|
||||
|
||||
/// Get all theme assets (excludes data)
|
||||
let all () =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT theme_id, path, updated_on FROM theme_asset"
|
||||
|> Sql.executeAsync (Map.toThemeAsset false)
|
||||
log.LogTrace "ThemeAsset.all"
|
||||
Custom.list $"SELECT theme_id, path, updated_on FROM {Table.ThemeAsset}" [] (Map.toThemeAsset false)
|
||||
|
||||
/// Delete all assets for the given theme
|
||||
let deleteByTheme themeId = backgroundTask {
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "DELETE FROM theme_asset WHERE theme_id = @themeId"
|
||||
|> Sql.parameters [ "@themeId", Sql.string (ThemeId.toString themeId) ]
|
||||
|> Sql.executeNonQueryAsync
|
||||
()
|
||||
}
|
||||
let deleteByTheme themeId =
|
||||
log.LogTrace "ThemeAsset.deleteByTheme"
|
||||
Custom.nonQuery $"DELETE FROM {Table.ThemeAsset} WHERE theme_id = @themeId"
|
||||
[ "@themeId", Sql.string (ThemeId.toString themeId) ]
|
||||
|
||||
/// Find a theme asset by its ID
|
||||
let findById assetId =
|
||||
log.LogTrace "ThemeAsset.findById"
|
||||
let (ThemeAssetId (ThemeId themeId, path)) = assetId
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM theme_asset WHERE theme_id = @themeId AND path = @path"
|
||||
|> Sql.parameters [ "@themeId", Sql.string themeId; "@path", Sql.string path ]
|
||||
|> Sql.executeAsync (Map.toThemeAsset true)
|
||||
|> tryHead
|
||||
Custom.single $"SELECT * FROM {Table.ThemeAsset} WHERE theme_id = @themeId AND path = @path"
|
||||
[ "@themeId", Sql.string themeId; "@path", Sql.string path ] (Map.toThemeAsset true)
|
||||
|
||||
/// Get theme assets for the given theme (excludes data)
|
||||
let findByTheme themeId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT theme_id, path, updated_on FROM theme_asset WHERE theme_id = @themeId"
|
||||
|> Sql.parameters [ "@themeId", Sql.string (ThemeId.toString themeId) ]
|
||||
|> Sql.executeAsync (Map.toThemeAsset false)
|
||||
log.LogTrace "ThemeAsset.findByTheme"
|
||||
Custom.list $"SELECT theme_id, path, updated_on FROM {Table.ThemeAsset} WHERE theme_id = @themeId"
|
||||
[ "@themeId", Sql.string (ThemeId.toString themeId) ] (Map.toThemeAsset false)
|
||||
|
||||
/// Get theme assets for the given theme
|
||||
let findByThemeWithData themeId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM theme_asset WHERE theme_id = @themeId"
|
||||
|> Sql.parameters [ "@themeId", Sql.string (ThemeId.toString themeId) ]
|
||||
|> Sql.executeAsync (Map.toThemeAsset true)
|
||||
log.LogTrace "ThemeAsset.findByThemeWithData"
|
||||
Custom.list $"SELECT * FROM {Table.ThemeAsset} WHERE theme_id = @themeId"
|
||||
[ "@themeId", Sql.string (ThemeId.toString themeId) ] (Map.toThemeAsset true)
|
||||
|
||||
/// Save a theme asset
|
||||
let save (asset : ThemeAsset) = backgroundTask {
|
||||
let save (asset : ThemeAsset) =
|
||||
log.LogTrace "ThemeAsset.save"
|
||||
let (ThemeAssetId (ThemeId themeId, path)) = asset.Id
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query
|
||||
"INSERT INTO theme_asset (
|
||||
theme_id, path, updated_on, data
|
||||
) VALUES (
|
||||
@themeId, @path, @updatedOn, @data
|
||||
) ON CONFLICT (theme_id, path) DO UPDATE
|
||||
SET updated_on = EXCLUDED.updated_on,
|
||||
data = EXCLUDED.data"
|
||||
|> Sql.parameters
|
||||
[ "@themeId", Sql.string themeId
|
||||
"@path", Sql.string path
|
||||
"@data", Sql.bytea asset.Data
|
||||
typedParam "updatedOn" asset.UpdatedOn ]
|
||||
|> Sql.executeNonQueryAsync
|
||||
()
|
||||
}
|
||||
Custom.nonQuery
|
||||
$"INSERT INTO {Table.ThemeAsset} (
|
||||
theme_id, path, updated_on, data
|
||||
) VALUES (
|
||||
@themeId, @path, @updatedOn, @data
|
||||
) ON CONFLICT (theme_id, path) DO UPDATE
|
||||
SET updated_on = EXCLUDED.updated_on,
|
||||
data = EXCLUDED.data"
|
||||
[ "@themeId", Sql.string themeId
|
||||
"@path", Sql.string path
|
||||
"@data", Sql.bytea asset.Data
|
||||
typedParam "updatedOn" asset.UpdatedOn ]
|
||||
|
||||
interface IThemeAssetData with
|
||||
member _.All () = all ()
|
||||
|
@@ -1,16 +1,17 @@
|
||||
namespace MyWebLog.Data.Postgres
|
||||
|
||||
open BitBadger.Npgsql.FSharp.Documents
|
||||
open Microsoft.Extensions.Logging
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
open Npgsql
|
||||
open Npgsql.FSharp
|
||||
|
||||
/// PostgreSQL myWebLog uploaded file data implementation
|
||||
type PostgresUploadData (conn : NpgsqlConnection) =
|
||||
type PostgresUploadData (log : ILogger) =
|
||||
|
||||
/// The INSERT statement for an uploaded file
|
||||
let upInsert =
|
||||
"INSERT INTO upload (
|
||||
let upInsert = $"
|
||||
INSERT INTO {Table.Upload} (
|
||||
id, web_log_id, path, updated_on, data
|
||||
) VALUES (
|
||||
@id, @webLogId, @path, @updatedOn, @data
|
||||
@@ -26,64 +27,49 @@ type PostgresUploadData (conn : NpgsqlConnection) =
|
||||
]
|
||||
|
||||
/// Save an uploaded file
|
||||
let add upload = backgroundTask {
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query upInsert
|
||||
|> Sql.parameters (upParams upload)
|
||||
|> Sql.executeNonQueryAsync
|
||||
()
|
||||
}
|
||||
let add upload =
|
||||
log.LogTrace "Upload.add"
|
||||
Custom.nonQuery upInsert (upParams upload)
|
||||
|
||||
/// Delete an uploaded file by its ID
|
||||
let delete uploadId webLogId = backgroundTask {
|
||||
let theParams = [ "@id", Sql.string (UploadId.toString uploadId); webLogIdParam webLogId ]
|
||||
log.LogTrace "Upload.delete"
|
||||
let idParam = [ "@id", Sql.string (UploadId.toString uploadId) ]
|
||||
let! path =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT path FROM upload WHERE id = @id AND web_log_id = @webLogId"
|
||||
|> Sql.parameters theParams
|
||||
|> Sql.executeAsync (fun row -> row.string "path")
|
||||
|> tryHead
|
||||
Custom.single $"SELECT path FROM {Table.Upload} WHERE id = @id AND web_log_id = @webLogId"
|
||||
(webLogIdParam webLogId :: idParam) (fun row -> row.string "path")
|
||||
if Option.isSome path then
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "DELETE FROM upload WHERE id = @id AND web_log_id = @webLogId"
|
||||
|> Sql.parameters theParams
|
||||
|> Sql.executeNonQueryAsync
|
||||
do! Custom.nonQuery (Query.Delete.byId Table.Upload) idParam
|
||||
return Ok path.Value
|
||||
else return Error $"""Upload ID {UploadId.toString uploadId} not found"""
|
||||
}
|
||||
|
||||
/// Find an uploaded file by its path for the given web log
|
||||
let findByPath path webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM upload WHERE web_log_id = @webLogId AND path = @path"
|
||||
|> Sql.parameters [ webLogIdParam webLogId; "@path", Sql.string path ]
|
||||
|> Sql.executeAsync (Map.toUpload true)
|
||||
|> tryHead
|
||||
log.LogTrace "Upload.findByPath"
|
||||
Custom.single $"SELECT * FROM {Table.Upload} WHERE web_log_id = @webLogId AND path = @path"
|
||||
[ webLogIdParam webLogId; "@path", Sql.string path ] (Map.toUpload true)
|
||||
|
||||
/// Find all uploaded files for the given web log (excludes data)
|
||||
let findByWebLog webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT id, web_log_id, path, updated_on FROM upload WHERE web_log_id = @webLogId"
|
||||
|> Sql.parameters [ webLogIdParam webLogId ]
|
||||
|> Sql.executeAsync (Map.toUpload false)
|
||||
log.LogTrace "Upload.findByWebLog"
|
||||
Custom.list $"SELECT id, web_log_id, path, updated_on FROM {Table.Upload} WHERE web_log_id = @webLogId"
|
||||
[ webLogIdParam webLogId ] (Map.toUpload false)
|
||||
|
||||
/// Find all uploaded files for the given web log
|
||||
let findByWebLogWithData webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM upload WHERE web_log_id = @webLogId"
|
||||
|> Sql.parameters [ webLogIdParam webLogId ]
|
||||
|> Sql.executeAsync (Map.toUpload true)
|
||||
log.LogTrace "Upload.findByWebLogWithData"
|
||||
Custom.list $"SELECT * FROM {Table.Upload} WHERE web_log_id = @webLogId" [ webLogIdParam webLogId ]
|
||||
(Map.toUpload true)
|
||||
|
||||
/// Restore uploads from a backup
|
||||
let restore uploads = backgroundTask {
|
||||
log.LogTrace "Upload.restore"
|
||||
for batch in uploads |> List.chunkBySize 5 do
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.executeTransactionAsync [
|
||||
upInsert, batch |> List.map upParams
|
||||
]
|
||||
Configuration.dataSource ()
|
||||
|> Sql.fromDataSource
|
||||
|> Sql.executeTransactionAsync [ upInsert, batch |> List.map upParams ]
|
||||
()
|
||||
}
|
||||
|
||||
|
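One further illustrative note (not part of this commit): the upload restore above writes files in batches of five, presumably to keep each transaction modest when the bytea payloads are large. The batching pattern in isolation:

open System.Threading.Tasks

/// Illustrative only: apply a save function to a list in batches of five, as restore does above
let restoreInBatches (saveBatch : 'T list -> Task) (items : 'T list) = backgroundTask {
    for batch in items |> List.chunkBySize 5 do
        do! saveBatch batch
}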
@@ -1,231 +1,61 @@
|
||||
namespace MyWebLog.Data.Postgres
|
||||
|
||||
open BitBadger.Npgsql.FSharp.Documents
|
||||
open Microsoft.Extensions.Logging
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
open Newtonsoft.Json
|
||||
open Npgsql
|
||||
open Npgsql.FSharp
|
||||
|
||||
/// PostgreSQL myWebLog web log data implementation
|
||||
type PostgresWebLogData (conn : NpgsqlConnection, ser : JsonSerializer) =
|
||||
|
||||
// SUPPORT FUNCTIONS
|
||||
|
||||
/// The parameters for web log INSERT or web log/RSS options UPDATE statements
|
||||
let rssParams (webLog : WebLog) = [
|
||||
"@isFeedEnabled", Sql.bool webLog.Rss.IsFeedEnabled
|
||||
"@feedName", Sql.string webLog.Rss.FeedName
|
||||
"@itemsInFeed", Sql.intOrNone webLog.Rss.ItemsInFeed
|
||||
"@isCategoryEnabled", Sql.bool webLog.Rss.IsCategoryEnabled
|
||||
"@isTagEnabled", Sql.bool webLog.Rss.IsTagEnabled
|
||||
"@copyright", Sql.stringOrNone webLog.Rss.Copyright
|
||||
]
|
||||
|
||||
/// The parameters for web log INSERT or UPDATE statements
|
||||
let webLogParams (webLog : WebLog) = [
|
||||
"@id", Sql.string (WebLogId.toString webLog.Id)
|
||||
"@name", Sql.string webLog.Name
|
||||
"@slug", Sql.string webLog.Slug
|
||||
"@subtitle", Sql.stringOrNone webLog.Subtitle
|
||||
"@defaultPage", Sql.string webLog.DefaultPage
|
||||
"@postsPerPage", Sql.int webLog.PostsPerPage
|
||||
"@themeId", Sql.string (ThemeId.toString webLog.ThemeId)
|
||||
"@urlBase", Sql.string webLog.UrlBase
|
||||
"@timeZone", Sql.string webLog.TimeZone
|
||||
"@autoHtmx", Sql.bool webLog.AutoHtmx
|
||||
"@uploads", Sql.string (UploadDestination.toString webLog.Uploads)
|
||||
yield! rssParams webLog
|
||||
]
|
||||
|
||||
/// Shorthand to map a result to a custom feed
|
||||
let toCustomFeed =
|
||||
Map.toCustomFeed ser
|
||||
|
||||
/// Get the current custom feeds for a web log
|
||||
let getCustomFeeds (webLog : WebLog) =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM web_log_feed WHERE web_log_id = @webLogId"
|
||||
|> Sql.parameters [ webLogIdParam webLog.Id ]
|
||||
|> Sql.executeAsync toCustomFeed
|
||||
|
||||
/// Append custom feeds to a web log
|
||||
let appendCustomFeeds (webLog : WebLog) = backgroundTask {
|
||||
let! feeds = getCustomFeeds webLog
|
||||
return { webLog with Rss = { webLog.Rss with CustomFeeds = feeds } }
|
||||
}
|
||||
|
||||
/// The parameters to save a custom feed
|
||||
let feedParams webLogId (feed : CustomFeed) = [
|
||||
webLogIdParam webLogId
|
||||
"@id", Sql.string (CustomFeedId.toString feed.Id)
|
||||
"@source", Sql.string (CustomFeedSource.toString feed.Source)
|
||||
"@path", Sql.string (Permalink.toString feed.Path)
|
||||
"@podcast", Sql.jsonbOrNone (feed.Podcast |> Option.map (Utils.serialize ser))
|
||||
]
|
||||
|
||||
/// Update the custom feeds for a web log
|
||||
let updateCustomFeeds (webLog : WebLog) = backgroundTask {
|
||||
let! feeds = getCustomFeeds webLog
|
||||
let toDelete, _ = Utils.diffLists feeds webLog.Rss.CustomFeeds (fun it -> $"{CustomFeedId.toString it.Id}")
|
||||
let toId (feed : CustomFeed) = feed.Id
|
||||
let toAddOrUpdate =
|
||||
webLog.Rss.CustomFeeds |> List.filter (fun f -> not (toDelete |> List.map toId |> List.contains f.Id))
|
||||
if not (List.isEmpty toDelete) || not (List.isEmpty toAddOrUpdate) then
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.executeTransactionAsync [
|
||||
if not (List.isEmpty toDelete) then
|
||||
"DELETE FROM web_log_feed WHERE id = @id",
|
||||
toDelete |> List.map (fun it -> [ "@id", Sql.string (CustomFeedId.toString it.Id) ])
|
||||
if not (List.isEmpty toAddOrUpdate) then
|
||||
"INSERT INTO web_log_feed (
|
||||
id, web_log_id, source, path, podcast
|
||||
) VALUES (
|
||||
@id, @webLogId, @source, @path, @podcast
|
||||
) ON CONFLICT (id) DO UPDATE
|
||||
SET source = EXCLUDED.source,
|
||||
path = EXCLUDED.path,
|
||||
podcast = EXCLUDED.podcast",
|
||||
toAddOrUpdate |> List.map (feedParams webLog.Id)
|
||||
]
|
||||
()
|
||||
}
|
||||
|
||||
// IMPLEMENTATION FUNCTIONS
|
||||
type PostgresWebLogData (log : ILogger) =
|
||||
|
||||
/// Add a web log
|
||||
let add webLog = backgroundTask {
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query
|
||||
"INSERT INTO web_log (
|
||||
id, name, slug, subtitle, default_page, posts_per_page, theme_id, url_base, time_zone, auto_htmx,
|
||||
uploads, is_feed_enabled, feed_name, items_in_feed, is_category_enabled, is_tag_enabled, copyright
|
||||
) VALUES (
|
||||
@id, @name, @slug, @subtitle, @defaultPage, @postsPerPage, @themeId, @urlBase, @timeZone, @autoHtmx,
|
||||
@uploads, @isFeedEnabled, @feedName, @itemsInFeed, @isCategoryEnabled, @isTagEnabled, @copyright
|
||||
)"
|
||||
|> Sql.parameters (webLogParams webLog)
|
||||
|> Sql.executeNonQueryAsync
|
||||
do! updateCustomFeeds webLog
|
||||
}
|
||||
let add (webLog : WebLog) =
|
||||
log.LogTrace "WebLog.add"
|
||||
insert Table.WebLog (WebLogId.toString webLog.Id) webLog
|
||||
|
||||
/// Retrieve all web logs
|
||||
let all () = backgroundTask {
|
||||
let! webLogs =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM web_log"
|
||||
|> Sql.executeAsync Map.toWebLog
|
||||
let! feeds =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM web_log_feed"
|
||||
|> Sql.executeAsync (fun row -> WebLogId (row.string "web_log_id"), toCustomFeed row)
|
||||
return
|
||||
webLogs
|
||||
|> List.map (fun it ->
|
||||
{ it with
|
||||
Rss =
|
||||
{ it.Rss with
|
||||
CustomFeeds = feeds |> List.filter (fun (wlId, _) -> wlId = it.Id) |> List.map snd } })
|
||||
}
|
||||
let all () =
|
||||
log.LogTrace "WebLog.all"
|
||||
Find.all<WebLog> Table.WebLog
|
||||
|
||||
/// Delete a web log by its ID
|
||||
let delete webLogId = backgroundTask {
|
||||
let subQuery table = $"(SELECT id FROM {table} WHERE web_log_id = @webLogId)"
|
||||
let postSubQuery = subQuery "post"
|
||||
let pageSubQuery = subQuery "page"
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"
|
||||
DELETE FROM post_comment WHERE post_id IN {postSubQuery};
|
||||
DELETE FROM post_revision WHERE post_id IN {postSubQuery};
|
||||
DELETE FROM post_category WHERE post_id IN {postSubQuery};
|
||||
DELETE FROM post WHERE web_log_id = @webLogId;
|
||||
DELETE FROM page_revision WHERE page_id IN {pageSubQuery};
|
||||
DELETE FROM page WHERE web_log_id = @webLogId;
|
||||
DELETE FROM category WHERE web_log_id = @webLogId;
|
||||
DELETE FROM tag_map WHERE web_log_id = @webLogId;
|
||||
DELETE FROM upload WHERE web_log_id = @webLogId;
|
||||
DELETE FROM web_log_user WHERE web_log_id = @webLogId;
|
||||
DELETE FROM web_log_feed WHERE web_log_id = @webLogId;
|
||||
DELETE FROM web_log WHERE id = @webLogId"
|
||||
|> Sql.parameters [ webLogIdParam webLogId ]
|
||||
|> Sql.executeNonQueryAsync
|
||||
()
|
||||
}
|
||||
let delete webLogId =
|
||||
log.LogTrace "WebLog.delete"
|
||||
Custom.nonQuery
|
||||
$"""DELETE FROM {Table.PostComment}
|
||||
WHERE data ->> '{nameof Comment.empty.PostId}' IN
|
||||
(SELECT id FROM {Table.Post} WHERE {Query.whereDataContains "@criteria"});
|
||||
{Query.Delete.byContains Table.Post};
|
||||
{Query.Delete.byContains Table.Page};
|
||||
{Query.Delete.byContains Table.Category};
|
||||
{Query.Delete.byContains Table.TagMap};
|
||||
{Query.Delete.byContains Table.WebLogUser};
|
||||
DELETE FROM {Table.Upload} WHERE web_log_id = @webLogId;
|
||||
DELETE FROM {Table.WebLog} WHERE id = @webLogId"""
|
||||
[ webLogIdParam webLogId; webLogContains webLogId ]
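The contains-based deletes and the plain SQL deletes above share a single parameter list; based on its use here and in the user queries later in this diff, webLogContains is assumed to supply the @criteria document that matches on the owning web log, along these lines:

    // Assumed shape of the shared helper (not shown in this diff); the custom ID converter
    // is expected to serialize the web log ID to its string form
    let webLogContains webLogId =
        "@criteria", Query.jsonbDocParam {| WebLogId = webLogId |}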
|
||||
|
||||
/// Find a web log by its host (URL base)
|
||||
let findByHost url = backgroundTask {
|
||||
let! webLog =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM web_log WHERE url_base = @urlBase"
|
||||
|> Sql.parameters [ "@urlBase", Sql.string url ]
|
||||
|> Sql.executeAsync Map.toWebLog
|
||||
|> tryHead
|
||||
if Option.isSome webLog then
|
||||
let! withFeeds = appendCustomFeeds webLog.Value
|
||||
return Some withFeeds
|
||||
else return None
|
||||
}
|
||||
let findByHost (url : string) =
|
||||
log.LogTrace "WebLog.findByHost"
|
||||
Custom.single (selectWithCriteria Table.WebLog) [ "@criteria", Query.jsonbDocParam {| UrlBase = url |} ]
|
||||
fromData<WebLog>
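For reference, a criteria lookup like this should reduce to a JSONB containment query against the document column; a rough sketch of the equivalent hand-written call (the exact SQL is generated by the document library):

    // Illustrative only — approximates what selectWithCriteria and jsonbDocParam produce
    let findByHostSketch (url : string) =
        Custom.single
            $"SELECT data FROM {Table.WebLog} WHERE data @> @criteria"
            [ "@criteria", Query.jsonbDocParam {| UrlBase = url |} ]
            fromData<WebLog>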
|
||||
|
||||
/// Find a web log by its ID
|
||||
let findById webLogId = backgroundTask {
|
||||
let! webLog =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM web_log WHERE id = @webLogId"
|
||||
|> Sql.parameters [ webLogIdParam webLogId ]
|
||||
|> Sql.executeAsync Map.toWebLog
|
||||
|> tryHead
|
||||
if Option.isSome webLog then
|
||||
let! withFeeds = appendCustomFeeds webLog.Value
|
||||
return Some withFeeds
|
||||
else return None
|
||||
}
|
||||
let findById webLogId =
|
||||
log.LogTrace "WebLog.findById"
|
||||
Find.byId<WebLog> Table.WebLog (WebLogId.toString webLogId)
|
||||
|
||||
/// Update settings for a web log
|
||||
let updateSettings webLog = backgroundTask {
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query
|
||||
"UPDATE web_log
|
||||
SET name = @name,
|
||||
slug = @slug,
|
||||
subtitle = @subtitle,
|
||||
default_page = @defaultPage,
|
||||
posts_per_page = @postsPerPage,
|
||||
theme_id = @themeId,
|
||||
url_base = @urlBase,
|
||||
time_zone = @timeZone,
|
||||
auto_htmx = @autoHtmx,
|
||||
uploads = @uploads,
|
||||
is_feed_enabled = @isFeedEnabled,
|
||||
feed_name = @feedName,
|
||||
items_in_feed = @itemsInFeed,
|
||||
is_category_enabled = @isCategoryEnabled,
|
||||
is_tag_enabled = @isTagEnabled,
|
||||
copyright = @copyright
|
||||
WHERE id = @id"
|
||||
|> Sql.parameters (webLogParams webLog)
|
||||
|> Sql.executeNonQueryAsync
|
||||
()
|
||||
}
|
||||
let updateSettings (webLog : WebLog) =
|
||||
log.LogTrace "WebLog.updateSettings"
|
||||
Update.full Table.WebLog (WebLogId.toString webLog.Id) webLog
|
||||
|
||||
/// Update RSS options for a web log
|
||||
let updateRssOptions (webLog : WebLog) = backgroundTask {
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query
|
||||
"UPDATE web_log
|
||||
SET is_feed_enabled = @isFeedEnabled,
|
||||
feed_name = @feedName,
|
||||
items_in_feed = @itemsInFeed,
|
||||
is_category_enabled = @isCategoryEnabled,
|
||||
is_tag_enabled = @isTagEnabled,
|
||||
copyright = @copyright
|
||||
WHERE id = @webLogId"
|
||||
|> Sql.parameters (webLogIdParam webLog.Id :: rssParams webLog)
|
||||
|> Sql.executeNonQueryAsync
|
||||
do! updateCustomFeeds webLog
|
||||
log.LogTrace "WebLog.updateRssOptions"
|
||||
match! findById webLog.Id with
|
||||
| Some _ -> do! Update.partialById Table.WebLog (WebLogId.toString webLog.Id) {| Rss = webLog.Rss |}
|
||||
| None -> ()
|
||||
}
|
||||
|
||||
interface IWebLogData with
|
||||
|
@ -1,140 +1,91 @@
|
||||
namespace MyWebLog.Data.Postgres
|
||||
|
||||
open BitBadger.Npgsql.FSharp.Documents
|
||||
open Microsoft.Extensions.Logging
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
open Npgsql
|
||||
open Npgsql.FSharp
|
||||
|
||||
/// PostgreSQL myWebLog user data implementation
|
||||
type PostgresWebLogUserData (conn : NpgsqlConnection) =
|
||||
type PostgresWebLogUserData (log : ILogger) =
|
||||
|
||||
/// The INSERT statement for a user
|
||||
let userInsert =
|
||||
"INSERT INTO web_log_user (
|
||||
id, web_log_id, email, first_name, last_name, preferred_name, password_hash, url, access_level,
|
||||
created_on, last_seen_on
|
||||
) VALUES (
|
||||
@id, @webLogId, @email, @firstName, @lastName, @preferredName, @passwordHash, @url, @accessLevel,
|
||||
@createdOn, @lastSeenOn
|
||||
)"
|
||||
|
||||
/// Parameters for saving web log users
|
||||
let userParams (user : WebLogUser) = [
|
||||
"@id", Sql.string (WebLogUserId.toString user.Id)
|
||||
"@webLogId", Sql.string (WebLogId.toString user.WebLogId)
|
||||
"@email", Sql.string user.Email
|
||||
"@firstName", Sql.string user.FirstName
|
||||
"@lastName", Sql.string user.LastName
|
||||
"@preferredName", Sql.string user.PreferredName
|
||||
"@passwordHash", Sql.string user.PasswordHash
|
||||
"@url", Sql.stringOrNone user.Url
|
||||
"@accessLevel", Sql.string (AccessLevel.toString user.AccessLevel)
|
||||
typedParam "createdOn" user.CreatedOn
|
||||
optParam "lastSeenOn" user.LastSeenOn
|
||||
]
|
||||
|
||||
/// Find a user by their ID for the given web log
|
||||
let findById userId webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM web_log_user WHERE id = @id AND web_log_id = @webLogId"
|
||||
|> Sql.parameters [ "@id", Sql.string (WebLogUserId.toString userId); webLogIdParam webLogId ]
|
||||
|> Sql.executeAsync Map.toWebLogUser
|
||||
|> tryHead
|
||||
log.LogTrace "WebLogUser.findById"
|
||||
Document.findByIdAndWebLog<WebLogUserId, WebLogUser> Table.WebLogUser userId WebLogUserId.toString webLogId
|
||||
|
||||
/// Delete a user if they have no posts or pages
|
||||
let delete userId webLogId = backgroundTask {
|
||||
log.LogTrace "WebLogUser.delete"
|
||||
match! findById userId webLogId with
|
||||
| Some _ ->
|
||||
let userParam = [ "@userId", Sql.string (WebLogUserId.toString userId) ]
|
||||
let criteria = Query.whereDataContains "@criteria"
|
||||
let! isAuthor =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query
|
||||
"SELECT ( EXISTS (SELECT 1 FROM page WHERE author_id = @userId
|
||||
OR EXISTS (SELECT 1 FROM post WHERE author_id = @userId)) AS does_exist"
|
||||
|> Sql.parameters userParam
|
||||
|> Sql.executeRowAsync Map.toExists
|
||||
Custom.scalar
|
||||
$" SELECT ( EXISTS (SELECT 1 FROM {Table.Page} WHERE {criteria}
|
||||
OR EXISTS (SELECT 1 FROM {Table.Post} WHERE {criteria})
|
||||
) AS {existsName}"
|
||||
[ "@criteria", Query.jsonbDocParam {| AuthorId = userId |} ] Map.toExists
|
||||
if isAuthor then
|
||||
return Error "User has pages or posts; cannot delete"
|
||||
else
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "DELETE FROM web_log_user WHERE id = @userId"
|
||||
|> Sql.parameters userParam
|
||||
|> Sql.executeNonQueryAsync
|
||||
do! Delete.byId Table.WebLogUser (WebLogUserId.toString userId)
|
||||
return Ok true
|
||||
| None -> return Error "User does not exist"
|
||||
}
|
||||
|
||||
/// Find a user by their e-mail address for the given web log
|
||||
let findByEmail email webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM web_log_user WHERE web_log_id = @webLogId AND email = @email"
|
||||
|> Sql.parameters [ webLogIdParam webLogId; "@email", Sql.string email ]
|
||||
|> Sql.executeAsync Map.toWebLogUser
|
||||
|> tryHead
|
||||
let findByEmail (email : string) webLogId =
|
||||
log.LogTrace "WebLogUser.findByEmail"
|
||||
Custom.single (selectWithCriteria Table.WebLogUser)
|
||||
[ "@criteria", Query.jsonbDocParam {| webLogDoc webLogId with Email = email |} ]
|
||||
fromData<WebLogUser>
|
||||
|
||||
/// Get all users for the given web log
|
||||
let findByWebLog webLogId =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT * FROM web_log_user WHERE web_log_id = @webLogId ORDER BY LOWER(preferred_name)"
|
||||
|> Sql.parameters [ webLogIdParam webLogId ]
|
||||
|> Sql.executeAsync Map.toWebLogUser
|
||||
log.LogTrace "WebLogUser.findByWebLog"
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.WebLogUser} ORDER BY LOWER(data->>'{nameof WebLogUser.empty.PreferredName}')"
|
||||
[ webLogContains webLogId ] fromData<WebLogUser>
|
||||
|
||||
/// Find the names of users by their IDs for the given web log
|
||||
let findNames webLogId userIds = backgroundTask {
|
||||
log.LogTrace "WebLogUser.findNames"
|
||||
let idSql, idParams = inClause "AND id" "id" WebLogUserId.toString userIds
|
||||
let! users =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"SELECT * FROM web_log_user WHERE web_log_id = @webLogId {idSql}"
|
||||
|> Sql.parameters (webLogIdParam webLogId :: idParams)
|
||||
|> Sql.executeAsync Map.toWebLogUser
|
||||
Custom.list $"{selectWithCriteria Table.WebLogUser} {idSql}" (webLogContains webLogId :: idParams)
|
||||
fromData<WebLogUser>
|
||||
return
|
||||
users
|
||||
|> List.map (fun u -> { Name = WebLogUserId.toString u.Id; Value = WebLogUser.displayName u })
|
||||
}
|
||||
|
||||
/// Restore users from a backup
|
||||
let restore users = backgroundTask {
|
||||
let restore (users : WebLogUser list) = backgroundTask {
|
||||
log.LogTrace "WebLogUser.restore"
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
Configuration.dataSource ()
|
||||
|> Sql.fromDataSource
|
||||
|> Sql.executeTransactionAsync [
|
||||
userInsert, users |> List.map userParams
|
||||
Query.insert Table.WebLogUser,
|
||||
users |> List.map (fun user -> Query.docParameters (WebLogUserId.toString user.Id) user)
|
||||
]
|
||||
()
|
||||
}
|
||||
|
||||
/// Set a user's last seen date/time to now
|
||||
let setLastSeen userId webLogId = backgroundTask {
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "UPDATE web_log_user SET last_seen_on = @lastSeenOn WHERE id = @id AND web_log_id = @webLogId"
|
||||
|> Sql.parameters
|
||||
[ webLogIdParam webLogId
|
||||
typedParam "lastSeenOn" (Noda.now ())
|
||||
"@id", Sql.string (WebLogUserId.toString userId) ]
|
||||
|> Sql.executeNonQueryAsync
|
||||
()
|
||||
log.LogTrace "WebLogUser.setLastSeen"
|
||||
match! Document.existsByWebLog Table.WebLogUser userId WebLogUserId.toString webLogId with
|
||||
| true ->
|
||||
do! Update.partialById Table.WebLogUser (WebLogUserId.toString userId) {| LastSeenOn = Some (Noda.now ()) |}
|
||||
| false -> ()
|
||||
}
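Update.partialById merges the given fields into the stored document rather than replacing it; with the document library used here, that is assumed to be a JSONB concatenation update, roughly:

    // Sketch of the assumed partial-update semantics (not part of this commit)
    let setLastSeenSketch userId =
        Custom.nonQuery
            $"UPDATE {Table.WebLogUser} SET data = data || @data WHERE id = @id"
            [ "@id",   Sql.string (WebLogUserId.toString userId)
              "@data", Query.jsonbDocParam {| LastSeenOn = Some (Noda.now ()) |} ]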
|
||||
|
||||
/// Save a user
|
||||
let save user = backgroundTask {
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"
|
||||
{userInsert} ON CONFLICT (id) DO UPDATE
|
||||
SET email = @email,
|
||||
first_name = @firstName,
|
||||
last_name = @lastName,
|
||||
preferred_name = @preferredName,
|
||||
password_hash = @passwordHash,
|
||||
url = @url,
|
||||
access_level = @accessLevel,
|
||||
created_on = @createdOn,
|
||||
last_seen_on = @lastSeenOn"
|
||||
|> Sql.parameters (userParams user)
|
||||
|> Sql.executeNonQueryAsync
|
||||
()
|
||||
}
|
||||
let save (user : WebLogUser) =
|
||||
log.LogTrace "WebLogUser.save"
|
||||
save Table.WebLogUser (WebLogUserId.toString user.Id) user
|
||||
|
||||
interface IWebLogUserData with
|
||||
member _.Add user = save user
|
||||
|
@ -1,207 +1,127 @@
|
||||
namespace MyWebLog.Data
|
||||
|
||||
open BitBadger.Npgsql.Documents
|
||||
open BitBadger.Npgsql.FSharp.Documents
|
||||
open Microsoft.Extensions.Logging
|
||||
open MyWebLog
|
||||
open MyWebLog.Data.Postgres
|
||||
open Newtonsoft.Json
|
||||
open Npgsql
|
||||
open Npgsql.FSharp
|
||||
|
||||
/// Data implementation for PostgreSQL
|
||||
type PostgresData (conn : NpgsqlConnection, log : ILogger<PostgresData>, ser : JsonSerializer) =
|
||||
type PostgresData (source : NpgsqlDataSource, log : ILogger<PostgresData>, ser : JsonSerializer) =
|
||||
|
||||
/// Create any needed tables
|
||||
let ensureTables () = backgroundTask {
|
||||
let _ = NpgsqlConnection.GlobalTypeMapper.UseNodaTime ()
|
||||
// Set up the PostgreSQL document store
|
||||
Configuration.useDataSource source
|
||||
Configuration.useSerializer
|
||||
{ new IDocumentSerializer with
|
||||
member _.Serialize<'T> (it : 'T) : string = Utils.serialize ser it
|
||||
member _.Deserialize<'T> (it : string) : 'T = Utils.deserialize ser it
|
||||
}
|
||||
|
||||
let! tables =
|
||||
Sql.existingConnection conn
|
||||
Sql.fromDataSource source
|
||||
|> Sql.query "SELECT tablename FROM pg_tables WHERE schemaname = 'public'"
|
||||
|> Sql.executeAsync (fun row -> row.string "tablename")
|
||||
let needsTable table = not (List.contains table tables)
|
||||
// Create a document table
|
||||
let mutable isNew = false
|
||||
|
||||
let sql = seq {
|
||||
// Theme tables
|
||||
if needsTable "theme" then
|
||||
if needsTable Table.Theme then
|
||||
isNew <- true
|
||||
"CREATE TABLE theme (
|
||||
id TEXT NOT NULL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
version TEXT NOT NULL)"
|
||||
if needsTable "theme_template" then
|
||||
"CREATE TABLE theme_template (
|
||||
theme_id TEXT NOT NULL REFERENCES theme (id),
|
||||
name TEXT NOT NULL,
|
||||
template TEXT NOT NULL,
|
||||
PRIMARY KEY (theme_id, name))"
|
||||
if needsTable "theme_asset" then
|
||||
"CREATE TABLE theme_asset (
|
||||
theme_id TEXT NOT NULL REFERENCES theme (id),
|
||||
Definition.createTable Table.Theme
|
||||
if needsTable Table.ThemeAsset then
|
||||
$"CREATE TABLE {Table.ThemeAsset} (
|
||||
theme_id TEXT NOT NULL REFERENCES {Table.Theme} (id) ON DELETE CASCADE,
|
||||
path TEXT NOT NULL,
|
||||
updated_on TIMESTAMPTZ NOT NULL,
|
||||
data BYTEA NOT NULL,
|
||||
PRIMARY KEY (theme_id, path))"
|
||||
|
||||
// Web log tables
|
||||
if needsTable "web_log" then
|
||||
"CREATE TABLE web_log (
|
||||
id TEXT NOT NULL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
slug TEXT NOT NULL,
|
||||
subtitle TEXT,
|
||||
default_page TEXT NOT NULL,
|
||||
posts_per_page INTEGER NOT NULL,
|
||||
theme_id TEXT NOT NULL REFERENCES theme (id),
|
||||
url_base TEXT NOT NULL,
|
||||
time_zone TEXT NOT NULL,
|
||||
auto_htmx BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
uploads TEXT NOT NULL,
|
||||
is_feed_enabled BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
feed_name TEXT NOT NULL,
|
||||
items_in_feed INTEGER,
|
||||
is_category_enabled BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
is_tag_enabled BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
copyright TEXT)"
|
||||
"CREATE INDEX web_log_theme_idx ON web_log (theme_id)"
|
||||
if needsTable "web_log_feed" then
|
||||
"CREATE TABLE web_log_feed (
|
||||
id TEXT NOT NULL PRIMARY KEY,
|
||||
web_log_id TEXT NOT NULL REFERENCES web_log (id),
|
||||
source TEXT NOT NULL,
|
||||
path TEXT NOT NULL,
|
||||
podcast JSONB)"
|
||||
"CREATE INDEX web_log_feed_web_log_idx ON web_log_feed (web_log_id)"
|
||||
// Web log table
|
||||
if needsTable Table.WebLog then
|
||||
Definition.createTable Table.WebLog
|
||||
Definition.createIndex Table.WebLog Optimized
|
||||
|
||||
// Category table
|
||||
if needsTable "category" then
|
||||
"CREATE TABLE category (
|
||||
id TEXT NOT NULL PRIMARY KEY,
|
||||
web_log_id TEXT NOT NULL REFERENCES web_log (id),
|
||||
name TEXT NOT NULL,
|
||||
slug TEXT NOT NULL,
|
||||
description TEXT,
|
||||
parent_id TEXT)"
|
||||
"CREATE INDEX category_web_log_idx ON category (web_log_id)"
|
||||
if needsTable Table.Category then
|
||||
Definition.createTable Table.Category
|
||||
Definition.createIndex Table.Category Optimized
|
||||
|
||||
// Web log user table
|
||||
if needsTable "web_log_user" then
|
||||
"CREATE TABLE web_log_user (
|
||||
id TEXT NOT NULL PRIMARY KEY,
|
||||
web_log_id TEXT NOT NULL REFERENCES web_log (id),
|
||||
email TEXT NOT NULL,
|
||||
first_name TEXT NOT NULL,
|
||||
last_name TEXT NOT NULL,
|
||||
preferred_name TEXT NOT NULL,
|
||||
password_hash TEXT NOT NULL,
|
||||
url TEXT,
|
||||
access_level TEXT NOT NULL,
|
||||
created_on TIMESTAMPTZ NOT NULL,
|
||||
last_seen_on TIMESTAMPTZ)"
|
||||
"CREATE INDEX web_log_user_web_log_idx ON web_log_user (web_log_id)"
|
||||
"CREATE INDEX web_log_user_email_idx ON web_log_user (web_log_id, email)"
|
||||
if needsTable Table.WebLogUser then
|
||||
Definition.createTable Table.WebLogUser
|
||||
Definition.createIndex Table.WebLogUser Optimized
|
||||
|
||||
// Page tables
|
||||
if needsTable "page" then
|
||||
"CREATE TABLE page (
|
||||
id TEXT NOT NULL PRIMARY KEY,
|
||||
web_log_id TEXT NOT NULL REFERENCES web_log (id),
|
||||
author_id TEXT NOT NULL REFERENCES web_log_user (id),
|
||||
title TEXT NOT NULL,
|
||||
permalink TEXT NOT NULL,
|
||||
prior_permalinks TEXT[] NOT NULL DEFAULT '{}',
|
||||
published_on TIMESTAMPTZ NOT NULL,
|
||||
updated_on TIMESTAMPTZ NOT NULL,
|
||||
is_in_page_list BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
template TEXT,
|
||||
page_text TEXT NOT NULL,
|
||||
meta_items JSONB)"
|
||||
"CREATE INDEX page_web_log_idx ON page (web_log_id)"
|
||||
"CREATE INDEX page_author_idx ON page (author_id)"
|
||||
"CREATE INDEX page_permalink_idx ON page (web_log_id, permalink)"
|
||||
if needsTable "page_revision" then
|
||||
"CREATE TABLE page_revision (
|
||||
page_id TEXT NOT NULL REFERENCES page (id),
|
||||
if needsTable Table.Page then
|
||||
Definition.createTable Table.Page
|
||||
$"CREATE INDEX page_web_log_idx ON {Table.Page} ((data ->> '{nameof Page.empty.WebLogId}'))"
|
||||
$"CREATE INDEX page_author_idx ON {Table.Page} ((data ->> '{nameof Page.empty.AuthorId}'))"
|
||||
$"CREATE INDEX page_permalink_idx ON {Table.Page}
|
||||
((data ->> '{nameof Page.empty.WebLogId}'), (data ->> '{nameof Page.empty.Permalink}'))"
|
||||
if needsTable Table.PageRevision then
|
||||
$"CREATE TABLE {Table.PageRevision} (
|
||||
page_id TEXT NOT NULL REFERENCES {Table.Page} (id) ON DELETE CASCADE,
|
||||
as_of TIMESTAMPTZ NOT NULL,
|
||||
revision_text TEXT NOT NULL,
|
||||
PRIMARY KEY (page_id, as_of))"
|
||||
|
||||
// Post tables
|
||||
if needsTable "post" then
|
||||
"CREATE TABLE post (
|
||||
id TEXT NOT NULL PRIMARY KEY,
|
||||
web_log_id TEXT NOT NULL REFERENCES web_log (id),
|
||||
author_id TEXT NOT NULL REFERENCES web_log_user (id),
|
||||
status TEXT NOT NULL,
|
||||
title TEXT NOT NULL,
|
||||
permalink TEXT NOT NULL,
|
||||
prior_permalinks TEXT[] NOT NULL DEFAULT '{}',
|
||||
published_on TIMESTAMPTZ,
|
||||
updated_on TIMESTAMPTZ NOT NULL,
|
||||
template TEXT,
|
||||
post_text TEXT NOT NULL,
|
||||
tags TEXT[],
|
||||
meta_items JSONB,
|
||||
episode JSONB)"
|
||||
"CREATE INDEX post_web_log_idx ON post (web_log_id)"
|
||||
"CREATE INDEX post_author_idx ON post (author_id)"
|
||||
"CREATE INDEX post_status_idx ON post (web_log_id, status, updated_on)"
|
||||
"CREATE INDEX post_permalink_idx ON post (web_log_id, permalink)"
|
||||
if needsTable "post_category" then
|
||||
"CREATE TABLE post_category (
|
||||
post_id TEXT NOT NULL REFERENCES post (id),
|
||||
category_id TEXT NOT NULL REFERENCES category (id),
|
||||
PRIMARY KEY (post_id, category_id))"
|
||||
"CREATE INDEX post_category_category_idx ON post_category (category_id)"
|
||||
if needsTable "post_revision" then
|
||||
"CREATE TABLE post_revision (
|
||||
post_id TEXT NOT NULL REFERENCES post (id),
|
||||
if needsTable Table.Post then
|
||||
Definition.createTable Table.Post
|
||||
$"CREATE INDEX post_web_log_idx ON {Table.Post} ((data ->> '{nameof Post.empty.WebLogId}'))"
|
||||
$"CREATE INDEX post_author_idx ON {Table.Post} ((data ->> '{nameof Post.empty.AuthorId}'))"
|
||||
$"CREATE INDEX post_status_idx ON {Table.Post}
|
||||
((data ->> '{nameof Post.empty.WebLogId}'), (data ->> '{nameof Post.empty.Status}'),
|
||||
(data ->> '{nameof Post.empty.UpdatedOn}'))"
|
||||
$"CREATE INDEX post_permalink_idx ON {Table.Post}
|
||||
((data ->> '{nameof Post.empty.WebLogId}'), (data ->> '{nameof Post.empty.Permalink}'))"
|
||||
$"CREATE INDEX post_category_idx ON {Table.Post} USING GIN ((data['{nameof Post.empty.CategoryIds}']))"
|
||||
$"CREATE INDEX post_tag_idx ON {Table.Post} USING GIN ((data['{nameof Post.empty.Tags}']))"
|
||||
if needsTable Table.PostRevision then
|
||||
$"CREATE TABLE {Table.PostRevision} (
|
||||
post_id TEXT NOT NULL REFERENCES {Table.Post} (id) ON DELETE CASCADE,
|
||||
as_of TIMESTAMPTZ NOT NULL,
|
||||
revision_text TEXT NOT NULL,
|
||||
PRIMARY KEY (post_id, as_of))"
|
||||
if needsTable "post_comment" then
|
||||
"CREATE TABLE post_comment (
|
||||
id TEXT NOT NULL PRIMARY KEY,
|
||||
post_id TEXT NOT NULL REFERENCES post(id),
|
||||
in_reply_to_id TEXT,
|
||||
name TEXT NOT NULL,
|
||||
email TEXT NOT NULL,
|
||||
url TEXT,
|
||||
status TEXT NOT NULL,
|
||||
posted_on TIMESTAMPTZ NOT NULL,
|
||||
comment_text TEXT NOT NULL)"
|
||||
"CREATE INDEX post_comment_post_idx ON post_comment (post_id)"
|
||||
if needsTable Table.PostComment then
|
||||
Definition.createTable Table.PostComment
|
||||
$"CREATE INDEX post_comment_post_idx ON {Table.PostComment}
|
||||
((data ->> '{nameof Comment.empty.PostId}'))"
|
||||
|
||||
// Tag map table
|
||||
if needsTable "tag_map" then
|
||||
"CREATE TABLE tag_map (
|
||||
id TEXT NOT NULL PRIMARY KEY,
|
||||
web_log_id TEXT NOT NULL REFERENCES web_log (id),
|
||||
tag TEXT NOT NULL,
|
||||
url_value TEXT NOT NULL)"
|
||||
"CREATE INDEX tag_map_web_log_idx ON tag_map (web_log_id)"
|
||||
if needsTable Table.TagMap then
|
||||
Definition.createTable Table.TagMap
|
||||
Definition.createIndex Table.TagMap Optimized
|
||||
|
||||
// Uploaded file table
|
||||
if needsTable "upload" then
|
||||
"CREATE TABLE upload (
|
||||
if needsTable Table.Upload then
|
||||
$"CREATE TABLE {Table.Upload} (
|
||||
id TEXT NOT NULL PRIMARY KEY,
|
||||
web_log_id TEXT NOT NULL REFERENCES web_log (id),
|
||||
web_log_id TEXT NOT NULL REFERENCES {Table.WebLog} (id),
|
||||
path TEXT NOT NULL,
|
||||
updated_on TIMESTAMPTZ NOT NULL,
|
||||
data BYTEA NOT NULL)"
|
||||
"CREATE INDEX upload_web_log_idx ON upload (web_log_id)"
|
||||
"CREATE INDEX upload_path_idx ON upload (web_log_id, path)"
|
||||
$"CREATE INDEX upload_web_log_idx ON {Table.Upload} (web_log_id)"
|
||||
$"CREATE INDEX upload_path_idx ON {Table.Upload} (web_log_id, path)"
|
||||
|
||||
// Database version table
|
||||
if needsTable "db_version" then
|
||||
"CREATE TABLE db_version (id TEXT NOT NULL PRIMARY KEY)"
|
||||
$"INSERT INTO db_version VALUES ('{Utils.currentDbVersion}')"
|
||||
if needsTable Table.DbVersion then
|
||||
$"CREATE TABLE {Table.DbVersion} (id TEXT NOT NULL PRIMARY KEY)"
|
||||
$"INSERT INTO {Table.DbVersion} VALUES ('{Utils.currentDbVersion}')"
|
||||
}
|
||||
|
||||
Sql.existingConnection conn
|
||||
Sql.fromDataSource source
|
||||
|> Sql.executeTransactionAsync
|
||||
(sql
|
||||
|> Seq.map (fun s ->
|
||||
let parts = s.Split ' '
|
||||
let parts = s.Replace(" IF NOT EXISTS", "", System.StringComparison.OrdinalIgnoreCase).Split ' '
|
||||
if parts[1].ToLowerInvariant () = "table" then
|
||||
log.LogInformation $"Creating {parts[2]} table..."
|
||||
s, [ [] ])
|
||||
@ -212,18 +132,40 @@ type PostgresData (conn : NpgsqlConnection, log : ILogger<PostgresData>, ser : J
|
||||
}
|
||||
|
||||
/// Set a specific database version
|
||||
let setDbVersion version = backgroundTask {
|
||||
let! _ =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query $"DELETE FROM db_version; INSERT INTO db_version VALUES ('%s{version}')"
|
||||
|> Sql.executeNonQueryAsync
|
||||
()
|
||||
}
|
||||
let setDbVersion version =
|
||||
Custom.nonQuery $"DELETE FROM db_version; INSERT INTO db_version VALUES ('%s{version}')" []
|
||||
|
||||
/// Migrate from v2-rc2 to v2 (manual migration required)
|
||||
let migrateV2Rc2ToV2 () = backgroundTask {
|
||||
Utils.logMigrationStep log "v2-rc2 to v2" "Requires user action"
|
||||
|
||||
let! webLogs =
|
||||
Configuration.dataSource ()
|
||||
|> Sql.fromDataSource
|
||||
|> Sql.query $"SELECT url_base, slug FROM {Table.WebLog}"
|
||||
|> Sql.executeAsync (fun row -> row.string "url_base", row.string "slug")
|
||||
|
||||
[ "** MANUAL DATABASE UPGRADE REQUIRED **"; ""
|
||||
"The data structure for PostgreSQL changed significantly between v2-rc2 and v2."
|
||||
"To migrate your data:"
|
||||
" - Use a v2-rc2 executable to back up each web log"
|
||||
" - Drop all tables from the database"
|
||||
" - Use this executable to restore each backup"; ""
|
||||
"Commands to back up all web logs:"
|
||||
yield! webLogs |> List.map (fun (url, slug) -> sprintf "./myWebLog backup %s v2-rc2.%s.json" url slug)
|
||||
]
|
||||
|> String.concat "\n"
|
||||
|> log.LogWarning
|
||||
|
||||
log.LogCritical "myWebLog will now exit"
|
||||
exit 1
|
||||
}
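As an illustration only (not output from a real database): a web log stored with url_base https://blog.example.com and slug tech-blog would cause the warning above to log the command ./myWebLog backup https://blog.example.com v2-rc2.tech-blog.json; after the old tables are dropped, the same file is restored with this executable as described in the listed steps.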
|
||||
|
||||
/// Do required data migration between versions
|
||||
let migrate version = backgroundTask {
|
||||
match version with
|
||||
| Some "v2-rc2" -> ()
|
||||
| Some "v2" -> ()
|
||||
| Some "v2-rc2" -> do! migrateV2Rc2ToV2 ()
|
||||
// Future versions will be inserted here
|
||||
| Some _
|
||||
| None ->
|
||||
@ -233,26 +175,23 @@ type PostgresData (conn : NpgsqlConnection, log : ILogger<PostgresData>, ser : J
|
||||
|
||||
interface IData with
|
||||
|
||||
member _.Category = PostgresCategoryData conn
|
||||
member _.Page = PostgresPageData (conn, ser)
|
||||
member _.Post = PostgresPostData (conn, ser)
|
||||
member _.TagMap = PostgresTagMapData conn
|
||||
member _.Theme = PostgresThemeData conn
|
||||
member _.ThemeAsset = PostgresThemeAssetData conn
|
||||
member _.Upload = PostgresUploadData conn
|
||||
member _.WebLog = PostgresWebLogData (conn, ser)
|
||||
member _.WebLogUser = PostgresWebLogUserData conn
|
||||
member _.Category = PostgresCategoryData log
|
||||
member _.Page = PostgresPageData log
|
||||
member _.Post = PostgresPostData log
|
||||
member _.TagMap = PostgresTagMapData log
|
||||
member _.Theme = PostgresThemeData log
|
||||
member _.ThemeAsset = PostgresThemeAssetData log
|
||||
member _.Upload = PostgresUploadData log
|
||||
member _.WebLog = PostgresWebLogData log
|
||||
member _.WebLogUser = PostgresWebLogUserData log
|
||||
|
||||
member _.Serializer = ser
|
||||
|
||||
member _.StartUp () = backgroundTask {
|
||||
log.LogTrace "PostgresData.StartUp"
|
||||
do! ensureTables ()
|
||||
|
||||
let! version =
|
||||
Sql.existingConnection conn
|
||||
|> Sql.query "SELECT id FROM db_version"
|
||||
|> Sql.executeAsync (fun row -> row.string "id")
|
||||
|> tryHead
|
||||
let! version = Custom.single "SELECT id FROM db_version" [] (fun row -> row.string "id")
|
||||
match version with
|
||||
| Some v when v = Utils.currentDbVersion -> ()
|
||||
| Some _
|
||||
|
@ -5,7 +5,6 @@ open MyWebLog
|
||||
open RethinkDb.Driver
|
||||
|
||||
/// Functions to assist with retrieving data
|
||||
[<AutoOpen>]
|
||||
module private RethinkHelpers =
|
||||
|
||||
/// Table names
|
||||
@ -90,6 +89,7 @@ open System
|
||||
open Microsoft.Extensions.Logging
|
||||
open MyWebLog.ViewModels
|
||||
open RethinkDb.Driver.FSharp
|
||||
open RethinkHelpers
|
||||
|
||||
/// RethinkDB implementation of data functions for myWebLog
|
||||
type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<RethinkDbData>) =
|
||||
@ -214,11 +214,18 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
|
||||
logStep "Setting database version to v2-rc2"
|
||||
do! setDbVersion "v2-rc2"
|
||||
}
|
||||
|
||||
/// Migrate from v2-rc2 to v2
|
||||
let migrateV2Rc2ToV2 () = backgroundTask {
|
||||
Utils.logMigrationStep log "v2-rc2 to v2" "Setting database version; no migration required"
|
||||
do! setDbVersion "v2"
|
||||
}
|
||||
|
||||
/// Migrate data between versions
|
||||
let migrate version = backgroundTask {
|
||||
match version with
|
||||
| Some v when v = "v2-rc2" -> ()
|
||||
| Some v when v = "v2" -> ()
|
||||
| Some v when v = "v2-rc2" -> do! migrateV2Rc2ToV2 ()
|
||||
| Some v when v = "v2-rc1" -> do! migrateV2Rc1ToV2Rc2 ()
|
||||
| Some _
|
||||
| None ->
|
||||
|
@ -529,11 +529,18 @@ type SQLiteData (conn : SqliteConnection, log : ILogger<SQLiteData>, ser : JsonS
|
||||
do! setDbVersion "v2-rc2"
|
||||
}
|
||||
|
||||
/// Migrate from v2-rc2 to v2
|
||||
let migrateV2Rc2ToV2 () = backgroundTask {
|
||||
Utils.logMigrationStep log "v2-rc2 to v2" "Setting database version; no migration required"
|
||||
do! setDbVersion "v2"
|
||||
}
|
||||
|
||||
/// Migrate data among versions (up only)
|
||||
let migrate version = backgroundTask {
|
||||
|
||||
match version with
|
||||
| Some v when v = "v2-rc2" -> ()
|
||||
| Some v when v = "v2" -> ()
|
||||
| Some v when v = "v2-rc2" -> do! migrateV2Rc2ToV2 ()
|
||||
| Some v when v = "v2-rc1" -> do! migrateV2Rc1ToV2Rc2 ()
|
||||
| Some _
|
||||
| None ->
|
||||
|
@ -6,7 +6,7 @@ open MyWebLog
|
||||
open MyWebLog.ViewModels
|
||||
|
||||
/// The current database version
|
||||
let currentDbVersion = "v2-rc2"
|
||||
let currentDbVersion = "v2"
|
||||
|
||||
/// Create a category hierarchy from the given list of categories
|
||||
let rec orderByHierarchy (cats : Category list) parentId slugBase parentNames = seq {
|
||||
|
@ -7,10 +7,9 @@
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Markdig" Version="0.30.3" />
|
||||
<PackageReference Update="FSharp.Core" Version="6.0.5" />
|
||||
<PackageReference Include="Markdown.ColorCode" Version="1.0.1" />
|
||||
<PackageReference Include="NodaTime" Version="3.1.2" />
|
||||
<PackageReference Include="Markdig" Version="0.30.4" />
|
||||
<PackageReference Include="Markdown.ColorCode" Version="1.0.2" />
|
||||
<PackageReference Include="NodaTime" Version="3.1.6" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
@ -120,7 +120,6 @@ module ViewContext =
|
||||
/// The current web log
|
||||
[<Literal>]
|
||||
let WebLog = "web_log"
|
||||
|
||||
|
||||
|
||||
/// The HTTP item key for loading the session
|
||||
|
@ -12,9 +12,14 @@ let all pageNbr : HttpHandler = requireAccess Author >=> fun next ctx -> task {
|
||||
return!
|
||||
hashForPage "Pages"
|
||||
|> withAntiCsrf ctx
|
||||
|> addToHash "pages" (pages |> List.map (DisplayPage.fromPageMinimal ctx.WebLog))
|
||||
|> addToHash "pages" (pages
|
||||
|> Seq.ofList
|
||||
|> Seq.truncate 25
|
||||
|> Seq.map (DisplayPage.fromPageMinimal ctx.WebLog)
|
||||
|> List.ofSeq)
|
||||
|> addToHash "page_nbr" pageNbr
|
||||
|> addToHash "prev_page" (if pageNbr = 2 then "" else $"/page/{pageNbr - 1}")
|
||||
|> addToHash "has_next" (List.length pages > 25)
|
||||
|> addToHash "next_page" $"/page/{pageNbr + 1}"
|
||||
|> adminView "page-list" next ctx
|
||||
}
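The paging fix works because the underlying query is expected to return one more page than is displayed: 25 rows are rendered, and a 26th row only signals that another page exists. A minimal sketch of that fetch-one-extra pattern, with illustrative names:

    // Illustrative only — pageSize and fetchPages are not names from this commit
    let pageSize = 25
    let pageOfItems (fetchPages : int -> int -> System.Threading.Tasks.Task<'T list>) pageNbr = task {
        // ask for one row beyond the page size so the extra row can flag a following page
        let! pages   = fetchPages pageNbr (pageSize + 1)
        let hasNext  = List.length pages > pageSize
        let toRender = pages |> List.truncate pageSize
        return toRender, hasNext
    }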
|
||||
|
@ -23,14 +23,13 @@
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="DotLiquid" Version="2.2.656" />
|
||||
<PackageReference Include="DotLiquid" Version="2.2.682" />
|
||||
<PackageReference Include="Giraffe" Version="6.0.0" />
|
||||
<PackageReference Include="Giraffe.Htmx" Version="1.8.0" />
|
||||
<PackageReference Include="Giraffe.ViewEngine.Htmx" Version="1.8.0" />
|
||||
<PackageReference Include="Giraffe.Htmx" Version="1.8.5" />
|
||||
<PackageReference Include="Giraffe.ViewEngine.Htmx" Version="1.8.5" />
|
||||
<PackageReference Include="NeoSmart.Caching.Sqlite" Version="6.0.1" />
|
||||
<PackageReference Include="RethinkDB.DistributedCache" Version="1.0.0-rc1" />
|
||||
<PackageReference Update="FSharp.Core" Version="6.0.5" />
|
||||
<PackageReference Include="System.ServiceModel.Syndication" Version="6.0.0" />
|
||||
<PackageReference Include="System.ServiceModel.Syndication" Version="7.0.0" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
@ -10,7 +10,7 @@ type WebLogMiddleware (next : RequestDelegate, log : ILogger<WebLogMiddleware>)
|
||||
/// Is the debug level enabled on the logger?
|
||||
let isDebug = log.IsEnabled LogLevel.Debug
|
||||
|
||||
member this.InvokeAsync (ctx : HttpContext) = task {
|
||||
member _.InvokeAsync (ctx : HttpContext) = task {
|
||||
/// Create the full path of the request
|
||||
let path = $"{ctx.Request.Scheme}://{ctx.Request.Host.Value}{ctx.Request.Path.Value}"
|
||||
match WebLogCache.tryGet path with
|
||||
@ -36,10 +36,16 @@ open Npgsql
|
||||
module DataImplementation =
|
||||
|
||||
open MyWebLog.Converters
|
||||
// open Npgsql.Logging
|
||||
open RethinkDb.Driver.FSharp
|
||||
open RethinkDb.Driver.Net
|
||||
|
||||
/// Create an NpgsqlDataSource from the connection string, configuring appropriately
|
||||
let createNpgsqlDataSource (cfg : IConfiguration) =
|
||||
let builder = NpgsqlDataSourceBuilder (cfg.GetConnectionString "PostgreSQL")
|
||||
let _ = builder.UseNodaTime ()
|
||||
// let _ = builder.UseLoggerFactory(LoggerFactory.Create(fun it -> it.AddConsole () |> ignore))
|
||||
builder.Build ()
|
||||
|
||||
/// Get the configured data implementation
|
||||
let get (sp : IServiceProvider) : IData =
|
||||
let config = sp.GetRequiredService<IConfiguration> ()
|
||||
@ -62,11 +68,11 @@ module DataImplementation =
|
||||
let conn = await (rethinkCfg.CreateConnectionAsync log)
|
||||
RethinkDbData (conn, rethinkCfg, log)
|
||||
elif hasConnStr "PostgreSQL" then
|
||||
let source = createNpgsqlDataSource config
|
||||
use conn = source.CreateConnection ()
|
||||
let log = sp.GetRequiredService<ILogger<PostgresData>> ()
|
||||
// NpgsqlLogManager.Provider <- ConsoleLoggingProvider NpgsqlLogLevel.Debug
|
||||
let conn = new NpgsqlConnection (connStr "PostgreSQL")
|
||||
log.LogInformation $"Using PostgreSQL database {conn.Host}:{conn.Port}/{conn.Database}"
|
||||
PostgresData (conn, log, Json.configure (JsonSerializer.CreateDefault ()))
|
||||
log.LogInformation $"Using PostgreSQL database {conn.Database}"
|
||||
PostgresData (source, log, Json.configure (JsonSerializer.CreateDefault ()))
|
||||
else
|
||||
createSQLite "Data Source=./myweblog.db;Cache=Shared"
|
||||
|
||||
@ -152,16 +158,15 @@ let rec main args =
|
||||
let cachePath = defaultArg (Option.ofObj (cfg.GetConnectionString "SQLiteCachePath")) "./session.db"
|
||||
let _ = builder.Services.AddSqliteCache (fun o -> o.CachePath <- cachePath)
|
||||
()
|
||||
| :? PostgresData ->
|
||||
// ADO.NET connections are designed to work as per-request instantiation
|
||||
let cfg = sp.GetRequiredService<IConfiguration> ()
|
||||
| :? PostgresData as postgres ->
|
||||
// ADO.NET Data Sources are designed to work as singletons
|
||||
let _ =
|
||||
builder.Services.AddScoped<NpgsqlConnection> (fun sp ->
|
||||
new NpgsqlConnection (cfg.GetConnectionString "PostgreSQL"))
|
||||
let _ = builder.Services.AddScoped<IData, PostgresData> ()
|
||||
builder.Services.AddSingleton<NpgsqlDataSource> (fun sp ->
|
||||
DataImplementation.createNpgsqlDataSource (sp.GetRequiredService<IConfiguration> ()))
|
||||
let _ = builder.Services.AddSingleton<IData> postgres
|
||||
let _ =
|
||||
builder.Services.AddSingleton<IDistributedCache> (fun sp ->
|
||||
Postgres.DistributedCache (cfg.GetConnectionString "PostgreSQL") :> IDistributedCache)
|
||||
builder.Services.AddSingleton<IDistributedCache> (fun _ ->
|
||||
Postgres.DistributedCache () :> IDistributedCache)
|
||||
()
|
||||
| _ -> ()
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
{
|
||||
"Generator": "myWebLog 2.0-rc2",
|
||||
"Generator": "myWebLog 2.0",
|
||||
"Logging": {
|
||||
"LogLevel": {
|
||||
"MyWebLog.Handlers": "Information"
|
||||
|
@ -1,13 +1,19 @@
|
||||
<h2 class="my-3">{{ page_title }}</h2>
|
||||
<article>
|
||||
<form action="{{ "admin/settings/rss/save" | relative_link }}" method="post">
|
||||
<input type="hidden" name="{{ csrf.form_field_name }}" value="{{ csrf.request_token }}">
|
||||
<input type="hidden" name="Id" value="{{ model.id }}">
|
||||
<input
|
||||
type="hidden"
|
||||
name="{{ csrf.form_field_name }}"
|
||||
value="{{ csrf.request_token }}">
|
||||
<input
|
||||
type="hidden"
|
||||
name="Id"
|
||||
value="{{ model.id }}">
|
||||
{%- assign typ = model.source_type -%}
|
||||
<div class="container">
|
||||
<div class="row pb-3">
|
||||
<div class="col">
|
||||
<a href="{{ "admin/settings/rss" | relative_link }}">« Back to RSS Settings</a>
|
||||
<a href="{{ "admin/settings#rss-settings" | relative_link }}">« Back to Settings</a>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row pb-3">
|
||||
@ -17,8 +23,13 @@
|
||||
<div class="row">
|
||||
<div class="col">
|
||||
<div class="form-floating">
|
||||
<input type="text" name="Path" id="path" class="form-control" placeholder="Relative Feed Path"
|
||||
value="{{ model.path }}">
|
||||
<input
|
||||
type="text"
|
||||
name="Path"
|
||||
id="path"
|
||||
class="form-control"
|
||||
placeholder="Relative Feed Path"
|
||||
value="{{ model.path }}">
|
||||
<label for="path">Relative Feed Path</label>
|
||||
<span class="form-text fst-italic">Appended to {{ web_log.url_base }}/</span>
|
||||
</div>
|
||||
@ -27,8 +38,13 @@
|
||||
<div class="row">
|
||||
<div class="col py-3 d-flex align-self-center justify-content-center">
|
||||
<div class="form-check form-switch">
|
||||
<input type="checkbox" name="IsPodcast" id="isPodcast" class="form-check-input" value="true"
|
||||
{%- if model.is_podcast %} checked="checked"{% endif %} onclick="Admin.checkPodcast()">
|
||||
<input
|
||||
type="checkbox"
|
||||
name="IsPodcast"
|
||||
id="isPodcast"
|
||||
class="form-check-input"
|
||||
value="true"
|
||||
{%- if model.is_podcast %} checked="checked"{% endif %} onclick="Admin.checkPodcast()">
|
||||
<label for="isPodcast" class="form-check-label">This Is a Podcast Feed</label>
|
||||
</div>
|
||||
</div>
|
||||
@ -41,21 +57,31 @@
|
||||
<div class="row d-flex align-items-center">
|
||||
<div class="col-1 d-flex justify-content-end pb-3">
|
||||
<div class="form-check form-check-inline me-0">
|
||||
<input type="radio" name="SourceType" id="sourceTypeCat" class="form-check-input" value="category"
|
||||
{%- unless typ == "tag" %} checked="checked" {% endunless -%}
|
||||
onclick="Admin.customFeedBy('category')">
|
||||
<input
|
||||
type="radio"
|
||||
name="SourceType"
|
||||
id="sourceTypeCat"
|
||||
class="form-check-input"
|
||||
value="category"
|
||||
{%- unless typ == "tag" %}checked="checked"{% endunless -%}onclick="Admin.customFeedBy('category')">
|
||||
<label for="sourceTypeCat" class="form-check-label d-none">Category</label>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-11 pb-3">
|
||||
<div class="form-floating">
|
||||
<select name="SourceValue" id="sourceValueCat" class="form-control" required
|
||||
{%- if typ == "tag" %} disabled="disabled"{% endif %}>
|
||||
<select
|
||||
name="SourceValue"
|
||||
id="sourceValueCat"
|
||||
class="form-control"
|
||||
required
|
||||
{%- if typ == "tag" %}disabled="disabled"{% endif %}>
|
||||
<option value="">– Select Category –</option>
|
||||
{% for cat in categories -%}
|
||||
<option value="{{ cat.id }}"
|
||||
{%- if typ != "tag" and model.source_value == cat.id %} selected="selected"{% endif -%}>
|
||||
{% for it in cat.parent_names %}{{ it }} ⟩ {% endfor %}{{ cat.name }}
|
||||
<option value="{{ cat.id }}"{%- if typ != "tag" and model.source_value == cat.id %}selected="selected"{% endif -%}>
|
||||
{% for it in cat.parent_names %}
|
||||
{{ it }} ⟩
|
||||
{% endfor %}
|
||||
{{ cat.name }}
|
||||
</option>
|
||||
{%- endfor %}
|
||||
</select>
|
||||
@ -64,16 +90,25 @@
|
||||
</div>
|
||||
<div class="col-1 d-flex justify-content-end pb-3">
|
||||
<div class="form-check form-check-inline me-0">
|
||||
<input type="radio" name="SourceType" id="sourceTypeTag" class="form-check-input" value="tag"
|
||||
{%- if typ == "tag" %} checked="checked"{% endif %} onclick="Admin.customFeedBy('tag')">
|
||||
<input
|
||||
type="radio"
|
||||
name="SourceType"
|
||||
id="sourceTypeTag"
|
||||
class="form-check-input"
|
||||
value="tag"
|
||||
{%- if typ == "tag" %}checked="checked"{% endif %}onclick="Admin.customFeedBy('tag')">
|
||||
<label for="sourceTypeTag" class="form-check-label d-none">Tag</label>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-11 pb-3">
|
||||
<div class="form-floating">
|
||||
<input type="text" name="SourceValue" id="sourceValueTag" class="form-control" placeholder="Tag"
|
||||
{%- unless typ == "tag" %} disabled="disabled"{% endunless %} required
|
||||
{%- if typ == "tag" %} value="{{ model.source_value }}"{% endif %}>
|
||||
<input
|
||||
type="text"
|
||||
name="SourceValue"
|
||||
id="sourceValueTag"
|
||||
class="form-control"
|
||||
placeholder="Tag"
|
||||
{%- unless typ == "tag" %}disabled="disabled"{% endunless %}required{%- if typ == "tag" %}value="{{ model.source_value }}"{% endif %}>
|
||||
<label for="sourceValueTag">Tag</label>
|
||||
</div>
|
||||
</div>
|
||||
@ -83,27 +118,47 @@
|
||||
</div>
|
||||
<div class="row pb-3">
|
||||
<div class="col">
|
||||
<fieldset class="container" id="podcastFields"{% unless model.is_podcast %} disabled="disabled"{%endunless%}>
|
||||
<fieldset
|
||||
class="container"
|
||||
id="podcastFields"
|
||||
{% unless model.is_podcast %}disabled="disabled"{% endunless %}>
|
||||
<legend>Podcast Settings</legend>
|
||||
<div class="row">
|
||||
<div class="col-12 col-md-5 col-lg-4 offset-lg-1 pb-3">
|
||||
<div class="form-floating">
|
||||
<input type="text" name="Title" id="title" class="form-control" placeholder="Title" required
|
||||
value="{{ model.title }}">
|
||||
<input
|
||||
type="text"
|
||||
name="Title"
|
||||
id="title"
|
||||
class="form-control"
|
||||
placeholder="Title"
|
||||
required
|
||||
value="{{ model.title }}">
|
||||
<label for="title">Title</label>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-12 col-md-4 col-lg-4 pb-3">
|
||||
<div class="form-floating">
|
||||
<input type="text" name="Subtitle" id="subtitle" class="form-control" placeholder="Subtitle"
|
||||
value="{{ model.subtitle }}">
|
||||
<input
|
||||
type="text"
|
||||
name="Subtitle"
|
||||
id="subtitle"
|
||||
class="form-control"
|
||||
placeholder="Subtitle"
|
||||
value="{{ model.subtitle }}">
|
||||
<label for="subtitle">Podcast Subtitle</label>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-12 col-md-3 col-lg-2 pb-3">
|
||||
<div class="form-floating">
|
||||
<input type="number" name="ItemsInFeed" id="itemsInFeed" class="form-control" placeholder="Items"
|
||||
required value="{{ model.items_in_feed }}">
|
||||
<input
|
||||
type="number"
|
||||
name="ItemsInFeed"
|
||||
id="itemsInFeed"
|
||||
class="form-control"
|
||||
placeholder="Items"
|
||||
required
|
||||
value="{{ model.items_in_feed }}">
|
||||
<label for="itemsInFeed"># Episodes</label>
|
||||
</div>
|
||||
</div>
|
||||
@ -111,12 +166,20 @@
|
||||
<div class="row">
|
||||
<div class="col-12 col-md-5 col-lg-4 offset-lg-1 pb-3">
|
||||
<div class="form-floating">
|
||||
<input type="text" name="AppleCategory" id="appleCategory" class="form-control"
|
||||
placeholder="iTunes Category" required value="{{ model.apple_category }}">
|
||||
<input
|
||||
type="text"
|
||||
name="AppleCategory"
|
||||
id="appleCategory"
|
||||
class="form-control"
|
||||
placeholder="iTunes Category"
|
||||
required
|
||||
value="{{ model.apple_category }}">
|
||||
<label for="appleCategory">iTunes Category</label>
|
||||
<span class="form-text fst-italic">
|
||||
<a href="https://www.thepodcasthost.com/planning/itunes-podcast-categories/" target="_blank"
|
||||
rel="noopener">
|
||||
<a
|
||||
href="https://www.thepodcasthost.com/planning/itunes-podcast-categories/"
|
||||
target="_blank"
|
||||
rel="noopener">
|
||||
iTunes Category / Subcategory List
|
||||
</a>
|
||||
</span>
|
||||
@ -124,17 +187,26 @@
|
||||
</div>
|
||||
<div class="col-12 col-md-4 pb-3">
|
||||
<div class="form-floating">
|
||||
<input type="text" name="AppleSubcategory" id="appleSubcategory" class="form-control"
|
||||
placeholder="iTunes Subcategory" value="{{ model.apple_subcategory }}">
|
||||
<input
|
||||
type="text"
|
||||
name="AppleSubcategory"
|
||||
id="appleSubcategory"
|
||||
class="form-control"
|
||||
placeholder="iTunes Subcategory"
|
||||
value="{{ model.apple_subcategory }}">
|
||||
<label for="appleSubcategory">iTunes Subcategory</label>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-12 col-md-3 col-lg-2 pb-3">
|
||||
<div class="form-floating">
|
||||
<select name="Explicit" id="explicit" class="form-control" required>
|
||||
<option value="yes"{% if model.explicit == "yes" %} selected="selected"{% endif %}>Yes</option>
|
||||
<option value="no"{% if model.explicit == "no" %} selected="selected"{% endif %}>No</option>
|
||||
<option value="clean"{% if model.explicit == "clean" %} selected="selected"{% endif %}>
|
||||
<select
|
||||
name="Explicit"
|
||||
id="explicit"
|
||||
class="form-control"
|
||||
required>
|
||||
<option value="yes" {% if model.explicit == "yes" %}selected="selected"{% endif %}>Yes</option>
|
||||
<option value="no" {% if model.explicit == "no" %}selected="selected"{% endif %}>No</option>
|
||||
<option value="clean" {% if model.explicit == "clean" %}selected="selected"{% endif %}>
|
||||
Clean
|
||||
</option>
|
||||
</select>
|
||||
@ -145,31 +217,54 @@
|
||||
<div class="row">
|
||||
<div class="col-12 col-md-6 col-lg-4 offset-xxl-1 pb-3">
|
||||
<div class="form-floating">
|
||||
<input type="text" name="DisplayedAuthor" id="displayedAuthor" class="form-control"
|
||||
placeholder="Author" required value="{{ model.displayed_author }}">
|
||||
<input
|
||||
type="text"
|
||||
name="DisplayedAuthor"
|
||||
id="displayedAuthor"
|
||||
class="form-control"
|
||||
placeholder="Author"
|
||||
required
|
||||
value="{{ model.displayed_author }}">
|
||||
<label for="displayedAuthor">Displayed Author</label>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-12 col-md-6 col-lg-4 pb-3">
|
||||
<div class="form-floating">
|
||||
<input type="email" name="Email" id="email" class="form-control" placeholder="Email" required
|
||||
value="{{ model.email }}">
|
||||
<input
|
||||
type="email"
|
||||
name="Email"
|
||||
id="email"
|
||||
class="form-control"
|
||||
placeholder="Email"
|
||||
required
|
||||
value="{{ model.email }}">
|
||||
<label for="email">Author E-mail</label>
|
||||
<span class="form-text fst-italic">For iTunes, must match registered e-mail</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-12 col-sm-5 col-md-4 col-lg-4 col-xl-3 offset-xl-1 col-xxl-2 offset-xxl-0">
|
||||
<div class="form-floating">
|
||||
<input type="text" name="DefaultMediaType" id="defaultMediaType" class="form-control"
|
||||
placeholder="Media Type" value="{{ model.default_media_type }}">
|
||||
<input
|
||||
type="text"
|
||||
name="DefaultMediaType"
|
||||
id="defaultMediaType"
|
||||
class="form-control"
|
||||
placeholder="Media Type"
|
||||
value="{{ model.default_media_type }}">
|
||||
<label for="defaultMediaType">Default Media Type</label>
|
||||
<span class="form-text fst-italic">Optional; blank for no default</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-12 col-sm-7 col-md-8 col-lg-10 offset-lg-1">
|
||||
<div class="form-floating">
|
||||
<input type="text" name="ImageUrl" id="imageUrl" class="form-control" placeholder="Image URL" required
|
||||
value="{{ model.image_url }}">
|
||||
<input
|
||||
type="text"
|
||||
name="ImageUrl"
|
||||
id="imageUrl"
|
||||
class="form-control"
|
||||
placeholder="Image URL"
|
||||
required
|
||||
value="{{ model.image_url }}">
|
||||
<label for="imageUrl">Image URL</label>
|
||||
<span class="form-text fst-italic">Relative URL will be appended to {{ web_log.url_base }}/</span>
|
||||
</div>
|
||||
@ -178,8 +273,14 @@
|
||||
<div class="row pb-3">
|
||||
<div class="col-12 col-lg-10 offset-lg-1">
|
||||
<div class="form-floating">
|
||||
<input type="text" name="Summary" id="summary" class="form-control" placeholder="Summary" required
|
||||
value="{{ model.summary }}">
|
||||
<input
|
||||
type="text"
|
||||
name="Summary"
|
||||
id="summary"
|
||||
class="form-control"
|
||||
placeholder="Summary"
|
||||
required
|
||||
value="{{ model.summary }}">
|
||||
<label for="summary">Summary</label>
|
||||
<span class="form-text fst-italic">Displayed in podcast directories</span>
|
||||
</div>
|
||||
@ -188,8 +289,13 @@
|
||||
<div class="row pb-3">
|
||||
<div class="col-12 col-lg-10 offset-lg-1">
|
||||
<div class="form-floating">
|
||||
<input type="text" name="MediaBaseUrl" id="mediaBaseUrl" class="form-control"
|
||||
placeholder="Media Base URL" value="{{ model.media_base_url }}">
|
||||
<input
|
||||
type="text"
|
||||
name="MediaBaseUrl"
|
||||
id="mediaBaseUrl"
|
||||
class="form-control"
|
||||
placeholder="Media Base URL"
|
||||
value="{{ model.media_base_url }}">
|
||||
<label for="mediaBaseUrl">Media Base URL</label>
|
||||
<span class="form-text fst-italic">Optional; prepended to episode media file if present</span>
|
||||
</div>
|
||||
@ -198,8 +304,13 @@
|
||||
<div class="row">
|
||||
<div class="col-12 col-lg-5 offset-lg-1 pb-3">
|
||||
<div class="form-floating">
|
||||
<input type="text" name="FundingUrl" id="fundingUrl" class="form-control" placeholder="Funding URL"
|
||||
value="{{ model.funding_url }}">
|
||||
<input
|
||||
type="text"
|
||||
name="FundingUrl"
|
||||
id="fundingUrl"
|
||||
class="form-control"
|
||||
placeholder="Funding URL"
|
||||
value="{{ model.funding_url }}">
|
||||
<label for="fundingUrl">Funding URL</label>
|
||||
<span class="form-text fst-italic">
|
||||
Optional; URL describing donation options for this podcast, relative URL supported
|
||||
@ -208,8 +319,14 @@
|
||||
</div>
|
||||
<div class="col-12 col-lg-5 pb-3">
|
||||
<div class="form-floating">
|
||||
<input type="text" name="FundingText" id="fundingText" class="form-control" maxlength="128"
|
||||
placeholder="Funding Text" value="{{ model.funding_text }}">
|
||||
<input
|
||||
type="text"
|
||||
name="FundingText"
|
||||
id="fundingText"
|
||||
class="form-control"
|
||||
maxlength="128"
|
||||
placeholder="Funding Text"
|
||||
value="{{ model.funding_text }}">
|
||||
<label for="fundingText">Funding Text</label>
|
||||
<span class="form-text fst-italic">Optional; text for the funding link</span>
|
||||
</div>
|
||||
@ -218,21 +335,28 @@
|
||||
<div class="row pb-3">
|
||||
<div class="col-8 col-lg-5 offset-lg-1 pb-3">
|
||||
<div class="form-floating">
|
||||
<input type="text" name="PodcastGuid" id="guid" class="form-control" placeholder="GUID"
|
||||
value="{{ model.podcast_guid }}">
|
||||
<input
|
||||
type="text"
|
||||
name="PodcastGuid"
|
||||
id="guid"
|
||||
class="form-control"
|
||||
placeholder="GUID"
|
||||
value="{{ model.podcast_guid }}">
|
||||
<label for="guid">Podcast GUID</label>
|
||||
<span class="form-text fst-italic">
|
||||
Optional; v5 UUID uniquely identifying this podcast; once entered, do not change this value
|
||||
(<a href="https://github.com/Podcastindex-org/podcast-namespace/blob/main/docs/1.0.md#guid"
|
||||
target="_blank">documentation</a>)
|
||||
(<a href="https://github.com/Podcastindex-org/podcast-namespace/blob/main/docs/1.0.md#guid" target="_blank">documentation</a>)
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-4 col-lg-3 offset-lg-2 pb-3">
|
||||
<div class="form-floating">
|
||||
<select name="Medium" id="medium" class="form-control">
|
||||
<select
|
||||
name="Medium"
|
||||
id="medium"
|
||||
class="form-control">
|
||||
{% for med in medium_values -%}
|
||||
<option value="{{ med[0] }}"{% if model.medium == med[0] %} selected{% endif %}>
|
||||
<option value="{{ med[0] }}"{% if model.medium == med[0] %}selected{% endif %}>
|
||||
{{ med[1] }}
|
||||
</option>
|
||||
{%- endfor %}
|
||||
@ -240,8 +364,7 @@
|
||||
<label for="medium">Medium</label>
|
||||
<span class="form-text fst-italic">
|
||||
Optional; medium of the podcast content
|
||||
(<a href="https://github.com/Podcastindex-org/podcast-namespace/blob/main/docs/1.0.md#medium"
|
||||
target="_blank">documentation</a>)
|
||||
(<a href="https://github.com/Podcastindex-org/podcast-namespace/blob/main/docs/1.0.md#medium" target="_blank">documentation</a>)
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
@ -256,4 +379,4 @@
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
</article>
|
||||
</article>
|
@@ -6,7 +6,7 @@
{%- assign title_col = "col-12 col-md-5" -%}
{%- assign link_col = "col-12 col-md-5" -%}
{%- assign upd8_col = "col-12 col-md-2" -%}
<form method="post" class="container" hx-target="body">
<form method="post" class="container mb-3" hx-target="body">
<input type="hidden" name="{{ csrf.form_field_name }}" value="{{ csrf.request_token }}">
<div class="row mwl-table-heading">
<div class="{{ title_col }}">
@@ -49,7 +49,7 @@
</div>
{%- endfor %}
</form>
{% if page_nbr > 1 or page_count == 25 %}
{% if page_nbr > 1 or has_next %}
<div class="d-flex justify-content-evenly mb-3">
<div>
{% if page_nbr > 1 %}
@@ -61,7 +61,7 @@
{% endif %}
</div>
<div class="text-right">
{% if page_count == 25 %}
{% if has_next %}
<p>
<a class="btn btn-secondary" href="{{ "admin/pages" | append: next_page | relative_link }}">
Next »
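The paging hunks above replace the hard-coded page_count == 25 test with a has_next flag, so the "Next" link only renders when another page actually exists. A minimal sketch of how such a flag could be derived server-side, assuming a hypothetical getItems data-access helper (illustrative only, not the actual myWebLog implementation):

// Fetch one row beyond the page size; the presence of that extra row signals a next page.
let pageSize = 25

let pageOf (getItems: int -> int -> 'T list) pageNbr =
    // getItems takes an offset and a row count (assumed helper, not a real myWebLog API)
    let items   = getItems ((pageNbr - 1) * pageSize) (pageSize + 1)
    let hasNext = List.length items > pageSize
    List.truncate pageSize items, hasNext

Requesting one extra row avoids a separate count query while still telling the template whether to show the next-page link.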
@@ -3,7 +3,7 @@
<a href="{{ "admin/post/new/edit" | relative_link }}" class="btn btn-primary btn-sm mb-3">Write a New Post</a>
{%- assign post_count = model.posts | size -%}
{%- if post_count > 0 %}
<form method="post" class="container" hx-target="body">
<form method="post" class="container mb-3" hx-target="body">
<input type="hidden" name="{{ csrf.form_field_name }}" value="{{ csrf.request_token }}">
{%- assign date_col = "col-xs-12 col-md-3 col-lg-2" -%}
{%- assign title_col = "col-xs-12 col-md-7 col-lg-6 col-xl-5 col-xxl-4" -%}
@@ -1,2 +1,2 @@
myWebLog Admin
2.0.0-rc2
2.0.0
@@ -334,27 +334,34 @@ this.Admin = {
const theToast = new bootstrap.Toast(toast, options)
theToast.show()
})
},

/**
* Initialize any toasts that were pre-rendered from the server
*/
showPreRenderedMessages() {
[...document.querySelectorAll(".toast")].forEach(el => {
if (el.getAttribute("data-mwl-shown") === "true" && el.className.indexOf("hide") >= 0) {
document.removeChild(el)
} else {
const toast = new bootstrap.Toast(el,
el.getAttribute("data-bs-autohide") === "false"
? { autohide: false } : { delay: 6000, autohide: true })
toast.show()
el.setAttribute("data-mwl-shown", "true")
}
})
}
}

htmx.on("htmx:afterOnLoad", function (evt) {
const hdrs = evt.detail.xhr.getAllResponseHeaders()
// Initialize any toasts that were pre-rendered from the server
Admin.showPreRenderedMessages()
// Show messages if there were any in the response
if (hdrs.indexOf("x-message") >= 0) {
Admin.showMessage(evt.detail.xhr.getResponseHeader("x-message"))
}
// Initialize any toasts that were pre-rendered from the server
[...document.querySelectorAll(".toast")].forEach(el => {
if (el.getAttribute("data-mwl-shown") === "true" && el.className.indexOf("hide") >= 0) {
document.removeChild(el)
} else {
const toast = new bootstrap.Toast(el,
el.getAttribute("data-bs-autohide") === "false"
? { autohide: false } : { delay: 6000, autohide: true })
toast.show()
el.setAttribute("data-mwl-shown", "true")
}
})
})

htmx.on("htmx:responseError", function (evt) {
@@ -365,3 +372,5 @@ htmx.on("htmx:responseError", function (evt) {
Admin.showMessage(`danger|||${xhr.status}: ${xhr.statusText}`)
}
})

document.addEventListener("DOMContentLoaded", Admin.showPreRenderedMessages, { once: true})
@@ -1,2 +1,2 @@
myWebLog Default Theme
2.0.0-rc2
2.0.0