Compare commits

3 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 7b325dc19e |  |
|  | 5f3daa1de9 |  |
|  | 1ec664ad24 |  |
@@ -1,12 +0,0 @@
{
  "version": 1,
  "isRoot": true,
  "tools": {
    "fake-cli": {
      "version": "5.22.0",
      "commands": [
        "fake"
      ]
    }
  }
}
166  build.fs  Normal file
@@ -0,0 +1,166 @@
open System.IO
open Fake.Core
open Fake.DotNet
open Fake.IO
open Fake.IO.Globbing.Operators

let execContext = Context.FakeExecutionContext.Create false "build.fsx" []
Context.setExecutionContext (Context.RuntimeContext.Fake execContext)

/// The output directory for release ZIPs
let releasePath = "releases"

/// The path to the main project
let projectPath = "src/MyWebLog"

/// The path and name of the main project
let projName = $"{projectPath}/MyWebLog.fsproj"

/// The version being packaged (extracted from appsettings.json)
let version =
    let settings = File.ReadAllText $"{projectPath}/appsettings.json"
    let generator = settings.Substring (settings.IndexOf "\"Generator\":")
    let appVersion = generator.Replace("\"Generator\": \"", "")
    let appVersion = appVersion.Substring (0, appVersion.IndexOf "\"")
    appVersion.Split ' ' |> Array.last

/// Zip a theme distributed with myWebLog
let zipTheme (name : string) (_ : TargetParameter) =
    let path = $"src/{name}-theme"
    !! $"{path}/**/*"
    |> Zip.filesAsSpecs path
    |> Seq.filter (fun (_, name) -> not (name.EndsWith ".zip"))
    |> Zip.zipSpec $"{releasePath}/{name}-theme.zip"

/// Frameworks supported by this build
let frameworks = [ "net6.0"; "net7.0" ]

/// Publish the project for the given runtime ID
let publishFor rid (_ : TargetParameter) =
    frameworks
    |> List.iter (fun fwk ->
        DotNet.publish
            (fun opts ->
                { opts with Runtime = Some rid; SelfContained = Some false; NoLogo = true; Framework = Some fwk })
            projName)

/// Package published output for the given runtime ID
let packageFor rid (_ : TargetParameter) =
    frameworks
    |> List.iter (fun fwk ->
        let path = $"{projectPath}/bin/Release/{fwk}/%s{rid}/publish"
        let prodSettings = $"{path}/appsettings.Production.json"
        if File.exists prodSettings then File.delete prodSettings
        [ !! $"{path}/**/*"
          |> Zip.filesAsSpecs path
          |> Seq.map (fun (orig, dest) ->
              orig, if dest.StartsWith "MyWebLog" then dest.Replace ("MyWebLog", "myWebLog") else dest)
          Seq.singleton ($"{releasePath}/admin-theme.zip", "admin-theme.zip")
          Seq.singleton ($"{releasePath}/default-theme.zip", "default-theme.zip")
        ]
        |> Seq.concat
        |> Zip.zipSpec $"{releasePath}/myWebLog-{version}.{fwk}.{rid}.zip")


Target.create "Clean" (fun _ ->
    !! "src/**/bin"
    ++ "src/**/obj"
    |> Shell.cleanDirs
    Shell.cleanDir releasePath
)

Target.create "Build" (fun _ ->
    DotNet.build (fun opts -> { opts with NoLogo = true }) projName
)

Target.create "ZipAdminTheme" (zipTheme "admin")
Target.create "ZipDefaultTheme" (zipTheme "default")

Target.create "PublishWindows" (publishFor "win-x64")
Target.create "PackageWindows" (packageFor "win-x64")

Target.create "PublishLinux" (publishFor "linux-x64")
Target.create "PackageLinux" (packageFor "linux-x64")

Target.create "RepackageLinux" (fun _ ->
    let workDir = $"{releasePath}/linux"
    frameworks
    |> List.iter (fun fwk ->
        let zipArchive = $"{releasePath}/myWebLog-{version}.{fwk}.linux-x64.zip"
        let sh command args =
            CreateProcess.fromRawCommand command args
            |> CreateProcess.redirectOutput
            |> Proc.run
            |> ignore
        Shell.mkdir workDir
        Zip.unzip workDir zipArchive
        Shell.cd workDir
        sh "chmod" [ "+x"; "./myWebLog" ]
        sh "tar" [ "cfj"; $"../myWebLog-{version}.{fwk}.linux-x64.tar.bz2"; "." ]
        Shell.cd "../.."
        Shell.rm zipArchive)
    Shell.rm_rf workDir
)

Target.create "All" ignore

Target.create "RemoveThemeArchives" (fun _ ->
    Shell.rm $"{releasePath}/admin-theme.zip"
    Shell.rm $"{releasePath}/default-theme.zip"
)

Target.create "CI" ignore

open Fake.Core.TargetOperators

let dependencies = [
    "Clean"
        ==> "All"

    "Clean"
        ?=> "Build"
        ==> "All"

    "Clean"
        ?=> "ZipDefaultTheme"
        ==> "All"

    "Clean"
        ?=> "ZipAdminTheme"
        ==> "All"

    "Build"
        ==> "PublishWindows"
        ==> "All"

    "Build"
        ==> "PublishLinux"
        ==> "All"

    "PublishWindows"
        ==> "PackageWindows"
        ==> "All"

    "PublishLinux"
        ==> "PackageLinux"
        ==> "All"

    "PackageLinux"
        ==> "RepackageLinux"
        ==> "All"

    "All"
        ==> "RemoveThemeArchives"
        ==> "CI"
]

[<EntryPoint>]
let main args =
    try
        match args with
        | [| target |] -> Target.runOrDefault target
        | _ -> Target.runOrDefault "All"
        0
    with e ->
        printfn "%A" e
        1
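The version number used in the ZIP names above is scraped from the `"Generator"` value in appsettings.json rather than from a project file. A minimal, standalone sketch of that same parsing (the JSON literal here is illustrative, not taken from the repository):

```fsharp
// Standalone sketch of the version parsing used by build.fs above.
// The sample value is hypothetical; the real one comes from src/MyWebLog/appsettings.json.
let sample = """{ "Generator": "myWebLog 2.0.0" }"""

let versionOf (settings : string) =
    let generator = settings.Substring (settings.IndexOf "\"Generator\":")
    let appVersion = generator.Replace ("\"Generator\": \"", "")
    let appVersion = appVersion.Substring (0, appVersion.IndexOf "\"")
    appVersion.Split ' ' |> Array.last

printfn "%s" (versionOf sample)  // prints 2.0.0
```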
20  build.fsproj  Normal file
@@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">

    <PropertyGroup>
        <OutputType>Exe</OutputType>
        <TargetFramework>net7.0</TargetFramework>
    </PropertyGroup>

    <ItemGroup>
        <Compile Include="build.fs" />
    </ItemGroup>

    <ItemGroup>
        <PackageReference Include="Fake.Core.Target" Version="5.23.1" />
        <PackageReference Include="Fake.DotNet.Cli" Version="5.23.1" />
        <PackageReference Include="Fake.IO.FileSystem" Version="5.23.1" />
        <PackageReference Include="Fake.IO.Zip" Version="5.23.1" />
        <PackageReference Include="MSBuild.StructuredLogger" Version="2.1.768" />
    </ItemGroup>

</Project>
147  build.fsx
@@ -1,147 +0,0 @@
#r "paket:
nuget Fake.DotNet.Cli
nuget Fake.IO.FileSystem
nuget Fake.IO.Zip
nuget Fake.Core.Target //"
#load ".fake/build.fsx/intellisense.fsx"
open System.IO
open Fake.Core
open Fake.DotNet
open Fake.IO
open Fake.IO.Globbing.Operators
open Fake.Core.TargetOperators

Target.initEnvironment ()

/// The output directory for release ZIPs
let releasePath = "releases"

/// The path to the main project
let projectPath = "src/MyWebLog"

/// The path and name of the main project
let projName = $"{projectPath}/MyWebLog.fsproj"

/// The version being packaged (extracted from appsettings.json)
let version =
    let settings = File.ReadAllText $"{projectPath}/appsettings.json"
    let generator = settings.Substring (settings.IndexOf "\"Generator\":")
    let appVersion = generator.Replace("\"Generator\": \"", "")
    let appVersion = appVersion.Substring (0, appVersion.IndexOf "\"")
    appVersion.Split ' ' |> Array.last

/// Zip a theme distributed with myWebLog
let zipTheme (name : string) (_ : TargetParameter) =
    let path = $"src/{name}-theme"
    !! $"{path}/**/*"
    |> Zip.filesAsSpecs path
    |> Seq.filter (fun (_, name) -> not (name.EndsWith ".zip"))
    |> Zip.zipSpec $"{releasePath}/{name}-theme.zip"

/// Publish the project for the given runtime ID
let publishFor rid (_ : TargetParameter) =
    DotNet.publish (fun opts -> { opts with Runtime = Some rid; SelfContained = Some false; NoLogo = true }) projName

/// Package published output for the given runtime ID
let packageFor (rid : string) (_ : TargetParameter) =
    let path = $"{projectPath}/bin/Release/net6.0/{rid}/publish"
    let prodSettings = $"{path}/appsettings.Production.json"
    if File.exists prodSettings then File.delete prodSettings
    [ !! $"{path}/**/*"
      |> Zip.filesAsSpecs path
      |> Seq.map (fun (orig, dest) ->
          orig, if dest.StartsWith "MyWebLog" then dest.Replace ("MyWebLog", "myWebLog") else dest)
      Seq.singleton ($"{releasePath}/admin-theme.zip", "admin-theme.zip")
      Seq.singleton ($"{releasePath}/default-theme.zip", "default-theme.zip")
    ]
    |> Seq.concat
    |> Zip.zipSpec $"{releasePath}/myWebLog-{version}.{rid}.zip"


Target.create "Clean" (fun _ ->
    !! "src/**/bin"
    ++ "src/**/obj"
    |> Shell.cleanDirs
    Shell.cleanDir releasePath
)

Target.create "Build" (fun _ ->
    DotNet.build (fun opts -> { opts with NoLogo = true }) projName
)

Target.create "ZipAdminTheme" (zipTheme "admin")
Target.create "ZipDefaultTheme" (zipTheme "default")

Target.create "PublishWindows" (publishFor "win-x64")
Target.create "PackageWindows" (packageFor "win-x64")

Target.create "PublishLinux" (publishFor "linux-x64")
Target.create "PackageLinux" (packageFor "linux-x64")

Target.create "RepackageLinux" (fun _ ->
    let workDir = $"{releasePath}/linux"
    let zipArchive = $"{releasePath}/myWebLog-{version}.linux-x64.zip"
    let sh command args =
        CreateProcess.fromRawCommand command args
        |> CreateProcess.redirectOutput
        |> Proc.run
        |> ignore
    Shell.mkdir workDir
    Zip.unzip workDir zipArchive
    Shell.cd workDir
    sh "chmod" [ "+x"; "./myWebLog" ]
    sh "tar" [ "cfj"; $"../myWebLog-{version}.linux-x64.tar.bz2"; "." ]
    Shell.cd "../.."
    Shell.rm zipArchive
    Shell.rm_rf workDir
)

Target.create "All" ignore

Target.create "RemoveThemeArchives" (fun _ ->
    Shell.rm $"{releasePath}/admin-theme.zip"
    Shell.rm $"{releasePath}/default-theme.zip"
)

Target.create "CI" ignore

"Clean"
    ==> "All"

"Clean"
    ?=> "Build"
    ==> "All"

"Clean"
    ?=> "ZipDefaultTheme"
    ==> "All"

"Clean"
    ?=> "ZipAdminTheme"
    ==> "All"

"Build"
    ==> "PublishWindows"
    ==> "All"

"Build"
    ==> "PublishLinux"
    ==> "All"

"PublishWindows"
    ==> "PackageWindows"
    ==> "All"

"PublishLinux"
    ==> "PackageLinux"
    ==> "All"

"PackageLinux"
    ==> "RepackageLinux"
    ==> "All"

"All"
    ==> "RemoveThemeArchives"
    ==> "CI"

Target.runOrDefault "All"
227  build.fsx.lock
@@ -1,227 +0,0 @@
STORAGE: NONE
RESTRICTION: || (== net6.0) (== netstandard2.0)
NUGET
  remote: https://api.nuget.org/v3/index.json
    BlackFox.VsWhere (1.1)
      FSharp.Core (>= 4.2.3)
      Microsoft.Win32.Registry (>= 4.7)
    Fake.Core.CommandLineParsing (5.22)
      FParsec (>= 1.1.1)
      FSharp.Core (>= 6.0)
    Fake.Core.Context (5.22)
      FSharp.Core (>= 6.0)
    Fake.Core.Environment (5.22)
      FSharp.Core (>= 6.0)
    Fake.Core.FakeVar (5.22)
      Fake.Core.Context (>= 5.22)
      FSharp.Core (>= 6.0)
    Fake.Core.Process (5.22)
      Fake.Core.Environment (>= 5.22)
      Fake.Core.FakeVar (>= 5.22)
      Fake.Core.String (>= 5.22)
      Fake.Core.Trace (>= 5.22)
      Fake.IO.FileSystem (>= 5.22)
      FSharp.Core (>= 6.0)
      System.Collections.Immutable (>= 5.0)
    Fake.Core.SemVer (5.22)
      FSharp.Core (>= 6.0)
    Fake.Core.String (5.22)
      FSharp.Core (>= 6.0)
    Fake.Core.Target (5.22)
      Fake.Core.CommandLineParsing (>= 5.22)
      Fake.Core.Context (>= 5.22)
      Fake.Core.Environment (>= 5.22)
      Fake.Core.FakeVar (>= 5.22)
      Fake.Core.Process (>= 5.22)
      Fake.Core.String (>= 5.22)
      Fake.Core.Trace (>= 5.22)
      FSharp.Control.Reactive (>= 5.0.2)
      FSharp.Core (>= 6.0)
    Fake.Core.Tasks (5.22)
      Fake.Core.Trace (>= 5.22)
      FSharp.Core (>= 6.0)
    Fake.Core.Trace (5.22)
      Fake.Core.Environment (>= 5.22)
      Fake.Core.FakeVar (>= 5.22)
      FSharp.Core (>= 6.0)
    Fake.Core.Xml (5.22)
      Fake.Core.String (>= 5.22)
      FSharp.Core (>= 6.0)
    Fake.DotNet.Cli (5.22)
      Fake.Core.Environment (>= 5.22)
      Fake.Core.Process (>= 5.22)
      Fake.Core.String (>= 5.22)
      Fake.Core.Trace (>= 5.22)
      Fake.DotNet.MSBuild (>= 5.22)
      Fake.DotNet.NuGet (>= 5.22)
      Fake.IO.FileSystem (>= 5.22)
      FSharp.Core (>= 6.0)
      Mono.Posix.NETStandard (>= 1.0)
      Newtonsoft.Json (>= 13.0.1)
    Fake.DotNet.MSBuild (5.22)
      BlackFox.VsWhere (>= 1.1)
      Fake.Core.Environment (>= 5.22)
      Fake.Core.Process (>= 5.22)
      Fake.Core.String (>= 5.22)
      Fake.Core.Trace (>= 5.22)
      Fake.IO.FileSystem (>= 5.22)
      FSharp.Core (>= 6.0)
      MSBuild.StructuredLogger (>= 2.1.545)
    Fake.DotNet.NuGet (5.22)
      Fake.Core.Environment (>= 5.22)
      Fake.Core.Process (>= 5.22)
      Fake.Core.SemVer (>= 5.22)
      Fake.Core.String (>= 5.22)
      Fake.Core.Tasks (>= 5.22)
      Fake.Core.Trace (>= 5.22)
      Fake.Core.Xml (>= 5.22)
      Fake.IO.FileSystem (>= 5.22)
      Fake.Net.Http (>= 5.22)
      FSharp.Core (>= 6.0)
      Newtonsoft.Json (>= 13.0.1)
      NuGet.Protocol (>= 5.11)
    Fake.IO.FileSystem (5.22)
      Fake.Core.String (>= 5.22)
      FSharp.Core (>= 6.0)
    Fake.IO.Zip (5.22)
      Fake.Core.String (>= 5.22)
      Fake.IO.FileSystem (>= 5.22)
      FSharp.Core (>= 6.0)
    Fake.Net.Http (5.22)
      Fake.Core.Trace (>= 5.22)
      FSharp.Core (>= 6.0)
    FParsec (1.1.1)
      FSharp.Core (>= 4.3.4)
    FSharp.Control.Reactive (5.0.5)
      FSharp.Core (>= 4.7.2)
      System.Reactive (>= 5.0 < 6.0)
    FSharp.Core (6.0.5)
    Microsoft.Build (17.2)
      Microsoft.Build.Framework (>= 17.2) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
      Microsoft.NET.StringTools (>= 1.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
      Microsoft.Win32.Registry (>= 4.3) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net6.0))
      System.Collections.Immutable (>= 5.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
      System.Configuration.ConfigurationManager (>= 4.7) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
      System.Reflection.Metadata (>= 1.6) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net6.0))
      System.Security.Principal.Windows (>= 4.7) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net6.0))
      System.Text.Encoding.CodePages (>= 4.0.1) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net6.0))
      System.Text.Json (>= 6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
      System.Threading.Tasks.Dataflow (>= 6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
    Microsoft.Build.Framework (17.2)
      Microsoft.Win32.Registry (>= 4.3)
      System.Security.Permissions (>= 4.7)
    Microsoft.Build.Tasks.Core (17.2)
      Microsoft.Build.Framework (>= 17.2)
      Microsoft.Build.Utilities.Core (>= 17.2)
      Microsoft.NET.StringTools (>= 1.0)
      Microsoft.Win32.Registry (>= 4.3)
      System.CodeDom (>= 4.4)
      System.Collections.Immutable (>= 5.0)
      System.Reflection.Metadata (>= 1.6)
      System.Resources.Extensions (>= 4.6)
      System.Security.Cryptography.Pkcs (>= 4.7)
      System.Security.Cryptography.Xml (>= 4.7)
      System.Security.Permissions (>= 4.7)
      System.Threading.Tasks.Dataflow (>= 6.0)
    Microsoft.Build.Utilities.Core (17.2)
      Microsoft.Build.Framework (>= 17.2)
      Microsoft.NET.StringTools (>= 1.0)
      Microsoft.Win32.Registry (>= 4.3)
      System.Collections.Immutable (>= 5.0)
      System.Configuration.ConfigurationManager (>= 4.7)
      System.Security.Permissions (>= 4.7) - restriction: == netstandard2.0
      System.Text.Encoding.CodePages (>= 4.0.1) - restriction: == netstandard2.0
    Microsoft.NET.StringTools (1.0)
      System.Memory (>= 4.5.4)
      System.Runtime.CompilerServices.Unsafe (>= 5.0)
    Microsoft.NETCore.Platforms (6.0.4) - restriction: || (&& (== net6.0) (< netcoreapp3.1)) (&& (== net6.0) (< netstandard1.2)) (&& (== net6.0) (< netstandard1.3)) (&& (== net6.0) (< netstandard1.5)) (== netstandard2.0)
    Microsoft.NETCore.Targets (5.0) - restriction: || (&& (== net6.0) (< netcoreapp3.1)) (&& (== net6.0) (< netstandard1.2)) (&& (== net6.0) (< netstandard1.3)) (&& (== net6.0) (< netstandard1.5)) (== netstandard2.0)
    Microsoft.Win32.Registry (5.0)
      System.Buffers (>= 4.5.1) - restriction: || (&& (== net6.0) (>= monoandroid) (< netstandard1.3)) (&& (== net6.0) (>= monotouch)) (&& (== net6.0) (< netcoreapp2.0)) (&& (== net6.0) (>= xamarinios)) (&& (== net6.0) (>= xamarinmac)) (&& (== net6.0) (>= xamarintvos)) (&& (== net6.0) (>= xamarinwatchos)) (== netstandard2.0)
      System.Memory (>= 4.5.4) - restriction: || (&& (== net6.0) (< netcoreapp2.0)) (&& (== net6.0) (< netcoreapp2.1)) (&& (== net6.0) (>= uap10.1)) (== netstandard2.0)
      System.Security.AccessControl (>= 5.0)
      System.Security.Principal.Windows (>= 5.0)
    Microsoft.Win32.SystemEvents (6.0.1) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.1))
    Mono.Posix.NETStandard (1.0)
    MSBuild.StructuredLogger (2.1.669)
      Microsoft.Build (>= 16.10)
      Microsoft.Build.Framework (>= 16.10)
      Microsoft.Build.Tasks.Core (>= 16.10)
      Microsoft.Build.Utilities.Core (>= 16.10)
    Newtonsoft.Json (13.0.1)
    NuGet.Common (6.2.1)
      NuGet.Frameworks (>= 6.2.1)
    NuGet.Configuration (6.2.1)
      NuGet.Common (>= 6.2.1)
      System.Security.Cryptography.ProtectedData (>= 4.4)
    NuGet.Frameworks (6.2.1)
    NuGet.Packaging (6.2.1)
      Newtonsoft.Json (>= 13.0.1)
      NuGet.Configuration (>= 6.2.1)
      NuGet.Versioning (>= 6.2.1)
      System.Security.Cryptography.Cng (>= 5.0)
      System.Security.Cryptography.Pkcs (>= 5.0)
    NuGet.Protocol (6.2.1)
      NuGet.Packaging (>= 6.2.1)
    NuGet.Versioning (6.2.1)
    System.Buffers (4.5.1) - restriction: || (&& (== net6.0) (>= monoandroid) (< netstandard1.3)) (&& (== net6.0) (>= monotouch)) (&& (== net6.0) (< netcoreapp2.0)) (&& (== net6.0) (>= xamarinios)) (&& (== net6.0) (>= xamarinmac)) (&& (== net6.0) (>= xamarintvos)) (&& (== net6.0) (>= xamarinwatchos)) (== netstandard2.0)
    System.CodeDom (6.0)
    System.Collections.Immutable (6.0)
      System.Memory (>= 4.5.4) - restriction: || (&& (== net6.0) (>= net461)) (== netstandard2.0)
      System.Runtime.CompilerServices.Unsafe (>= 6.0)
    System.Configuration.ConfigurationManager (6.0)
      System.Security.Cryptography.ProtectedData (>= 6.0)
      System.Security.Permissions (>= 6.0)
    System.Drawing.Common (6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.1))
      Microsoft.Win32.SystemEvents (>= 6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.1))
    System.Formats.Asn1 (6.0)
      System.Buffers (>= 4.5.1) - restriction: || (&& (== net6.0) (>= net461)) (== netstandard2.0)
      System.Memory (>= 4.5.4) - restriction: || (&& (== net6.0) (>= net461)) (== netstandard2.0)
    System.Memory (4.5.5)
      System.Buffers (>= 4.5.1) - restriction: || (&& (== net6.0) (>= monotouch)) (&& (== net6.0) (>= net461)) (&& (== net6.0) (< netcoreapp2.0)) (&& (== net6.0) (< netstandard1.1)) (&& (== net6.0) (< netstandard2.0)) (&& (== net6.0) (>= xamarinios)) (&& (== net6.0) (>= xamarinmac)) (&& (== net6.0) (>= xamarintvos)) (&& (== net6.0) (>= xamarinwatchos)) (== netstandard2.0)
      System.Numerics.Vectors (>= 4.4) - restriction: || (&& (== net6.0) (< netcoreapp2.0)) (== netstandard2.0)
      System.Runtime.CompilerServices.Unsafe (>= 4.5.3) - restriction: || (&& (== net6.0) (>= monotouch)) (&& (== net6.0) (>= net461)) (&& (== net6.0) (< netcoreapp2.0)) (&& (== net6.0) (< netcoreapp2.1)) (&& (== net6.0) (< netstandard1.1)) (&& (== net6.0) (< netstandard2.0)) (&& (== net6.0) (>= uap10.1)) (&& (== net6.0) (>= xamarinios)) (&& (== net6.0) (>= xamarinmac)) (&& (== net6.0) (>= xamarintvos)) (&& (== net6.0) (>= xamarinwatchos)) (== netstandard2.0)
    System.Numerics.Vectors (4.5) - restriction: || (&& (== net6.0) (>= net461)) (== netstandard2.0)
    System.Reactive (5.0)
      System.Runtime.InteropServices.WindowsRuntime (>= 4.3) - restriction: || (&& (== net6.0) (< netcoreapp3.1)) (== netstandard2.0)
      System.Threading.Tasks.Extensions (>= 4.5.4) - restriction: || (&& (== net6.0) (>= net472)) (&& (== net6.0) (< netcoreapp3.1)) (&& (== net6.0) (>= uap10.1)) (== netstandard2.0)
    System.Reflection.Metadata (6.0.1)
      System.Collections.Immutable (>= 6.0)
    System.Resources.Extensions (6.0)
      System.Memory (>= 4.5.4) - restriction: || (&& (== net6.0) (>= net461)) (== netstandard2.0)
    System.Runtime (4.3.1) - restriction: || (&& (== net6.0) (< netcoreapp3.1)) (== netstandard2.0)
      Microsoft.NETCore.Platforms (>= 1.1.1)
      Microsoft.NETCore.Targets (>= 1.1.3)
    System.Runtime.CompilerServices.Unsafe (6.0)
    System.Runtime.InteropServices.WindowsRuntime (4.3) - restriction: || (&& (== net6.0) (< netcoreapp3.1)) (== netstandard2.0)
      System.Runtime (>= 4.3)
    System.Security.AccessControl (6.0)
      System.Security.Principal.Windows (>= 5.0) - restriction: || (&& (== net6.0) (>= net461)) (== netstandard2.0)
    System.Security.Cryptography.Cng (5.0)
      System.Formats.Asn1 (>= 5.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.0))
    System.Security.Cryptography.Pkcs (6.0.1)
      System.Buffers (>= 4.5.1) - restriction: || (&& (== net6.0) (< netstandard2.1)) (== netstandard2.0)
      System.Formats.Asn1 (>= 6.0)
      System.Memory (>= 4.5.4) - restriction: || (&& (== net6.0) (< netstandard2.1)) (== netstandard2.0)
      System.Security.Cryptography.Cng (>= 5.0) - restriction: || (&& (== net6.0) (< netcoreapp3.1)) (&& (== net6.0) (< netstandard2.1)) (== netstandard2.0)
    System.Security.Cryptography.ProtectedData (6.0)
    System.Security.Cryptography.Xml (6.0)
      System.Memory (>= 4.5.4) - restriction: == netstandard2.0
      System.Security.AccessControl (>= 6.0)
      System.Security.Cryptography.Pkcs (>= 6.0)
    System.Security.Permissions (6.0)
      System.Security.AccessControl (>= 6.0)
      System.Windows.Extensions (>= 6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.1))
    System.Security.Principal.Windows (5.0)
    System.Text.Encoding.CodePages (6.0)
      System.Runtime.CompilerServices.Unsafe (>= 6.0)
    System.Text.Encodings.Web (6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
      System.Runtime.CompilerServices.Unsafe (>= 6.0)
    System.Text.Json (6.0.5) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
      System.Runtime.CompilerServices.Unsafe (>= 6.0)
      System.Text.Encodings.Web (>= 6.0)
    System.Threading.Tasks.Dataflow (6.0)
    System.Threading.Tasks.Extensions (4.5.4) - restriction: || (&& (== net6.0) (>= net472)) (&& (== net6.0) (< netcoreapp3.1)) (&& (== net6.0) (>= uap10.1)) (== netstandard2.0)
      System.Runtime.CompilerServices.Unsafe (>= 4.5.3) - restriction: || (&& (== net6.0) (>= net461)) (&& (== net6.0) (< netcoreapp2.1)) (&& (== net6.0) (< netstandard1.0)) (&& (== net6.0) (< netstandard2.0)) (&& (== net6.0) (>= wp8)) (== netstandard2.0)
    System.Windows.Extensions (6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.1))
      System.Drawing.Common (>= 6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.1))
7  fake.sh
@@ -1,7 +0,0 @@
#!/usr/bin/env bash

set -eu
set -o pipefail

dotnet tool restore
dotnet fake "$@"
@@ -1,10 +1,9 @@
<Project>
    <PropertyGroup>
        <TargetFramework>net6.0</TargetFramework>
        <TargetFrameworks>net6.0;net7.0</TargetFrameworks>
        <DebugType>embedded</DebugType>
        <AssemblyVersion>2.0.0.0</AssemblyVersion>
        <FileVersion>2.0.0.0</FileVersion>
        <Version>2.0.0</Version>
        <VersionSuffix>rc1</VersionSuffix>
    </PropertyGroup>
</Project>
@@ -122,12 +122,13 @@ module Json =
            (string >> WebLogUserId) reader.Value

    open Microsoft.FSharpLu.Json
    open NodaTime
    open NodaTime.Serialization.JsonNet

    /// All converters to use for data conversion
    let all () : JsonConverter seq =
        seq {
    /// Configure a serializer to use these converters
    let configure (ser : JsonSerializer) =
        // Our converters
            CategoryIdConverter ()
        [ CategoryIdConverter () :> JsonConverter
          CommentIdConverter ()
          CustomFeedIdConverter ()
          CustomFeedSourceConverter ()
@@ -143,6 +144,36 @@ module Json =
          UploadIdConverter ()
          WebLogIdConverter ()
          WebLogUserIdConverter ()
        ] |> List.iter ser.Converters.Add
        // NodaTime
        let _ = ser.ConfigureForNodaTime DateTimeZoneProviders.Tzdb
            // Handles DUs with no associated data, as well as option fields
            CompactUnionJsonConverter ()
        }
        ser.Converters.Add (CompactUnionJsonConverter ())
        ser.NullValueHandling <- NullValueHandling.Ignore
        ser.MissingMemberHandling <- MissingMemberHandling.Ignore
        ser

    /// Serializer settings extracted from a JsonSerializer (a property sure would be nice...)
    let mutable private serializerSettings : JsonSerializerSettings option = None

    /// Extract settings from the serializer to be used in JsonConvert calls
    let settings (ser : JsonSerializer) =
        if Option.isNone serializerSettings then
            serializerSettings <- JsonSerializerSettings (
                ConstructorHandling = ser.ConstructorHandling,
                ContractResolver = ser.ContractResolver,
                Converters = ser.Converters,
                DefaultValueHandling = ser.DefaultValueHandling,
                DateFormatHandling = ser.DateFormatHandling,
                DateParseHandling = ser.DateParseHandling,
                MetadataPropertyHandling = ser.MetadataPropertyHandling,
                MissingMemberHandling = ser.MissingMemberHandling,
                NullValueHandling = ser.NullValueHandling,
                ObjectCreationHandling = ser.ObjectCreationHandling,
                ReferenceLoopHandling = ser.ReferenceLoopHandling,
                SerializationBinder = ser.SerializationBinder,
                TraceWriter = ser.TraceWriter,
                TypeNameAssemblyFormatHandling = ser.TypeNameAssemblyFormatHandling,
                TypeNameHandling = ser.TypeNameHandling)
            |> Some
        serializerSettings.Value
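The new `configure` function above mutates a Newtonsoft.Json serializer in place, and `settings` mirrors its configuration into a `JsonSerializerSettings` for `JsonConvert`-style calls. A hedged usage sketch (the serializer construction and the round-tripped value are illustrative, not part of this changeset; it assumes the functions are reachable as `Json.configure` / `Json.settings`, per the hunk header `module Json =`):

```fsharp
// Illustrative only: build a serializer with the converters shown above,
// then reuse its settings with JsonConvert.
open Newtonsoft.Json

let serializer = Json.configure (JsonSerializer.CreateDefault ())
let jsonConfig = Json.settings serializer

// Round-trip an option value; CompactUnionJsonConverter (registered above) handles it.
let roundTripped =
    let text = JsonConvert.SerializeObject (Some "hello", jsonConfig)
    JsonConvert.DeserializeObject<string option> (text, jsonConfig)
```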
@@ -1,9 +1,10 @@
namespace MyWebLog.Data

open System
open System.Threading.Tasks
open MyWebLog
open MyWebLog.ViewModels
open Newtonsoft.Json
open NodaTime

/// The result of a category deletion attempt
type CategoryDeleteResult =
@@ -137,7 +138,7 @@ type IPostData =
        WebLogId -> tag : string -> pageNbr : int -> postsPerPage : int -> Task<Post list>

    /// Find the next older and newer post for the given published date/time (excluding revisions and prior permalinks)
    abstract member FindSurroundingPosts : WebLogId -> publishedOn : DateTime -> Task<Post option * Post option>
    abstract member FindSurroundingPosts : WebLogId -> publishedOn : Instant -> Task<Post option * Post option>

    /// Restore posts from a backup
    abstract member Restore : Post list -> Task<unit>
@@ -326,6 +327,9 @@ type IData =
    /// Web log user data functions
    abstract member WebLogUser : IWebLogUserData

    /// A JSON serializer for use in persistence
    abstract member Serializer : JsonSerializer

    /// Do any required start up data checks
    abstract member StartUp : unit -> Task<unit>
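The `IPostData.FindSurroundingPosts` signature above swaps `System.DateTime` for NodaTime's `Instant`; callers still holding a UTC `DateTime` can convert before calling it. A minimal sketch (the date value is arbitrary, only the conversion is the point):

```fsharp
// Converting an existing UTC DateTime to the Instant now required by FindSurroundingPosts.
open System
open NodaTime

let publishedOn = DateTime (2023, 2, 18, 12, 0, 0, DateTimeKind.Utc)
let asInstant   = Instant.FromDateTimeUtc publishedOn
```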
@@ -5,13 +5,16 @@
    </ItemGroup>

    <ItemGroup>
        <PackageReference Include="Microsoft.Data.Sqlite" Version="6.0.7" />
        <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="6.0.0" />
        <PackageReference Include="BitBadger.Npgsql.FSharp.Documents" Version="1.0.0-beta2" />
        <PackageReference Include="Microsoft.Data.Sqlite" Version="7.0.3" />
        <PackageReference Include="Microsoft.Extensions.Caching.Abstractions" Version="7.0.0" />
        <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="7.0.0" />
        <PackageReference Include="Microsoft.FSharpLu.Json" Version="0.11.7" />
        <PackageReference Include="Newtonsoft.Json" Version="13.0.1" />
        <PackageReference Include="Newtonsoft.Json" Version="13.0.2" />
        <PackageReference Include="NodaTime.Serialization.JsonNet" Version="3.0.1" />
        <PackageReference Include="Npgsql.NodaTime" Version="7.0.2" />
        <PackageReference Include="RethinkDb.Driver" Version="2.3.150" />
        <PackageReference Include="RethinkDb.Driver.FSharp" Version="0.9.0-beta-07" />
        <PackageReference Update="FSharp.Core" Version="6.0.5" />
    </ItemGroup>

    <ItemGroup>
@@ -29,6 +32,17 @@
        <Compile Include="SQLite\SQLiteWebLogData.fs" />
        <Compile Include="SQLite\SQLiteWebLogUserData.fs" />
        <Compile Include="SQLiteData.fs" />
        <Compile Include="Postgres\PostgresHelpers.fs" />
        <Compile Include="Postgres\PostgresCache.fs" />
        <Compile Include="Postgres\PostgresCategoryData.fs" />
        <Compile Include="Postgres\PostgresPageData.fs" />
        <Compile Include="Postgres\PostgresPostData.fs" />
        <Compile Include="Postgres\PostgresTagMapData.fs" />
        <Compile Include="Postgres\PostgresThemeData.fs" />
        <Compile Include="Postgres\PostgresUploadData.fs" />
        <Compile Include="Postgres\PostgresWebLogData.fs" />
        <Compile Include="Postgres\PostgresWebLogUserData.fs" />
        <Compile Include="PostgresData.fs" />
    </ItemGroup>

</Project>
183  src/MyWebLog.Data/Postgres/PostgresCache.fs  Normal file
@@ -0,0 +1,183 @@
namespace MyWebLog.Data.Postgres

open System.Threading
open System.Threading.Tasks
open BitBadger.Npgsql.FSharp.Documents
open Microsoft.Extensions.Caching.Distributed
open NodaTime
open Npgsql.FSharp

/// Helper types and functions for the cache
[<AutoOpen>]
module private Helpers =

    /// The cache entry
    type Entry =
        {   /// The ID of the cache entry
            Id : string

            /// The value to be cached
            Payload : byte[]

            /// When this entry will expire
            ExpireAt : Instant

            /// The duration by which the expiration should be pushed out when being refreshed
            SlidingExpiration : Duration option

            /// The must-expire-by date/time for the cache entry
            AbsoluteExpiration : Instant option
        }

    /// Run a task synchronously
    let sync<'T> (it : Task<'T>) = it |> (Async.AwaitTask >> Async.RunSynchronously)

    /// Get the current instant
    let getNow () = SystemClock.Instance.GetCurrentInstant ()

    /// Create a parameter for the expire-at time
    let expireParam =
        typedParam "expireAt"


/// A distributed cache implementation in PostgreSQL used to handle sessions for myWebLog
type DistributedCache () =

    // ~~~ INITIALIZATION ~~~

    do
        task {
            let! exists =
                Custom.scalar
                    $"SELECT EXISTS
                        (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = 'session')
                      AS {existsName}" [] Map.toExists
            if not exists then
                do! Custom.nonQuery
                        "CREATE TABLE session (
                            id TEXT NOT NULL PRIMARY KEY,
                            payload BYTEA NOT NULL,
                            expire_at TIMESTAMPTZ NOT NULL,
                            sliding_expiration INTERVAL,
                            absolute_expiration TIMESTAMPTZ);
                        CREATE INDEX idx_session_expiration ON session (expire_at)" []
        } |> sync

    // ~~~ SUPPORT FUNCTIONS ~~~

    /// Get an entry, updating it for sliding expiration
    let getEntry key = backgroundTask {
        let idParam = "@id", Sql.string key
        let! tryEntry =
            Custom.single "SELECT * FROM session WHERE id = @id" [ idParam ]
                (fun row ->
                    { Id = row.string "id"
                      Payload = row.bytea "payload"
                      ExpireAt = row.fieldValue<Instant> "expire_at"
                      SlidingExpiration = row.fieldValueOrNone<Duration> "sliding_expiration"
                      AbsoluteExpiration = row.fieldValueOrNone<Instant> "absolute_expiration" })
        match tryEntry with
        | Some entry ->
            let now = getNow ()
            let slideExp = defaultArg entry.SlidingExpiration Duration.MinValue
            let absExp = defaultArg entry.AbsoluteExpiration Instant.MinValue
            let needsRefresh, item =
                if entry.ExpireAt = absExp then false, entry
                elif slideExp = Duration.MinValue && absExp = Instant.MinValue then false, entry
                elif absExp > Instant.MinValue && entry.ExpireAt.Plus slideExp > absExp then
                    true, { entry with ExpireAt = absExp }
                else true, { entry with ExpireAt = now.Plus slideExp }
            if needsRefresh then
                do! Custom.nonQuery "UPDATE session SET expire_at = @expireAt WHERE id = @id"
                        [ expireParam item.ExpireAt; idParam ]
                ()
            return if item.ExpireAt > now then Some entry else None
        | None -> return None
    }

    /// The last time expired entries were purged (runs every 30 minutes)
    let mutable lastPurge = Instant.MinValue

    /// Purge expired entries every 30 minutes
    let purge () = backgroundTask {
        let now = getNow ()
        if lastPurge.Plus (Duration.FromMinutes 30L) < now then
            do! Custom.nonQuery "DELETE FROM session WHERE expire_at < @expireAt" [ expireParam now ]
            lastPurge <- now
    }

    /// Remove a cache entry
    let removeEntry key =
        Delete.byId "session" key

    /// Save an entry
    let saveEntry (opts : DistributedCacheEntryOptions) key payload =
        let now = getNow ()
        let expireAt, slideExp, absExp =
            if opts.SlidingExpiration.HasValue then
                let slide = Duration.FromTimeSpan opts.SlidingExpiration.Value
                now.Plus slide, Some slide, None
            elif opts.AbsoluteExpiration.HasValue then
                let exp = Instant.FromDateTimeOffset opts.AbsoluteExpiration.Value
                exp, None, Some exp
            elif opts.AbsoluteExpirationRelativeToNow.HasValue then
                let exp = now.Plus (Duration.FromTimeSpan opts.AbsoluteExpirationRelativeToNow.Value)
                exp, None, Some exp
            else
                // Default to 1 hour sliding expiration
                let slide = Duration.FromHours 1
                now.Plus slide, Some slide, None
        Custom.nonQuery
            "INSERT INTO session (
                id, payload, expire_at, sliding_expiration, absolute_expiration
            ) VALUES (
                @id, @payload, @expireAt, @slideExp, @absExp
            ) ON CONFLICT (id) DO UPDATE
            SET payload = EXCLUDED.payload,
                expire_at = EXCLUDED.expire_at,
                sliding_expiration = EXCLUDED.sliding_expiration,
                absolute_expiration = EXCLUDED.absolute_expiration"
            [ "@id", Sql.string key
              "@payload", Sql.bytea payload
              expireParam expireAt
              optParam "slideExp" slideExp
              optParam "absExp" absExp ]

    // ~~~ IMPLEMENTATION FUNCTIONS ~~~

    /// Retrieve the data for a cache entry
    let get key (_ : CancellationToken) = backgroundTask {
        match! getEntry key with
        | Some entry ->
            do! purge ()
            return entry.Payload
        | None -> return null
    }

    /// Refresh an entry
    let refresh key (cancelToken : CancellationToken) = backgroundTask {
        let! _ = get key cancelToken
        ()
    }

    /// Remove an entry
    let remove key (_ : CancellationToken) = backgroundTask {
        do! removeEntry key
        do! purge ()
    }

    /// Set an entry
    let set key value options (_ : CancellationToken) = backgroundTask {
        do! saveEntry options key value
        do! purge ()
    }

    interface IDistributedCache with
        member _.Get key = get key CancellationToken.None |> sync
        member _.GetAsync (key, token) = get key token
        member _.Refresh key = refresh key CancellationToken.None |> sync
        member _.RefreshAsync (key, token) = refresh key token
        member _.Remove key = remove key CancellationToken.None |> sync
        member _.RemoveAsync (key, token) = remove key token
        member _.Set (key, value, options) = set key value options CancellationToken.None |> sync
        member _.SetAsync (key, value, options, token) = set key value options token
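`DistributedCache` above implements `IDistributedCache`, so it can stand in for the usual Redis or SQL Server session stores. A minimal registration sketch for an ASP.NET Core host (this wiring is an assumption for illustration, not part of this changeset, and it presumes the document library's Npgsql data source has already been configured elsewhere at startup):

```fsharp
// Hypothetical DI wiring; not part of this changeset.
open Microsoft.Extensions.Caching.Distributed
open Microsoft.Extensions.DependencyInjection
open MyWebLog.Data.Postgres

let configureSession (services : IServiceCollection) =
    // Back ASP.NET Core session state with the PostgreSQL-based cache above
    services.AddSingleton<IDistributedCache, DistributedCache> () |> ignore
    services.AddSession () |> ignore
```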
149  src/MyWebLog.Data/Postgres/PostgresCategoryData.fs  Normal file
@@ -0,0 +1,149 @@
namespace MyWebLog.Data.Postgres

open BitBadger.Npgsql.FSharp.Documents
open Microsoft.Extensions.Logging
open MyWebLog
open MyWebLog.Data
open Npgsql.FSharp

/// PostgreSQL myWebLog category data implementation
type PostgresCategoryData (log : ILogger) =

    /// Count all categories for the given web log
    let countAll webLogId =
        log.LogTrace "Category.countAll"
        Count.byContains Table.Category (webLogDoc webLogId)

    /// Count all top-level categories for the given web log
    let countTopLevel webLogId =
        log.LogTrace "Category.countTopLevel"
        Count.byContains Table.Category {| webLogDoc webLogId with ParentId = None |}

    /// Retrieve all categories for the given web log in a DotLiquid-friendly format
    let findAllForView webLogId = backgroundTask {
        log.LogTrace "Category.findAllForView"
        let! cats =
            Custom.list $"{selectWithCriteria Table.Category} ORDER BY LOWER(data ->> '{nameof Category.empty.Name}')"
                [ webLogContains webLogId ] fromData<Category>
        let ordered = Utils.orderByHierarchy cats None None []
        let counts =
            ordered
            |> Seq.map (fun it ->
                // Parent category post counts include posts in subcategories
                let catIdSql, catIdParams =
                    ordered
                    |> Seq.filter (fun cat -> cat.ParentNames |> Array.contains it.Name)
                    |> Seq.map (fun cat -> cat.Id)
                    |> Seq.append (Seq.singleton it.Id)
                    |> List.ofSeq
                    |> arrayContains (nameof Post.empty.CategoryIds) id
                let postCount =
                    Custom.scalar
                        $"""SELECT COUNT(DISTINCT id) AS {countName}
                              FROM {Table.Post}
                             WHERE {Query.whereDataContains "@criteria"}
                               AND {catIdSql}"""
                        [ "@criteria",
                            Query.jsonbDocParam {| webLogDoc webLogId with Status = PostStatus.toString Published |}
                          catIdParams
                        ] Map.toCount
                    |> Async.AwaitTask
                    |> Async.RunSynchronously
                it.Id, postCount)
            |> List.ofSeq
        return
            ordered
            |> Seq.map (fun cat ->
                { cat with
                    PostCount = counts
                                |> List.tryFind (fun c -> fst c = cat.Id)
                                |> Option.map snd
                                |> Option.defaultValue 0
                })
            |> Array.ofSeq
    }
    /// Find a category by its ID for the given web log
    let findById catId webLogId =
        log.LogTrace "Category.findById"
        Document.findByIdAndWebLog<CategoryId, Category> Table.Category catId CategoryId.toString webLogId

    /// Find all categories for the given web log
    let findByWebLog webLogId =
        log.LogTrace "Category.findByWebLog"
        Document.findByWebLog<Category> Table.Category webLogId

    /// Create parameters for a category insert / update
    let catParameters (cat : Category) =
        Query.docParameters (CategoryId.toString cat.Id) cat

    /// Delete a category
    let delete catId webLogId = backgroundTask {
        log.LogTrace "Category.delete"
        match! findById catId webLogId with
        | Some cat ->
            // Reassign any children to the category's parent category
            let! children = Find.byContains<Category> Table.Category {| ParentId = CategoryId.toString catId |}
            let hasChildren = not (List.isEmpty children)
            if hasChildren then
                let! _ =
                    Configuration.dataSource ()
                    |> Sql.fromDataSource
                    |> Sql.executeTransactionAsync [
                        Query.Update.partialById Table.Category,
                        children |> List.map (fun child -> [
                            "@id", Sql.string (CategoryId.toString child.Id)
                            "@data", Query.jsonbDocParam {| ParentId = cat.ParentId |}
                        ])
                    ]
                ()
            // Delete the category off all posts where it is assigned
            let! posts =
                Custom.list $"SELECT data FROM {Table.Post} WHERE data -> '{nameof Post.empty.CategoryIds}' @> @id"
                    [ "@id", Query.jsonbDocParam [| CategoryId.toString catId |] ] fromData<Post>
            if not (List.isEmpty posts) then
                let! _ =
                    Configuration.dataSource ()
                    |> Sql.fromDataSource
                    |> Sql.executeTransactionAsync [
                        Query.Update.partialById Table.Post,
                        posts |> List.map (fun post -> [
                            "@id", Sql.string (PostId.toString post.Id)
                            "@data", Query.jsonbDocParam
                                {| CategoryIds = post.CategoryIds |> List.filter (fun cat -> cat <> catId) |}
                        ])
                    ]
                ()
            // Delete the category itself
            do! Delete.byId Table.Category (CategoryId.toString catId)
            return if hasChildren then ReassignedChildCategories else CategoryDeleted
        | None -> return CategoryNotFound
    }

    /// Save a category
    let save (cat : Category) = backgroundTask {
        log.LogTrace "Category.save"
        do! save Table.Category (CategoryId.toString cat.Id) cat
    }

    /// Restore categories from a backup
    let restore cats = backgroundTask {
        log.LogTrace "Category.restore"
        let! _ =
            Configuration.dataSource ()
            |> Sql.fromDataSource
            |> Sql.executeTransactionAsync [
                Query.insert Table.Category, cats |> List.map catParameters
            ]
        ()
    }

    interface ICategoryData with
        member _.Add cat = save cat
        member _.CountAll webLogId = countAll webLogId
        member _.CountTopLevel webLogId = countTopLevel webLogId
        member _.FindAllForView webLogId = findAllForView webLogId
        member _.FindById catId webLogId = findById catId webLogId
        member _.FindByWebLog webLogId = findByWebLog webLogId
        member _.Delete catId webLogId = delete catId webLogId
        member _.Restore cats = restore cats
        member _.Update cat = save cat
236  src/MyWebLog.Data/Postgres/PostgresHelpers.fs  Normal file
@@ -0,0 +1,236 @@
/// Helper functions for the PostgreSQL data implementation
[<AutoOpen>]
module MyWebLog.Data.Postgres.PostgresHelpers

/// The table names used in the PostgreSQL implementation
[<RequireQualifiedAccess>]
module Table =

    /// Categories
    [<Literal>]
    let Category = "category"

    /// Database Version
    [<Literal>]
    let DbVersion = "db_version"

    /// Pages
    [<Literal>]
    let Page = "page"

    /// Page Revisions
    [<Literal>]
    let PageRevision = "page_revision"

    /// Posts
    [<Literal>]
    let Post = "post"

    /// Post Comments
    [<Literal>]
    let PostComment = "post_comment"

    /// Post Revisions
    [<Literal>]
    let PostRevision = "post_revision"

    /// Tag/URL Mappings
    [<Literal>]
    let TagMap = "tag_map"

    /// Themes
    [<Literal>]
    let Theme = "theme"

    /// Theme Assets
    [<Literal>]
    let ThemeAsset = "theme_asset"

    /// Uploads
    [<Literal>]
    let Upload = "upload"

    /// Web Logs
    [<Literal>]
    let WebLog = "web_log"

    /// Users
    [<Literal>]
    let WebLogUser = "web_log_user"


open System
open System.Threading.Tasks
open BitBadger.Npgsql.FSharp.Documents
open MyWebLog
open MyWebLog.Data
open NodaTime
open Npgsql
open Npgsql.FSharp

/// Create a SQL parameter for the web log ID
let webLogIdParam webLogId =
    "@webLogId", Sql.string (WebLogId.toString webLogId)

/// Create an anonymous record with the given web log ID
let webLogDoc (webLogId : WebLogId) =
    {| WebLogId = webLogId |}

/// Create a parameter for a web log document-contains query
let webLogContains webLogId =
    "@criteria", Query.jsonbDocParam (webLogDoc webLogId)

/// The name of the field to select to be able to use Map.toCount
let countName = "the_count"

/// The name of the field to select to be able to use Map.toExists
let existsName = "does_exist"

/// A SQL string to select data from a table with the given JSON document contains criteria
let selectWithCriteria tableName =
    $"""{Query.selectFromTable tableName} WHERE {Query.whereDataContains "@criteria"}"""

/// Create the SQL and parameters for an IN clause
let inClause<'T> colNameAndPrefix paramName (valueFunc: 'T -> string) (items : 'T list) =
    if List.isEmpty items then "", []
    else
        let mutable idx = 0
        items
        |> List.skip 1
        |> List.fold (fun (itemS, itemP) it ->
            idx <- idx + 1
            $"{itemS}, @%s{paramName}{idx}", ($"@%s{paramName}{idx}", Sql.string (valueFunc it)) :: itemP)
            (Seq.ofList items
             |> Seq.map (fun it ->
                 $"%s{colNameAndPrefix} IN (@%s{paramName}0", [ $"@%s{paramName}0", Sql.string (valueFunc it) ])
             |> Seq.head)
        |> function sql, ps -> $"{sql})", ps

/// Create the SQL and parameters for match-any array query
let arrayContains<'T> name (valueFunc : 'T -> string) (items : 'T list) =
    $"data['{name}'] ?| @{name}Values",
    ($"@{name}Values", Sql.stringArray (items |> List.map valueFunc |> Array.ofList))

/// Get the first result of the given query
let tryHead<'T> (query : Task<'T list>) = backgroundTask {
    let! results = query
    return List.tryHead results
}

/// Create a parameter for a non-standard type
let typedParam<'T> name (it : 'T) =
    $"@%s{name}", Sql.parameter (NpgsqlParameter ($"@{name}", it))

/// Create a parameter for a possibly-missing non-standard type
let optParam<'T> name (it : 'T option) =
    let p = NpgsqlParameter ($"@%s{name}", if Option.isSome it then box it.Value else DBNull.Value)
    p.ParameterName, Sql.parameter p

/// Mapping functions for SQL queries
module Map =

    /// Get a count from a row
    let toCount (row : RowReader) =
        row.int countName

    /// Get a true/false value as to whether an item exists
    let toExists (row : RowReader) =
        row.bool existsName

    /// Create a permalink from the current row
    let toPermalink (row : RowReader) =
        Permalink (row.string "permalink")

    /// Create a revision from the current row
    let toRevision (row : RowReader) : Revision =
        { AsOf = row.fieldValue<Instant> "as_of"
          Text = row.string "revision_text" |> MarkupText.parse
        }

    /// Create a theme asset from the current row
    let toThemeAsset includeData (row : RowReader) : ThemeAsset =
        { Id = ThemeAssetId (ThemeId (row.string "theme_id"), row.string "path")
          UpdatedOn = row.fieldValue<Instant> "updated_on"
          Data = if includeData then row.bytea "data" else [||]
        }

    /// Create an uploaded file from the current row
    let toUpload includeData (row : RowReader) : Upload =
        { Id = row.string "id" |> UploadId
          WebLogId = row.string "web_log_id" |> WebLogId
          Path = row.string "path" |> Permalink
          UpdatedOn = row.fieldValue<Instant> "updated_on"
          Data = if includeData then row.bytea "data" else [||]
        }

/// Document manipulation functions
module Document =

    /// Determine whether a document exists with the given key for the given web log
    let existsByWebLog<'TKey> table (key : 'TKey) (keyFunc : 'TKey -> string) webLogId =
        Custom.scalar
            $""" SELECT EXISTS (
                     SELECT 1 FROM %s{table} WHERE id = @id AND {Query.whereDataContains "@criteria"}
                 ) AS {existsName}"""
            [ "@id", Sql.string (keyFunc key); webLogContains webLogId ] Map.toExists

    /// Find a document by its ID for the given web log
    let findByIdAndWebLog<'TKey, 'TDoc> table (key : 'TKey) (keyFunc : 'TKey -> string) webLogId =
        Custom.single $"""{Query.selectFromTable table} WHERE id = @id AND {Query.whereDataContains "@criteria"}"""
            [ "@id", Sql.string (keyFunc key); webLogContains webLogId ] fromData<'TDoc>

    /// Find a document by its ID for the given web log
    let findByWebLog<'TDoc> table webLogId : Task<'TDoc list> =
        Find.byContains table (webLogDoc webLogId)


/// Functions to support revisions
module Revisions =

    /// Find all revisions for the given entity
    let findByEntityId<'TKey> revTable entityTable (key : 'TKey) (keyFunc : 'TKey -> string) =
        Custom.list $"SELECT as_of, revision_text FROM %s{revTable} WHERE %s{entityTable}_id = @id ORDER BY as_of DESC"
            [ "@id", Sql.string (keyFunc key) ] Map.toRevision

    /// Find all revisions for all posts for the given web log
    let findByWebLog<'TKey> revTable entityTable (keyFunc : string -> 'TKey) webLogId =
        Custom.list
            $"""SELECT pr.*
                  FROM %s{revTable} pr
                       INNER JOIN %s{entityTable} p ON p.id = pr.{entityTable}_id
                 WHERE p.{Query.whereDataContains "@criteria"}
                 ORDER BY as_of DESC"""
            [ webLogContains webLogId ] (fun row -> keyFunc (row.string $"{entityTable}_id"), Map.toRevision row)

    /// Parameters for a revision INSERT statement
    let revParams<'TKey> (key : 'TKey) (keyFunc : 'TKey -> string) rev = [
        typedParam "asOf" rev.AsOf
        "@id", Sql.string (keyFunc key)
        "@text", Sql.string (MarkupText.toString rev.Text)
    ]

    /// The SQL statement to insert a revision
    let insertSql table =
        $"INSERT INTO %s{table} VALUES (@id, @asOf, @text)"

    /// Update a page's revisions
    let update<'TKey> revTable entityTable (key : 'TKey) (keyFunc : 'TKey -> string) oldRevs newRevs = backgroundTask {
        let toDelete, toAdd = Utils.diffRevisions oldRevs newRevs
        if not (List.isEmpty toDelete) || not (List.isEmpty toAdd) then
            let! _ =
                Configuration.dataSource ()
                |> Sql.fromDataSource
                |> Sql.executeTransactionAsync [
                    if not (List.isEmpty toDelete) then
                        $"DELETE FROM %s{revTable} WHERE %s{entityTable}_id = @id AND as_of = @asOf",
                        toDelete
                        |> List.map (fun it -> [
                            "@id", Sql.string (keyFunc key)
                            typedParam "asOf" it.AsOf
                        ])
                    if not (List.isEmpty toAdd) then
                        insertSql revTable, toAdd |> List.map (revParams key keyFunc)
                ]
            ()
    }
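`inClause` above builds a parameterized `IN (...)` fragment plus its matching parameter list, which the data classes splice into larger statements. A small illustrative call (the column expression and IDs are made up for the example):

```fsharp
// Illustrative call of inClause from PostgresHelpers above; column and IDs are made up.
let catSql, catParams =
    inClause "data ->> 'Id'" "id" id [ "cat-one"; "cat-two"; "cat-three" ]
// catSql    -> "data ->> 'Id' IN (@id0, @id1, @id2)"
// catParams -> three ("@idN", Sql.string ...) pairs, one per ID (list order is not significant)
```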
178
src/MyWebLog.Data/Postgres/PostgresPageData.fs
Normal file
178
src/MyWebLog.Data/Postgres/PostgresPageData.fs
Normal file
@@ -0,0 +1,178 @@
|
||||
namespace MyWebLog.Data.Postgres
|
||||
|
||||
open BitBadger.Npgsql.FSharp.Documents
|
||||
open Microsoft.Extensions.Logging
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
open Npgsql.FSharp
|
||||
|
||||
/// PostgreSQL myWebLog page data implementation
|
||||
type PostgresPageData (log : ILogger) =
|
||||
|
||||
// SUPPORT FUNCTIONS
|
||||
|
||||
/// Append revisions to a page
|
||||
let appendPageRevisions (page : Page) = backgroundTask {
|
||||
log.LogTrace "Page.appendPageRevisions"
|
||||
let! revisions = Revisions.findByEntityId Table.PageRevision Table.Page page.Id PageId.toString
|
||||
return { page with Revisions = revisions }
|
||||
}
|
||||
|
||||
/// Return a page with no text or revisions
|
||||
let pageWithoutText (row : RowReader) =
|
||||
{ fromData<Page> row with Text = "" }
|
||||
|
||||
/// Update a page's revisions
|
||||
let updatePageRevisions pageId oldRevs newRevs =
|
||||
log.LogTrace "Page.updatePageRevisions"
|
||||
Revisions.update Table.PageRevision Table.Page pageId PageId.toString oldRevs newRevs
|
||||
|
||||
/// Does the given page exist?
|
||||
let pageExists pageId webLogId =
|
||||
log.LogTrace "Page.pageExists"
|
||||
Document.existsByWebLog Table.Page pageId PageId.toString webLogId
|
||||
|
||||
// IMPLEMENTATION FUNCTIONS
|
||||
|
||||
/// Get all pages for a web log (without text or revisions)
|
||||
let all webLogId =
|
||||
log.LogTrace "Page.all"
|
||||
Custom.list $"{selectWithCriteria Table.Page} ORDER BY LOWER(data ->> '{nameof Page.empty.Title}')"
|
||||
[ webLogContains webLogId ] fromData<Page>
|
||||
|
||||
/// Count all pages for the given web log
|
||||
let countAll webLogId =
|
||||
log.LogTrace "Page.countAll"
|
||||
Count.byContains Table.Page (webLogDoc webLogId)
|
||||
|
||||
/// Count all pages shown in the page list for the given web log
|
||||
let countListed webLogId =
|
||||
log.LogTrace "Page.countListed"
|
||||
Count.byContains Table.Page {| webLogDoc webLogId with IsInPageList = true |}
|
||||
|
||||
/// Find a page by its ID (without revisions)
|
||||
let findById pageId webLogId =
|
||||
log.LogTrace "Page.findById"
|
||||
Document.findByIdAndWebLog<PageId, Page> Table.Page pageId PageId.toString webLogId
|
||||
|
||||
/// Find a complete page by its ID
|
||||
let findFullById pageId webLogId = backgroundTask {
|
||||
log.LogTrace "Page.findFullById"
|
||||
match! findById pageId webLogId with
|
||||
| Some page ->
|
||||
let! withMore = appendPageRevisions page
|
||||
return Some withMore
|
||||
| None -> return None
|
||||
}
|
||||
|
||||
/// Delete a page by its ID
|
||||
let delete pageId webLogId = backgroundTask {
|
||||
log.LogTrace "Page.delete"
|
||||
match! pageExists pageId webLogId with
|
||||
| true ->
|
||||
do! Delete.byId Table.Page (PageId.toString pageId)
|
||||
return true
|
||||
| false -> return false
|
||||
}
|
||||
|
||||
/// Find a page by its permalink for the given web log
|
||||
let findByPermalink permalink webLogId =
|
||||
log.LogTrace "Page.findByPermalink"
|
||||
Find.byContains<Page> Table.Page {| webLogDoc webLogId with Permalink = Permalink.toString permalink |}
|
||||
|> tryHead
|
||||
|
||||
/// Find the current permalink within a set of potential prior permalinks for the given web log
|
||||
let findCurrentPermalink permalinks webLogId = backgroundTask {
|
||||
log.LogTrace "Page.findCurrentPermalink"
|
||||
if List.isEmpty permalinks then return None
|
||||
else
|
||||
let linkSql, linkParam =
|
||||
arrayContains (nameof Page.empty.PriorPermalinks) Permalink.toString permalinks
|
||||
return!
|
||||
Custom.single
|
||||
$"""SELECT data ->> '{nameof Page.empty.Permalink}' AS permalink
|
||||
FROM {Table.Page}
|
||||
WHERE {Query.whereDataContains "@criteria"}
|
||||
AND {linkSql}""" [ webLogContains webLogId; linkParam ] Map.toPermalink
|
||||
}
|
||||
|
||||
/// Get all complete pages for the given web log
|
||||
let findFullByWebLog webLogId = backgroundTask {
|
||||
log.LogTrace "Page.findFullByWebLog"
|
||||
let! pages = Document.findByWebLog<Page> Table.Page webLogId
|
||||
let! revisions = Revisions.findByWebLog Table.PageRevision Table.Page PageId webLogId
|
||||
return
|
||||
pages
|
||||
|> List.map (fun it ->
|
||||
{ it with Revisions = revisions |> List.filter (fun r -> fst r = it.Id) |> List.map snd })
|
||||
}
|
||||
|
||||
/// Get all listed pages for the given web log (without revisions or text)
|
||||
let findListed webLogId =
|
||||
log.LogTrace "Page.findListed"
|
||||
Custom.list $"{selectWithCriteria Table.Page} ORDER BY LOWER(data ->> '{nameof Page.empty.Title}')"
|
||||
[ "@criteria", Query.jsonbDocParam {| webLogDoc webLogId with IsInPageList = true |} ]
|
||||
pageWithoutText
|
||||
|
||||
/// Get a page of pages for the given web log (without revisions)
|
||||
let findPageOfPages webLogId pageNbr =
|
||||
log.LogTrace "Page.findPageOfPages"
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Page}
|
||||
ORDER BY LOWER(data->>'{nameof Page.empty.Title}')
|
||||
LIMIT @pageSize OFFSET @toSkip"
|
||||
[ webLogContains webLogId; "@pageSize", Sql.int 26; "@toSkip", Sql.int ((pageNbr - 1) * 25) ]
|
||||
fromData<Page>
|
||||
|
||||
/// Restore pages from a backup
|
||||
let restore (pages : Page list) = backgroundTask {
|
||||
log.LogTrace "Page.restore"
|
||||
let revisions = pages |> List.collect (fun p -> p.Revisions |> List.map (fun r -> p.Id, r))
|
||||
let! _ =
|
||||
Configuration.dataSource ()
|
||||
|> Sql.fromDataSource
|
||||
|> Sql.executeTransactionAsync [
|
||||
Query.insert Table.Page,
|
||||
pages
|
||||
|> List.map (fun page -> Query.docParameters (PageId.toString page.Id) { page with Revisions = [] })
|
||||
Revisions.insertSql Table.PageRevision,
|
||||
revisions |> List.map (fun (pageId, rev) -> Revisions.revParams pageId PageId.toString rev)
|
||||
]
|
||||
()
|
||||
}
|
||||
|
||||
/// Save a page
|
||||
let save (page : Page) = backgroundTask {
|
||||
log.LogTrace "Page.save"
|
||||
let! oldPage = findFullById page.Id page.WebLogId
|
||||
do! save Table.Page (PageId.toString page.Id) { page with Revisions = [] }
|
||||
do! updatePageRevisions page.Id (match oldPage with Some p -> p.Revisions | None -> []) page.Revisions
|
||||
()
|
||||
}
|
||||
|
||||
/// Update a page's prior permalinks
|
||||
let updatePriorPermalinks pageId webLogId permalinks = backgroundTask {
|
||||
log.LogTrace "Page.updatePriorPermalinks"
|
||||
match! pageExists pageId webLogId with
|
||||
| true ->
|
||||
do! Update.partialById Table.Page (PageId.toString pageId) {| PriorPermalinks = permalinks |}
|
||||
return true
|
||||
| false -> return false
|
||||
}
|
||||
|
||||
interface IPageData with
|
||||
member _.Add page = save page
|
||||
member _.All webLogId = all webLogId
|
||||
member _.CountAll webLogId = countAll webLogId
|
||||
member _.CountListed webLogId = countListed webLogId
|
||||
member _.Delete pageId webLogId = delete pageId webLogId
|
||||
member _.FindById pageId webLogId = findById pageId webLogId
|
||||
member _.FindByPermalink permalink webLogId = findByPermalink permalink webLogId
|
||||
member _.FindCurrentPermalink permalinks webLogId = findCurrentPermalink permalinks webLogId
|
||||
member _.FindFullById pageId webLogId = findFullById pageId webLogId
|
||||
member _.FindFullByWebLog webLogId = findFullByWebLog webLogId
|
||||
member _.FindListed webLogId = findListed webLogId
|
||||
member _.FindPageOfPages webLogId pageNbr = findPageOfPages webLogId pageNbr
|
||||
member _.Restore pages = restore pages
|
||||
member _.Update page = save page
|
||||
member _.UpdatePriorPermalinks pageId webLogId permalinks = updatePriorPermalinks pageId webLogId permalinks
|
||||
228
src/MyWebLog.Data/Postgres/PostgresPostData.fs
Normal file
@@ -0,0 +1,228 @@
|
||||
namespace MyWebLog.Data.Postgres
|
||||
|
||||
open BitBadger.Npgsql.FSharp.Documents
|
||||
open Microsoft.Extensions.Logging
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
open NodaTime.Text
|
||||
open Npgsql.FSharp
|
||||
|
||||
/// PostgreSQL myWebLog post data implementation
|
||||
type PostgresPostData (log : ILogger) =
|
||||
|
||||
// SUPPORT FUNCTIONS
|
||||
|
||||
/// Append revisions to a post
|
||||
let appendPostRevisions (post : Post) = backgroundTask {
|
||||
log.LogTrace "Post.appendPostRevisions"
|
||||
let! revisions = Revisions.findByEntityId Table.PostRevision Table.Post post.Id PostId.toString
|
||||
return { post with Revisions = revisions }
|
||||
}
|
||||
|
||||
/// Return a post with no revisions, prior permalinks, or text
|
||||
let postWithoutText row =
|
||||
{ fromData<Post> row with Text = "" }
|
||||
|
||||
/// Update a post's revisions
|
||||
let updatePostRevisions postId oldRevs newRevs =
|
||||
log.LogTrace "Post.updatePostRevisions"
|
||||
Revisions.update Table.PostRevision Table.Post postId PostId.toString oldRevs newRevs
|
||||
|
||||
/// Does the given post exist?
|
||||
let postExists postId webLogId =
|
||||
log.LogTrace "Post.postExists"
|
||||
Document.existsByWebLog Table.Post postId PostId.toString webLogId
|
||||
|
||||
// IMPLEMENTATION FUNCTIONS
|
||||
|
||||
/// Count posts in a status for the given web log
|
||||
let countByStatus status webLogId =
|
||||
log.LogTrace "Post.countByStatus"
|
||||
Count.byContains Table.Post {| webLogDoc webLogId with Status = PostStatus.toString status |}
|
||||
|
||||
/// Find a post by its ID for the given web log (excluding revisions)
|
||||
let findById postId webLogId =
|
||||
log.LogTrace "Post.findById"
|
||||
Document.findByIdAndWebLog<PostId, Post> Table.Post postId PostId.toString webLogId
|
||||
|
||||
/// Find a post by its permalink for the given web log (excluding revisions and prior permalinks)
|
||||
let findByPermalink permalink webLogId =
|
||||
log.LogTrace "Post.findByPermalink"
|
||||
Custom.single (selectWithCriteria Table.Post)
|
||||
[ "@criteria",
|
||||
Query.jsonbDocParam {| webLogDoc webLogId with Permalink = Permalink.toString permalink |}
|
||||
] fromData<Post>
|
||||
|
||||
/// Find a complete post by its ID for the given web log
|
||||
let findFullById postId webLogId = backgroundTask {
|
||||
log.LogTrace "Post.findFullById"
|
||||
match! findById postId webLogId with
|
||||
| Some post ->
|
||||
let! withRevisions = appendPostRevisions post
|
||||
return Some withRevisions
|
||||
| None -> return None
|
||||
}
|
||||
|
||||
/// Delete a post by its ID for the given web log
|
||||
let delete postId webLogId = backgroundTask {
|
||||
log.LogTrace "Post.delete"
|
||||
match! postExists postId webLogId with
|
||||
| true ->
|
||||
let theId = PostId.toString postId
|
||||
do! Custom.nonQuery
|
||||
$"""DELETE FROM {Table.PostComment} WHERE {Query.whereDataContains "@criteria"};
|
||||
DELETE FROM {Table.Post} WHERE id = @id"""
|
||||
[ "@id", Sql.string theId; "@criteria", Query.jsonbDocParam {| PostId = theId |} ]
|
||||
return true
|
||||
| false -> return false
|
||||
}
|
||||
|
||||
/// Find the current permalink from a list of potential prior permalinks for the given web log
|
||||
let findCurrentPermalink permalinks webLogId = backgroundTask {
|
||||
log.LogTrace "Post.findCurrentPermalink"
|
||||
if List.isEmpty permalinks then return None
|
||||
else
|
||||
let linkSql, linkParam =
|
||||
arrayContains (nameof Post.empty.PriorPermalinks) Permalink.toString permalinks
|
||||
return!
|
||||
Custom.single
|
||||
$"""SELECT data ->> '{nameof Post.empty.Permalink}' AS permalink
|
||||
FROM {Table.Post}
|
||||
WHERE {Query.whereDataContains "@criteria"}
|
||||
AND {linkSql}""" [ webLogContains webLogId; linkParam ] Map.toPermalink
|
||||
}
|
||||
|
||||
/// Get all complete posts for the given web log
|
||||
let findFullByWebLog webLogId = backgroundTask {
|
||||
log.LogTrace "Post.findFullByWebLog"
|
||||
let! posts = Document.findByWebLog<Post> Table.Post webLogId
|
||||
let! revisions = Revisions.findByWebLog Table.PostRevision Table.Post PostId webLogId
|
||||
return
|
||||
posts
|
||||
|> List.map (fun it ->
|
||||
{ it with Revisions = revisions |> List.filter (fun r -> fst r = it.Id) |> List.map snd })
|
||||
}
|
||||
|
||||
/// Get a page of categorized posts for the given web log (excludes revisions)
|
||||
let findPageOfCategorizedPosts webLogId categoryIds pageNbr postsPerPage =
|
||||
log.LogTrace "Post.findPageOfCategorizedPosts"
|
||||
let catSql, catParam = arrayContains (nameof Post.empty.CategoryIds) CategoryId.toString categoryIds
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Post}
|
||||
AND {catSql}
|
||||
ORDER BY data ->> '{nameof Post.empty.PublishedOn}' DESC
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
[ "@criteria", Query.jsonbDocParam {| webLogDoc webLogId with Status = PostStatus.toString Published |}
|
||||
catParam
|
||||
] fromData<Post>
|
||||
|
||||
/// Get a page of posts for the given web log (excludes text and revisions)
|
||||
let findPageOfPosts webLogId pageNbr postsPerPage =
|
||||
log.LogTrace "Post.findPageOfPosts"
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Post}
|
||||
ORDER BY data ->> '{nameof Post.empty.PublishedOn}' DESC NULLS FIRST,
|
||||
data ->> '{nameof Post.empty.UpdatedOn}'
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
[ webLogContains webLogId ] postWithoutText
|
||||
|
||||
/// Get a page of published posts for the given web log (excludes revisions)
|
||||
let findPageOfPublishedPosts webLogId pageNbr postsPerPage =
|
||||
log.LogTrace "Post.findPageOfPublishedPosts"
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Post}
|
||||
ORDER BY data ->> '{nameof Post.empty.PublishedOn}' DESC
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
[ "@criteria", Query.jsonbDocParam {| webLogDoc webLogId with Status = PostStatus.toString Published |} ]
|
||||
fromData<Post>
|
||||
|
||||
/// Get a page of tagged posts for the given web log (excludes revisions and prior permalinks)
|
||||
let findPageOfTaggedPosts webLogId (tag : string) pageNbr postsPerPage =
|
||||
log.LogTrace "Post.findPageOfTaggedPosts"
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Post}
|
||||
AND data['{nameof Post.empty.Tags}'] @> @tag
|
||||
ORDER BY data ->> '{nameof Post.empty.PublishedOn}' DESC
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
[ "@criteria", Query.jsonbDocParam {| webLogDoc webLogId with Status = PostStatus.toString Published |}
|
||||
"@tag", Query.jsonbDocParam [| tag |]
|
||||
] fromData<Post>
|
||||
|
||||
/// Find the next newest and oldest post from a publish date for the given web log
|
||||
let findSurroundingPosts webLogId publishedOn = backgroundTask {
|
||||
log.LogTrace "Post.findSurroundingPosts"
|
||||
let queryParams () = [
|
||||
"@criteria", Query.jsonbDocParam {| webLogDoc webLogId with Status = PostStatus.toString Published |}
|
||||
"@publishedOn", Sql.string ((InstantPattern.General.Format publishedOn).Substring (0, 19))
|
||||
]
|
||||
let pubField = nameof Post.empty.PublishedOn
|
||||
let! older =
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Post}
|
||||
AND SUBSTR(data ->> '{pubField}', 1, 19) < @publishedOn
|
||||
ORDER BY data ->> '{pubField}' DESC
|
||||
LIMIT 1" (queryParams ()) fromData<Post>
|
||||
let! newer =
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.Post}
|
||||
AND SUBSTR(data ->> '{pubField}', 1, 19) > @publishedOn
|
||||
ORDER BY data ->> '{pubField}'
|
||||
LIMIT 1" (queryParams ()) fromData<Post>
|
||||
return List.tryHead older, List.tryHead newer
|
||||
}
|
||||
|
||||
/// Save a post
|
||||
let save (post : Post) = backgroundTask {
|
||||
log.LogTrace "Post.save"
|
||||
let! oldPost = findFullById post.Id post.WebLogId
|
||||
do! save Table.Post (PostId.toString post.Id) { post with Revisions = [] }
|
||||
do! updatePostRevisions post.Id (match oldPost with Some p -> p.Revisions | None -> []) post.Revisions
|
||||
}
|
||||
|
||||
/// Restore posts from a backup
|
||||
let restore posts = backgroundTask {
|
||||
log.LogTrace "Post.restore"
|
||||
let revisions = posts |> List.collect (fun p -> p.Revisions |> List.map (fun r -> p.Id, r))
|
||||
let! _ =
|
||||
Configuration.dataSource ()
|
||||
|> Sql.fromDataSource
|
||||
|> Sql.executeTransactionAsync [
|
||||
Query.insert Table.Post,
|
||||
posts
|
||||
|> List.map (fun post -> Query.docParameters (PostId.toString post.Id) { post with Revisions = [] })
|
||||
Revisions.insertSql Table.PostRevision,
|
||||
revisions |> List.map (fun (postId, rev) -> Revisions.revParams postId PostId.toString rev)
|
||||
]
|
||||
()
|
||||
}
|
||||
|
||||
/// Update prior permalinks for a post
|
||||
let updatePriorPermalinks postId webLogId permalinks = backgroundTask {
|
||||
log.LogTrace "Post.updatePriorPermalinks"
|
||||
match! postExists postId webLogId with
|
||||
| true ->
|
||||
do! Update.partialById Table.Post (PostId.toString postId) {| PriorPermalinks = permalinks |}
|
||||
return true
|
||||
| false -> return false
|
||||
}
|
||||
|
||||
interface IPostData with
|
||||
member _.Add post = save post
|
||||
member _.CountByStatus status webLogId = countByStatus status webLogId
|
||||
member _.Delete postId webLogId = delete postId webLogId
|
||||
member _.FindById postId webLogId = findById postId webLogId
|
||||
member _.FindByPermalink permalink webLogId = findByPermalink permalink webLogId
|
||||
member _.FindCurrentPermalink permalinks webLogId = findCurrentPermalink permalinks webLogId
|
||||
member _.FindFullById postId webLogId = findFullById postId webLogId
|
||||
member _.FindFullByWebLog webLogId = findFullByWebLog webLogId
|
||||
member _.FindPageOfCategorizedPosts webLogId categoryIds pageNbr postsPerPage =
|
||||
findPageOfCategorizedPosts webLogId categoryIds pageNbr postsPerPage
|
||||
member _.FindPageOfPosts webLogId pageNbr postsPerPage = findPageOfPosts webLogId pageNbr postsPerPage
|
||||
member _.FindPageOfPublishedPosts webLogId pageNbr postsPerPage =
|
||||
findPageOfPublishedPosts webLogId pageNbr postsPerPage
|
||||
member _.FindPageOfTaggedPosts webLogId tag pageNbr postsPerPage =
|
||||
findPageOfTaggedPosts webLogId tag pageNbr postsPerPage
|
||||
member _.FindSurroundingPosts webLogId publishedOn = findSurroundingPosts webLogId publishedOn
|
||||
member _.Restore posts = restore posts
|
||||
member _.Update post = save post
|
||||
member _.UpdatePriorPermalinks postId webLogId permalinks = updatePriorPermalinks postId webLogId permalinks
|
||||
70
src/MyWebLog.Data/Postgres/PostgresTagMapData.fs
Normal file
@@ -0,0 +1,70 @@
|
||||
namespace MyWebLog.Data.Postgres
|
||||
|
||||
open BitBadger.Npgsql.FSharp.Documents
|
||||
open Microsoft.Extensions.Logging
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
open Npgsql.FSharp
|
||||
|
||||
/// PostgreSQL myWebLog tag mapping data implementation
|
||||
type PostgresTagMapData (log : ILogger) =
|
||||
|
||||
/// Find a tag mapping by its ID for the given web log
|
||||
let findById tagMapId webLogId =
|
||||
log.LogTrace "TagMap.findById"
|
||||
Document.findByIdAndWebLog<TagMapId, TagMap> Table.TagMap tagMapId TagMapId.toString webLogId
|
||||
|
||||
/// Delete a tag mapping for the given web log
|
||||
let delete tagMapId webLogId = backgroundTask {
|
||||
log.LogTrace "TagMap.delete"
|
||||
let! exists = Document.existsByWebLog Table.TagMap tagMapId TagMapId.toString webLogId
|
||||
if exists then
|
||||
do! Delete.byId Table.TagMap (TagMapId.toString tagMapId)
|
||||
return true
|
||||
else return false
|
||||
}
|
||||
|
||||
/// Find a tag mapping by its URL value for the given web log
|
||||
let findByUrlValue (urlValue : string) webLogId =
|
||||
log.LogTrace "TagMap.findByUrlValue"
|
||||
Custom.single (selectWithCriteria Table.TagMap)
|
||||
[ "@criteria", Query.jsonbDocParam {| webLogDoc webLogId with UrlValue = urlValue |} ]
|
||||
fromData<TagMap>
|
||||
|
||||
/// Get all tag mappings for the given web log
|
||||
let findByWebLog webLogId =
|
||||
log.LogTrace "TagMap.findByWebLog"
|
||||
Custom.list $"{selectWithCriteria Table.TagMap} ORDER BY data ->> 'tag'" [ webLogContains webLogId ]
|
||||
fromData<TagMap>
|
||||
|
||||
/// Find any tag mappings in a list of tags for the given web log
|
||||
let findMappingForTags tags webLogId =
|
||||
log.LogTrace "TagMap.findMappingForTags"
|
||||
let tagSql, tagParam = arrayContains (nameof TagMap.empty.Tag) id tags
|
||||
Custom.list $"{selectWithCriteria Table.TagMap} AND {tagSql}" [ webLogContains webLogId; tagParam ]
|
||||
fromData<TagMap>
|
||||
|
||||
/// Save a tag mapping
|
||||
let save (tagMap : TagMap) =
|
||||
save Table.TagMap (TagMapId.toString tagMap.Id) tagMap
|
||||
|
||||
/// Restore tag mappings from a backup
|
||||
let restore (tagMaps : TagMap list) = backgroundTask {
|
||||
let! _ =
|
||||
Configuration.dataSource ()
|
||||
|> Sql.fromDataSource
|
||||
|> Sql.executeTransactionAsync [
|
||||
Query.insert Table.TagMap,
|
||||
tagMaps |> List.map (fun tagMap -> Query.docParameters (TagMapId.toString tagMap.Id) tagMap)
|
||||
]
|
||||
()
|
||||
}
|
||||
|
||||
interface ITagMapData with
|
||||
member _.Delete tagMapId webLogId = delete tagMapId webLogId
|
||||
member _.FindById tagMapId webLogId = findById tagMapId webLogId
|
||||
member _.FindByUrlValue urlValue webLogId = findByUrlValue urlValue webLogId
|
||||
member _.FindByWebLog webLogId = findByWebLog webLogId
|
||||
member _.FindMappingForTags tags webLogId = findMappingForTags tags webLogId
|
||||
member _.Save tagMap = save tagMap
|
||||
member _.Restore tagMaps = restore tagMaps
|
||||
117
src/MyWebLog.Data/Postgres/PostgresThemeData.fs
Normal file
@@ -0,0 +1,117 @@
|
||||
namespace MyWebLog.Data.Postgres
|
||||
|
||||
open BitBadger.Npgsql.FSharp.Documents
|
||||
open Microsoft.Extensions.Logging
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
open Npgsql.FSharp
|
||||
|
||||
/// PostgreSQL myWebLog theme data implementation
|
||||
type PostgresThemeData (log : ILogger) =
|
||||
|
||||
/// Clear out the template text from a theme
|
||||
let withoutTemplateText row =
|
||||
let theme = fromData<Theme> row
|
||||
{ theme with Templates = theme.Templates |> List.map (fun template -> { template with Text = "" }) }
|
||||
|
||||
/// Retrieve all themes (except 'admin'; excludes template text)
|
||||
let all () =
|
||||
log.LogTrace "Theme.all"
|
||||
Custom.list $"{Query.selectFromTable Table.Theme} WHERE id <> 'admin' ORDER BY id" [] withoutTemplateText
|
||||
|
||||
/// Does a given theme exist?
|
||||
let exists themeId =
|
||||
log.LogTrace "Theme.exists"
|
||||
Exists.byId Table.Theme (ThemeId.toString themeId)
|
||||
|
||||
/// Find a theme by its ID
|
||||
let findById themeId =
|
||||
log.LogTrace "Theme.findById"
|
||||
Find.byId<Theme> Table.Theme (ThemeId.toString themeId)
|
||||
|
||||
/// Find a theme by its ID (excludes the text of templates)
|
||||
let findByIdWithoutText themeId =
|
||||
log.LogTrace "Theme.findByIdWithoutText"
|
||||
Custom.single (Query.Find.byId Table.Theme) [ "@id", Sql.string (ThemeId.toString themeId) ] withoutTemplateText
|
||||
|
||||
/// Delete a theme by its ID
|
||||
let delete themeId = backgroundTask {
|
||||
log.LogTrace "Theme.delete"
|
||||
match! exists themeId with
|
||||
| true ->
|
||||
do! Delete.byId Table.Theme (ThemeId.toString themeId)
|
||||
return true
|
||||
| false -> return false
|
||||
}
|
||||
|
||||
/// Save a theme
|
||||
let save (theme : Theme) =
|
||||
log.LogTrace "Theme.save"
|
||||
save Table.Theme (ThemeId.toString theme.Id) theme
|
||||
|
||||
interface IThemeData with
|
||||
member _.All () = all ()
|
||||
member _.Delete themeId = delete themeId
|
||||
member _.Exists themeId = exists themeId
|
||||
member _.FindById themeId = findById themeId
|
||||
member _.FindByIdWithoutText themeId = findByIdWithoutText themeId
|
||||
member _.Save theme = save theme
|
||||
|
||||
|
||||
/// PostgreSQL myWebLog theme asset data implementation
|
||||
type PostgresThemeAssetData (log : ILogger) =
|
||||
|
||||
/// Get all theme assets (excludes data)
|
||||
let all () =
|
||||
log.LogTrace "ThemeAsset.all"
|
||||
Custom.list $"SELECT theme_id, path, updated_on FROM {Table.ThemeAsset}" [] (Map.toThemeAsset false)
|
||||
|
||||
/// Delete all assets for the given theme
|
||||
let deleteByTheme themeId =
|
||||
log.LogTrace "ThemeAsset.deleteByTheme"
|
||||
Custom.nonQuery $"DELETE FROM {Table.ThemeAsset} WHERE theme_id = @themeId"
|
||||
[ "@themeId", Sql.string (ThemeId.toString themeId) ]
|
||||
|
||||
/// Find a theme asset by its ID
|
||||
let findById assetId =
|
||||
log.LogTrace "ThemeAsset.findById"
|
||||
let (ThemeAssetId (ThemeId themeId, path)) = assetId
|
||||
Custom.single $"SELECT * FROM {Table.ThemeAsset} WHERE theme_id = @themeId AND path = @path"
|
||||
[ "@themeId", Sql.string themeId; "@path", Sql.string path ] (Map.toThemeAsset true)
|
||||
|
||||
/// Get theme assets for the given theme (excludes data)
|
||||
let findByTheme themeId =
|
||||
log.LogTrace "ThemeAsset.findByTheme"
|
||||
Custom.list $"SELECT theme_id, path, updated_on FROM {Table.ThemeAsset} WHERE theme_id = @themeId"
|
||||
[ "@themeId", Sql.string (ThemeId.toString themeId) ] (Map.toThemeAsset false)
|
||||
|
||||
/// Get theme assets for the given theme
|
||||
let findByThemeWithData themeId =
|
||||
log.LogTrace "ThemeAsset.findByThemeWithData"
|
||||
Custom.list $"SELECT * FROM {Table.ThemeAsset} WHERE theme_id = @themeId"
|
||||
[ "@themeId", Sql.string (ThemeId.toString themeId) ] (Map.toThemeAsset true)
|
||||
|
||||
/// Save a theme asset
|
||||
let save (asset : ThemeAsset) =
|
||||
log.LogTrace "ThemeAsset.save"
|
||||
let (ThemeAssetId (ThemeId themeId, path)) = asset.Id
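// upsert keyed on (theme_id, path): an existing asset has its updated_on and data replaced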
|
||||
Custom.nonQuery
|
||||
$"INSERT INTO {Table.ThemeAsset} (
|
||||
theme_id, path, updated_on, data
|
||||
) VALUES (
|
||||
@themeId, @path, @updatedOn, @data
|
||||
) ON CONFLICT (theme_id, path) DO UPDATE
|
||||
SET updated_on = EXCLUDED.updated_on,
|
||||
data = EXCLUDED.data"
|
||||
[ "@themeId", Sql.string themeId
|
||||
"@path", Sql.string path
|
||||
"@data", Sql.bytea asset.Data
|
||||
typedParam "updatedOn" asset.UpdatedOn ]
|
||||
|
||||
interface IThemeAssetData with
|
||||
member _.All () = all ()
|
||||
member _.DeleteByTheme themeId = deleteByTheme themeId
|
||||
member _.FindById assetId = findById assetId
|
||||
member _.FindByTheme themeId = findByTheme themeId
|
||||
member _.FindByThemeWithData themeId = findByThemeWithData themeId
|
||||
member _.Save asset = save asset
|
||||
83
src/MyWebLog.Data/Postgres/PostgresUploadData.fs
Normal file
@@ -0,0 +1,83 @@
|
||||
namespace MyWebLog.Data.Postgres
|
||||
|
||||
open BitBadger.Npgsql.FSharp.Documents
|
||||
open Microsoft.Extensions.Logging
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
open Npgsql.FSharp
|
||||
|
||||
/// PostgreSQL myWebLog uploaded file data implementation
|
||||
type PostgresUploadData (log : ILogger) =
|
||||
|
||||
/// The INSERT statement for an uploaded file
|
||||
let upInsert = $"
|
||||
INSERT INTO {Table.Upload} (
|
||||
id, web_log_id, path, updated_on, data
|
||||
) VALUES (
|
||||
@id, @webLogId, @path, @updatedOn, @data
|
||||
)"
|
||||
|
||||
/// Parameters for adding an uploaded file
|
||||
let upParams (upload : Upload) = [
|
||||
webLogIdParam upload.WebLogId
|
||||
typedParam "updatedOn" upload.UpdatedOn
|
||||
"@id", Sql.string (UploadId.toString upload.Id)
|
||||
"@path", Sql.string (Permalink.toString upload.Path)
|
||||
"@data", Sql.bytea upload.Data
|
||||
]
|
||||
|
||||
/// Save an uploaded file
|
||||
let add upload =
|
||||
log.LogTrace "Upload.add"
|
||||
Custom.nonQuery upInsert (upParams upload)
|
||||
|
||||
/// Delete an uploaded file by its ID
|
||||
let delete uploadId webLogId = backgroundTask {
|
||||
log.LogTrace "Upload.delete"
|
||||
let idParam = [ "@id", Sql.string (UploadId.toString uploadId) ]
|
||||
let! path =
|
||||
Custom.single $"SELECT path FROM {Table.Upload} WHERE id = @id AND web_log_id = @webLogId"
|
||||
(webLogIdParam webLogId :: idParam) (fun row -> row.string "path")
|
||||
if Option.isSome path then
|
||||
do! Custom.nonQuery (Query.Delete.byId Table.Upload) idParam
|
||||
return Ok path.Value
|
||||
else return Error $"""Upload ID {UploadId.toString uploadId} not found"""
|
||||
}
|
||||
|
||||
/// Find an uploaded file by its path for the given web log
|
||||
let findByPath path webLogId =
|
||||
log.LogTrace "Upload.findByPath"
|
||||
Custom.single $"SELECT * FROM {Table.Upload} WHERE web_log_id = @webLogId AND path = @path"
|
||||
[ webLogIdParam webLogId; "@path", Sql.string path ] (Map.toUpload true)
|
||||
|
||||
/// Find all uploaded files for the given web log (excludes data)
|
||||
let findByWebLog webLogId =
|
||||
log.LogTrace "Upload.findByWebLog"
|
||||
Custom.list $"SELECT id, web_log_id, path, updated_on FROM {Table.Upload} WHERE web_log_id = @webLogId"
|
||||
[ webLogIdParam webLogId ] (Map.toUpload false)
|
||||
|
||||
/// Find all uploaded files for the given web log
|
||||
let findByWebLogWithData webLogId =
|
||||
log.LogTrace "Upload.findByWebLogWithData"
|
||||
Custom.list $"SELECT * FROM {Table.Upload} WHERE web_log_id = @webLogId" [ webLogIdParam webLogId ]
|
||||
(Map.toUpload true)
|
||||
|
||||
/// Restore uploads from a backup
|
||||
let restore uploads = backgroundTask {
|
||||
log.LogTrace "Upload.restore"
|
||||
for batch in uploads |> List.chunkBySize 5 do
|
||||
let! _ =
|
||||
Configuration.dataSource ()
|
||||
|> Sql.fromDataSource
|
||||
|> Sql.executeTransactionAsync [ upInsert, batch |> List.map upParams ]
|
||||
()
|
||||
}
|
||||
|
||||
interface IUploadData with
|
||||
member _.Add upload = add upload
|
||||
member _.Delete uploadId webLogId = delete uploadId webLogId
|
||||
member _.FindByPath path webLogId = findByPath path webLogId
|
||||
member _.FindByWebLog webLogId = findByWebLog webLogId
|
||||
member _.FindByWebLogWithData webLogId = findByWebLogWithData webLogId
|
||||
member _.Restore uploads = restore uploads
|
||||
|
||||
68
src/MyWebLog.Data/Postgres/PostgresWebLogData.fs
Normal file
@@ -0,0 +1,68 @@
|
||||
namespace MyWebLog.Data.Postgres
|
||||
|
||||
open BitBadger.Npgsql.FSharp.Documents
|
||||
open Microsoft.Extensions.Logging
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
|
||||
/// PostgreSQL myWebLog web log data implementation
|
||||
type PostgresWebLogData (log : ILogger) =
|
||||
|
||||
/// Add a web log
|
||||
let add (webLog : WebLog) =
|
||||
log.LogTrace "WebLog.add"
|
||||
insert Table.WebLog (WebLogId.toString webLog.Id) webLog
|
||||
|
||||
/// Retrieve all web logs
|
||||
let all () =
|
||||
log.LogTrace "WebLog.all"
|
||||
Find.all<WebLog> Table.WebLog
|
||||
|
||||
/// Delete a web log by its ID
|
||||
let delete webLogId =
|
||||
log.LogTrace "WebLog.delete"
|
||||
Custom.nonQuery
|
||||
$"""DELETE FROM {Table.PostComment}
|
||||
WHERE data ->> '{nameof Comment.empty.PostId}' IN
|
||||
(SELECT id FROM {Table.Post} WHERE {Query.whereDataContains "@criteria"});
|
||||
{Query.Delete.byContains Table.Post};
|
||||
{Query.Delete.byContains Table.Page};
|
||||
{Query.Delete.byContains Table.Category};
|
||||
{Query.Delete.byContains Table.TagMap};
|
||||
{Query.Delete.byContains Table.WebLogUser};
|
||||
DELETE FROM {Table.Upload} WHERE web_log_id = @webLogId;
|
||||
DELETE FROM {Table.WebLog} WHERE id = @webLogId"""
|
||||
[ webLogIdParam webLogId; webLogContains webLogId ]
|
||||
|
||||
/// Find a web log by its host (URL base)
|
||||
let findByHost (url : string) =
|
||||
log.LogTrace "WebLog.findByHost"
|
||||
Custom.single (selectWithCriteria Table.WebLog) [ "@criteria", Query.jsonbDocParam {| UrlBase = url |} ]
|
||||
fromData<WebLog>
|
||||
|
||||
/// Find a web log by its ID
|
||||
let findById webLogId =
|
||||
log.LogTrace "WebLog.findById"
|
||||
Find.byId<WebLog> Table.WebLog (WebLogId.toString webLogId)
|
||||
|
||||
/// Update settings for a web log
|
||||
let updateSettings (webLog : WebLog) =
|
||||
log.LogTrace "WebLog.updateSettings"
|
||||
Update.full Table.WebLog (WebLogId.toString webLog.Id) webLog
|
||||
|
||||
/// Update RSS options for a web log
|
||||
let updateRssOptions (webLog : WebLog) = backgroundTask {
|
||||
log.LogTrace "WebLog.updateRssOptions"
|
||||
match! findById webLog.Id with
|
||||
| Some _ -> do! Update.partialById Table.WebLog (WebLogId.toString webLog.Id) {| Rss = webLog.Rss |}
|
||||
| None -> ()
|
||||
}
|
||||
|
||||
interface IWebLogData with
|
||||
member _.Add webLog = add webLog
|
||||
member _.All () = all ()
|
||||
member _.Delete webLogId = delete webLogId
|
||||
member _.FindByHost url = findByHost url
|
||||
member _.FindById webLogId = findById webLogId
|
||||
member _.UpdateSettings webLog = updateSettings webLog
|
||||
member _.UpdateRssOptions webLog = updateRssOptions webLog
|
||||
100
src/MyWebLog.Data/Postgres/PostgresWebLogUserData.fs
Normal file
@@ -0,0 +1,100 @@
|
||||
namespace MyWebLog.Data.Postgres
|
||||
|
||||
open BitBadger.Npgsql.FSharp.Documents
|
||||
open Microsoft.Extensions.Logging
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
open Npgsql.FSharp
|
||||
|
||||
/// PostgreSQL myWebLog user data implementation
|
||||
type PostgresWebLogUserData (log : ILogger) =
|
||||
|
||||
/// Find a user by their ID for the given web log
|
||||
let findById userId webLogId =
|
||||
log.LogTrace "WebLogUser.findById"
|
||||
Document.findByIdAndWebLog<WebLogUserId, WebLogUser> Table.WebLogUser userId WebLogUserId.toString webLogId
|
||||
|
||||
/// Delete a user if they have no posts or pages
|
||||
let delete userId webLogId = backgroundTask {
|
||||
log.LogTrace "WebLogUser.delete"
|
||||
match! findById userId webLogId with
|
||||
| Some _ ->
|
||||
let criteria = Query.whereDataContains "@criteria"
|
||||
let! isAuthor =
|
||||
Custom.scalar
|
||||
$" SELECT ( EXISTS (SELECT 1 FROM {Table.Page} WHERE {criteria}
|
||||
OR EXISTS (SELECT 1 FROM {Table.Post} WHERE {criteria})
|
||||
) AS {existsName}"
|
||||
[ "@criteria", Query.jsonbDocParam {| AuthorId = userId |} ] Map.toExists
|
||||
if isAuthor then
|
||||
return Error "User has pages or posts; cannot delete"
|
||||
else
|
||||
do! Delete.byId Table.WebLogUser (WebLogUserId.toString userId)
|
||||
return Ok true
|
||||
| None -> return Error "User does not exist"
|
||||
}
|
||||
|
||||
/// Find a user by their e-mail address for the given web log
|
||||
let findByEmail (email : string) webLogId =
|
||||
log.LogTrace "WebLogUser.findByEmail"
|
||||
Custom.single (selectWithCriteria Table.WebLogUser)
|
||||
[ "@criteria", Query.jsonbDocParam {| webLogDoc webLogId with Email = email |} ]
|
||||
fromData<WebLogUser>
|
||||
|
||||
/// Get all users for the given web log
|
||||
let findByWebLog webLogId =
|
||||
log.LogTrace "WebLogUser.findByWebLog"
|
||||
Custom.list
|
||||
$"{selectWithCriteria Table.WebLogUser} ORDER BY LOWER(data->>'{nameof WebLogUser.empty.PreferredName}')"
|
||||
[ webLogContains webLogId ] fromData<WebLogUser>
|
||||
|
||||
/// Find the names of users by their IDs for the given web log
|
||||
let findNames webLogId userIds = backgroundTask {
|
||||
log.LogTrace "WebLogUser.findNames"
|
||||
let idSql, idParams = inClause "AND id" "id" WebLogUserId.toString userIds
|
||||
let! users =
|
||||
Custom.list $"{selectWithCriteria Table.WebLogUser} {idSql}" (webLogContains webLogId :: idParams)
|
||||
fromData<WebLogUser>
|
||||
return
|
||||
users
|
||||
|> List.map (fun u -> { Name = WebLogUserId.toString u.Id; Value = WebLogUser.displayName u })
|
||||
}
|
||||
|
||||
/// Restore users from a backup
|
||||
let restore (users : WebLogUser list) = backgroundTask {
|
||||
log.LogTrace "WebLogUser.restore"
|
||||
let! _ =
|
||||
Configuration.dataSource ()
|
||||
|> Sql.fromDataSource
|
||||
|> Sql.executeTransactionAsync [
|
||||
Query.insert Table.WebLogUser,
|
||||
users |> List.map (fun user -> Query.docParameters (WebLogUserId.toString user.Id) user)
|
||||
]
|
||||
()
|
||||
}
|
||||
|
||||
/// Set a user's last seen date/time to now
|
||||
let setLastSeen userId webLogId = backgroundTask {
|
||||
log.LogTrace "WebLogUser.setLastSeen"
|
||||
match! Document.existsByWebLog Table.WebLogUser userId WebLogUserId.toString webLogId with
|
||||
| true ->
|
||||
do! Update.partialById Table.WebLogUser (WebLogUserId.toString userId) {| LastSeenOn = Some (Noda.now ()) |}
|
||||
| false -> ()
|
||||
}
|
||||
|
||||
/// Save a user
|
||||
let save (user : WebLogUser) =
|
||||
log.LogTrace "WebLogUser.save"
|
||||
save Table.WebLogUser (WebLogUserId.toString user.Id) user
|
||||
|
||||
interface IWebLogUserData with
|
||||
member _.Add user = save user
|
||||
member _.Delete userId webLogId = delete userId webLogId
|
||||
member _.FindByEmail email webLogId = findByEmail email webLogId
|
||||
member _.FindById userId webLogId = findById userId webLogId
|
||||
member _.FindByWebLog webLogId = findByWebLog webLogId
|
||||
member _.FindNames webLogId userIds = findNames webLogId userIds
|
||||
member _.Restore users = restore users
|
||||
member _.SetLastSeen userId webLogId = setLastSeen userId webLogId
|
||||
member _.Update user = save user
|
||||
|
||||
199
src/MyWebLog.Data/PostgresData.fs
Normal file
@@ -0,0 +1,199 @@
|
||||
namespace MyWebLog.Data
|
||||
|
||||
open BitBadger.Npgsql.Documents
|
||||
open BitBadger.Npgsql.FSharp.Documents
|
||||
open Microsoft.Extensions.Logging
|
||||
open MyWebLog
|
||||
open MyWebLog.Data.Postgres
|
||||
open Newtonsoft.Json
|
||||
open Npgsql
|
||||
open Npgsql.FSharp
|
||||
|
||||
/// Data implementation for PostgreSQL
|
||||
type PostgresData (source : NpgsqlDataSource, log : ILogger<PostgresData>, ser : JsonSerializer) =
|
||||
|
||||
/// Create any needed tables
|
||||
let ensureTables () = backgroundTask {
|
||||
// Set up the PostgreSQL document store
|
||||
Configuration.useDataSource source
|
||||
Configuration.useSerializer
|
||||
{ new IDocumentSerializer with
|
||||
member _.Serialize<'T> (it : 'T) : string = Utils.serialize ser it
|
||||
member _.Deserialize<'T> (it : string) : 'T = Utils.deserialize ser it
|
||||
}
|
||||
|
||||
let! tables =
|
||||
Sql.fromDataSource source
|
||||
|> Sql.query "SELECT tablename FROM pg_tables WHERE schemaname = 'public'"
|
||||
|> Sql.executeAsync (fun row -> row.string "tablename")
|
||||
let needsTable table = not (List.contains table tables)
|
||||
// Track whether the theme table needs to be created
|
||||
let mutable isNew = false
|
||||
|
||||
let sql = seq {
|
||||
// Theme tables
|
||||
if needsTable Table.Theme then
|
||||
isNew <- true
|
||||
Definition.createTable Table.Theme
|
||||
if needsTable Table.ThemeAsset then
|
||||
$"CREATE TABLE {Table.ThemeAsset} (
|
||||
theme_id TEXT NOT NULL REFERENCES {Table.Theme} (id) ON DELETE CASCADE,
|
||||
path TEXT NOT NULL,
|
||||
updated_on TIMESTAMPTZ NOT NULL,
|
||||
data BYTEA NOT NULL,
|
||||
PRIMARY KEY (theme_id, path))"
|
||||
|
||||
// Web log table
|
||||
if needsTable Table.WebLog then
|
||||
Definition.createTable Table.WebLog
|
||||
Definition.createIndex Table.WebLog Optimized
|
||||
|
||||
// Category table
|
||||
if needsTable Table.Category then
|
||||
Definition.createTable Table.Category
|
||||
Definition.createIndex Table.Category Optimized
|
||||
|
||||
// Web log user table
|
||||
if needsTable Table.WebLogUser then
|
||||
Definition.createTable Table.WebLogUser
|
||||
Definition.createIndex Table.WebLogUser Optimized
|
||||
|
||||
// Page tables
|
||||
if needsTable Table.Page then
|
||||
Definition.createTable Table.Page
|
||||
$"CREATE INDEX page_web_log_idx ON {Table.Page} ((data ->> '{nameof Page.empty.WebLogId}'))"
|
||||
$"CREATE INDEX page_author_idx ON {Table.Page} ((data ->> '{nameof Page.empty.AuthorId}'))"
|
||||
$"CREATE INDEX page_permalink_idx ON {Table.Page}
|
||||
((data ->> '{nameof Page.empty.WebLogId}'), (data ->> '{nameof Page.empty.Permalink}'))"
|
||||
if needsTable Table.PageRevision then
|
||||
$"CREATE TABLE {Table.PageRevision} (
|
||||
page_id TEXT NOT NULL REFERENCES {Table.Page} (id) ON DELETE CASCADE,
|
||||
as_of TIMESTAMPTZ NOT NULL,
|
||||
revision_text TEXT NOT NULL,
|
||||
PRIMARY KEY (page_id, as_of))"
|
||||
|
||||
// Post tables
|
||||
if needsTable Table.Post then
|
||||
Definition.createTable Table.Post
|
||||
$"CREATE INDEX post_web_log_idx ON {Table.Post} ((data ->> '{nameof Post.empty.WebLogId}'))"
|
||||
$"CREATE INDEX post_author_idx ON {Table.Post} ((data ->> '{nameof Post.empty.AuthorId}'))"
|
||||
$"CREATE INDEX post_status_idx ON {Table.Post}
|
||||
((data ->> '{nameof Post.empty.WebLogId}'), (data ->> '{nameof Post.empty.Status}'),
|
||||
(data ->> '{nameof Post.empty.UpdatedOn}'))"
|
||||
$"CREATE INDEX post_permalink_idx ON {Table.Post}
|
||||
((data ->> '{nameof Post.empty.WebLogId}'), (data ->> '{nameof Post.empty.Permalink}'))"
|
||||
$"CREATE INDEX post_category_idx ON {Table.Post} USING GIN ((data['{nameof Post.empty.CategoryIds}']))"
|
||||
$"CREATE INDEX post_tag_idx ON {Table.Post} USING GIN ((data['{nameof Post.empty.Tags}']))"
|
||||
if needsTable Table.PostRevision then
|
||||
$"CREATE TABLE {Table.PostRevision} (
|
||||
post_id TEXT NOT NULL REFERENCES {Table.Post} (id) ON DELETE CASCADE,
|
||||
as_of TIMESTAMPTZ NOT NULL,
|
||||
revision_text TEXT NOT NULL,
|
||||
PRIMARY KEY (post_id, as_of))"
|
||||
if needsTable Table.PostComment then
|
||||
Definition.createTable Table.PostComment
|
||||
$"CREATE INDEX post_comment_post_idx ON {Table.PostComment}
|
||||
((data ->> '{nameof Comment.empty.PostId}'))"
|
||||
|
||||
// Tag map table
|
||||
if needsTable Table.TagMap then
|
||||
Definition.createTable Table.TagMap
|
||||
Definition.createIndex Table.TagMap Optimized
|
||||
|
||||
// Uploaded file table
|
||||
if needsTable Table.Upload then
|
||||
$"CREATE TABLE {Table.Upload} (
|
||||
id TEXT NOT NULL PRIMARY KEY,
|
||||
web_log_id TEXT NOT NULL REFERENCES {Table.WebLog} (id),
|
||||
path TEXT NOT NULL,
|
||||
updated_on TIMESTAMPTZ NOT NULL,
|
||||
data BYTEA NOT NULL)"
|
||||
$"CREATE INDEX upload_web_log_idx ON {Table.Upload} (web_log_id)"
|
||||
$"CREATE INDEX upload_path_idx ON {Table.Upload} (web_log_id, path)"
|
||||
|
||||
// Database version table
|
||||
if needsTable Table.DbVersion then
|
||||
$"CREATE TABLE {Table.DbVersion} (id TEXT NOT NULL PRIMARY KEY)"
|
||||
$"INSERT INTO {Table.DbVersion} VALUES ('{Utils.currentDbVersion}')"
|
||||
}
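// every statement still needed runs in a single transaction; table creations are logged as the statements are prepared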
|
||||
|
||||
Sql.fromDataSource source
|
||||
|> Sql.executeTransactionAsync
|
||||
(sql
|
||||
|> Seq.map (fun s ->
|
||||
let parts = s.Replace(" IF NOT EXISTS", "", System.StringComparison.OrdinalIgnoreCase).Split ' '
|
||||
if parts[1].ToLowerInvariant () = "table" then
|
||||
log.LogInformation $"Creating {parts[2]} table..."
|
||||
s, [ [] ])
|
||||
|> List.ofSeq)
|
||||
|> Async.AwaitTask
|
||||
|> Async.RunSynchronously
|
||||
|> ignore
|
||||
}
|
||||
|
||||
/// Set a specific database version
|
||||
let setDbVersion version =
|
||||
Custom.nonQuery $"DELETE FROM db_version; INSERT INTO db_version VALUES ('%s{version}')" []
|
||||
|
||||
/// Migrate from v2-rc2 to v2 (manual migration required)
|
||||
let migrateV2Rc2ToV2 () = backgroundTask {
|
||||
Utils.logMigrationStep log "v2-rc2 to v2" "Requires user action"
|
||||
|
||||
let! webLogs =
|
||||
Configuration.dataSource ()
|
||||
|> Sql.fromDataSource
|
||||
|> Sql.query $"SELECT url_base, slug FROM {Table.WebLog}"
|
||||
|> Sql.executeAsync (fun row -> row.string "url_base", row.string "slug")
|
||||
|
||||
[ "** MANUAL DATABASE UPGRADE REQUIRED **"; ""
|
||||
"The data structure for PostgreSQL changed significantly between v2-rc2 and v2."
|
||||
"To migrate your data:"
|
||||
" - Use a v2-rc2 executable to back up each web log"
|
||||
" - Drop all tables from the database"
|
||||
" - Use this executable to restore each backup"; ""
|
||||
"Commands to back up all web logs:"
|
||||
yield! webLogs |> List.map (fun (url, slug) -> sprintf "./myWebLog backup %s v2-rc2.%s.json" url slug)
|
||||
]
|
||||
|> String.concat "\n"
|
||||
|> log.LogWarning
|
||||
|
||||
log.LogCritical "myWebLog will now exit"
|
||||
exit 1
|
||||
}
|
||||
|
||||
/// Do required data migration between versions
|
||||
let migrate version = backgroundTask {
|
||||
match version with
|
||||
| Some "v2" -> ()
|
||||
| Some "v2-rc2" -> do! migrateV2Rc2ToV2 ()
|
||||
// Future versions will be inserted here
|
||||
| Some _
|
||||
| None ->
|
||||
log.LogWarning $"Unknown database version; assuming {Utils.currentDbVersion}"
|
||||
do! setDbVersion Utils.currentDbVersion
|
||||
}
|
||||
|
||||
interface IData with
|
||||
|
||||
member _.Category = PostgresCategoryData log
|
||||
member _.Page = PostgresPageData log
|
||||
member _.Post = PostgresPostData log
|
||||
member _.TagMap = PostgresTagMapData log
|
||||
member _.Theme = PostgresThemeData log
|
||||
member _.ThemeAsset = PostgresThemeAssetData log
|
||||
member _.Upload = PostgresUploadData log
|
||||
member _.WebLog = PostgresWebLogData log
|
||||
member _.WebLogUser = PostgresWebLogUserData log
|
||||
|
||||
member _.Serializer = ser
|
||||
|
||||
member _.StartUp () = backgroundTask {
|
||||
log.LogTrace "PostgresData.StartUp"
|
||||
do! ensureTables ()
|
||||
|
||||
let! version = Custom.single "SELECT id FROM db_version" [] (fun row -> row.string "id")
|
||||
match version with
|
||||
| Some v when v = Utils.currentDbVersion -> ()
|
||||
| Some _
|
||||
| None -> do! migrate version
|
||||
}
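// Usage sketch (illustrative; not part of this commit): how PostgresData might be constructed and started.
// The connection string, null logger, and default serializer are assumptions for the example; the real host
// builds these from its configuration and registers myWebLog's JSON converters on the serializer.
module private PostgresDataUsageSketch =

    open Microsoft.Extensions.Logging.Abstractions
    open Newtonsoft.Json
    open Npgsql

    let start () = backgroundTask {
        // hypothetical connection string - substitute real settings
        let dataSource = NpgsqlDataSourceBuilder("Host=localhost;Database=myweblog;Username=myweblog").Build ()
        let serializer = JsonSerializer.CreateDefault ()
        let data       = PostgresData (dataSource, NullLogger<PostgresData>.Instance, serializer) :> IData
        // creates any missing tables and runs pending migrations
        do! data.StartUp ()
        return data
    }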
|
||||
@@ -5,7 +5,6 @@ open MyWebLog
|
||||
open RethinkDb.Driver
|
||||
|
||||
/// Functions to assist with retrieving data
|
||||
[<AutoOpen>]
|
||||
module private RethinkHelpers =
|
||||
|
||||
/// Table names
|
||||
@@ -18,6 +17,9 @@ module private RethinkHelpers =
|
||||
/// The comment table
|
||||
let Comment = "Comment"
|
||||
|
||||
/// The database version table
|
||||
let DbVersion = "DbVersion"
|
||||
|
||||
/// The page table
|
||||
let Page = "Page"
|
||||
|
||||
@@ -43,7 +45,7 @@ module private RethinkHelpers =
|
||||
let WebLogUser = "WebLogUser"
|
||||
|
||||
/// A list of all tables
|
||||
let all = [ Category; Comment; Page; Post; TagMap; Theme; ThemeAsset; Upload; WebLog; WebLogUser ]
|
||||
let all = [ Category; Comment; DbVersion; Page; Post; TagMap; Theme; ThemeAsset; Upload; WebLog; WebLogUser ]
|
||||
|
||||
|
||||
/// Index names for indexes not on a data item's name
|
||||
@@ -87,6 +89,7 @@ open System
|
||||
open Microsoft.Extensions.Logging
|
||||
open MyWebLog.ViewModels
|
||||
open RethinkDb.Driver.FSharp
|
||||
open RethinkHelpers
|
||||
|
||||
/// RethinkDB implementation of data functions for myWebLog
|
||||
type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<RethinkDbData>) =
|
||||
@@ -188,6 +191,48 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
|
||||
write; withRetryDefault; ignoreResult conn
|
||||
}
|
||||
|
||||
/// Set a specific database version
|
||||
let setDbVersion (version : string) = backgroundTask {
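// the version table holds a single document; clear it, then insert the new version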
|
||||
do! rethink {
|
||||
withTable Table.DbVersion
|
||||
delete
|
||||
write; withRetryOnce; ignoreResult conn
|
||||
}
|
||||
do! rethink {
|
||||
withTable Table.DbVersion
|
||||
insert {| Id = version |}
|
||||
write; withRetryOnce; ignoreResult conn
|
||||
}
|
||||
}
|
||||
|
||||
/// Migrate from v2-rc1 to v2-rc2
|
||||
let migrateV2Rc1ToV2Rc2 () = backgroundTask {
|
||||
let logStep = Utils.logMigrationStep log "v2-rc1 to v2-rc2"
|
||||
logStep "**IMPORTANT**"
|
||||
logStep "See release notes about required backup/restoration for RethinkDB."
|
||||
logStep "If there is an error immediately below this message, this is why."
|
||||
logStep "Setting database version to v2-rc2"
|
||||
do! setDbVersion "v2-rc2"
|
||||
}
|
||||
|
||||
/// Migrate from v2-rc2 to v2
|
||||
let migrateV2Rc2ToV2 () = backgroundTask {
|
||||
Utils.logMigrationStep log "v2-rc2 to v2" "Setting database version; no migration required"
|
||||
do! setDbVersion "v2"
|
||||
}
|
||||
|
||||
/// Migrate data between versions
|
||||
let migrate version = backgroundTask {
|
||||
match version with
|
||||
| Some v when v = "v2" -> ()
|
||||
| Some v when v = "v2-rc2" -> do! migrateV2Rc2ToV2 ()
|
||||
| Some v when v = "v2-rc1" -> do! migrateV2Rc1ToV2Rc2 ()
|
||||
| Some _
|
||||
| None ->
|
||||
log.LogWarning $"Unknown database version; assuming {Utils.currentDbVersion}"
|
||||
do! setDbVersion Utils.currentDbVersion
|
||||
}
|
||||
|
||||
/// The connection for this instance
|
||||
member _.Conn = conn
|
||||
|
||||
@@ -1079,7 +1124,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
|
||||
do! rethink {
|
||||
withTable Table.WebLogUser
|
||||
get userId
|
||||
update [ nameof WebLogUser.empty.LastSeenOn, DateTime.UtcNow :> obj ]
|
||||
update [ nameof WebLogUser.empty.LastSeenOn, Noda.now () :> obj ]
|
||||
write; withRetryOnce; ignoreResult conn
|
||||
}
|
||||
| None -> ()
|
||||
@@ -1094,7 +1139,6 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
|
||||
nameof user.LastName, user.LastName
|
||||
nameof user.PreferredName, user.PreferredName
|
||||
nameof user.PasswordHash, user.PasswordHash
|
||||
nameof user.Salt, user.Salt
|
||||
nameof user.Url, user.Url
|
||||
nameof user.AccessLevel, user.AccessLevel
|
||||
]
|
||||
@@ -1102,6 +1146,9 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
|
||||
}
|
||||
}
|
||||
|
||||
member _.Serializer =
|
||||
Net.Converter.Serializer
|
||||
|
||||
member _.StartUp () = backgroundTask {
|
||||
let! dbs = rethink<string list> { dbList; result; withRetryOnce conn }
|
||||
if not (dbs |> List.contains config.Database) then
|
||||
@@ -1114,6 +1161,14 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
|
||||
log.LogInformation $"Creating table {tbl}..."
|
||||
do! rethink { tableCreate tbl [ PrimaryKey "Id" ]; write; withRetryOnce; ignoreResult conn }
|
||||
|
||||
if not (List.contains Table.DbVersion tables) then
|
||||
// Version table added in v2-rc2; this will flag that migration to be run
|
||||
do! rethink {
|
||||
withTable Table.DbVersion
|
||||
insert {| Id = "v2-rc1" |}
|
||||
write; withRetryOnce; ignoreResult conn
|
||||
}
|
||||
|
||||
do! ensureIndexes Table.Category [ nameof Category.empty.WebLogId ]
|
||||
do! ensureIndexes Table.Comment [ nameof Comment.empty.PostId ]
|
||||
do! ensureIndexes Table.Page [ nameof Page.empty.WebLogId; nameof Page.empty.AuthorId ]
|
||||
@@ -1122,4 +1177,13 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
|
||||
do! ensureIndexes Table.Upload []
|
||||
do! ensureIndexes Table.WebLog [ nameof WebLog.empty.UrlBase ]
|
||||
do! ensureIndexes Table.WebLogUser [ nameof WebLogUser.empty.WebLogId ]
|
||||
|
||||
let! version = rethink<{| Id : string |} list> {
|
||||
withTable Table.DbVersion
|
||||
limit 1
|
||||
result; withRetryOnce conn
|
||||
}
|
||||
match List.tryHead version with
|
||||
| Some v when v.Id = "v2-rc2" -> ()
|
||||
| it -> do! migrate (it |> Option.map (fun x -> x.Id))
|
||||
}
|
||||
|
||||
@@ -5,6 +5,8 @@ module MyWebLog.Data.SQLite.Helpers
|
||||
open System
|
||||
open Microsoft.Data.Sqlite
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
open NodaTime.Text
|
||||
|
||||
/// Run a command that returns a count
|
||||
let count (cmd : SqliteCommand) = backgroundTask {
|
||||
@@ -12,23 +14,6 @@ let count (cmd : SqliteCommand) = backgroundTask {
|
||||
return int (it :?> int64)
|
||||
}
|
||||
|
||||
/// Get lists of items removed from and added to the given lists
|
||||
let diffLists<'T, 'U when 'U : equality> oldItems newItems (f : 'T -> 'U) =
|
||||
let diff compList = fun item -> not (compList |> List.exists (fun other -> f item = f other))
|
||||
List.filter (diff newItems) oldItems, List.filter (diff oldItems) newItems
|
||||
|
||||
/// Find meta items added and removed
|
||||
let diffMetaItems (oldItems : MetaItem list) newItems =
|
||||
diffLists oldItems newItems (fun item -> $"{item.Name}|{item.Value}")
|
||||
|
||||
/// Find the permalinks added and removed
|
||||
let diffPermalinks oldLinks newLinks =
|
||||
diffLists oldLinks newLinks Permalink.toString
|
||||
|
||||
/// Find the revisions added and removed
|
||||
let diffRevisions oldRevs newRevs =
|
||||
diffLists oldRevs newRevs (fun (rev : Revision) -> $"{rev.AsOf.Ticks}|{MarkupText.toString rev.Text}")
|
||||
|
||||
/// Create a list of items from the given data reader
|
||||
let toList<'T> (it : SqliteDataReader -> 'T) (rdr : SqliteDataReader) =
|
||||
seq { while rdr.Read () do it rdr }
|
||||
@@ -47,6 +32,42 @@ let write (cmd : SqliteCommand) = backgroundTask {
|
||||
()
|
||||
}
|
||||
|
||||
/// Add a possibly-missing parameter, substituting null for None
|
||||
let maybe<'T> (it : 'T option) : obj = match it with Some x -> x :> obj | None -> DBNull.Value
|
||||
|
||||
/// Create a value for a Duration
|
||||
let durationParam =
|
||||
DurationPattern.Roundtrip.Format
|
||||
|
||||
/// Create a value for an Instant
|
||||
let instantParam =
|
||||
InstantPattern.General.Format
|
||||
|
||||
/// Create an optional value for a Duration
|
||||
let maybeDuration =
|
||||
Option.map durationParam >> maybe
|
||||
|
||||
/// Create an optional value for an Instant
|
||||
let maybeInstant =
|
||||
Option.map instantParam >> maybe
|
||||
|
||||
/// Create the SQL and parameters for an IN clause
|
||||
let inClause<'T> colNameAndPrefix paramName (valueFunc: 'T -> string) (items : 'T list) =
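// builds "{colNameAndPrefix} IN (@{paramName}0, @{paramName}1, ...)" plus one SqliteParameter per item;
// the first item seeds the clause and the fold appends the remaining items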
|
||||
if List.isEmpty items then "", []
|
||||
else
|
||||
let mutable idx = 0
|
||||
items
|
||||
|> List.skip 1
|
||||
|> List.fold (fun (itemS, itemP) it ->
|
||||
idx <- idx + 1
|
||||
$"{itemS}, @%s{paramName}{idx}", (SqliteParameter ($"@%s{paramName}{idx}", valueFunc it) :: itemP))
|
||||
(Seq.ofList items
|
||||
|> Seq.map (fun it ->
|
||||
$"%s{colNameAndPrefix} IN (@%s{paramName}0", [ SqliteParameter ($"@%s{paramName}0", valueFunc it) ])
|
||||
|> Seq.head)
|
||||
|> function sql, ps -> $"{sql})", ps
|
||||
|
||||
|
||||
/// Functions to map domain items from a data reader
|
||||
module Map =
|
||||
|
||||
@@ -73,6 +94,26 @@ module Map =
|
||||
/// Get a string value from a data reader
|
||||
let getString col (rdr : SqliteDataReader) = rdr.GetString (rdr.GetOrdinal col)
|
||||
|
||||
/// Parse a Duration from the given value
|
||||
let parseDuration value =
|
||||
match DurationPattern.Roundtrip.Parse value with
|
||||
| it when it.Success -> it.Value
|
||||
| it -> raise it.Exception
|
||||
|
||||
/// Get a Duration value from a data reader
|
||||
let getDuration col rdr =
|
||||
getString col rdr |> parseDuration
|
||||
|
||||
/// Parse an Instant from the given value
|
||||
let parseInstant value =
|
||||
match InstantPattern.General.Parse value with
|
||||
| it when it.Success -> it.Value
|
||||
| it -> raise it.Exception
|
||||
|
||||
/// Get an Instant value from a data reader
|
||||
let getInstant col rdr =
|
||||
getString col rdr |> parseInstant
|
||||
|
||||
/// Get a timespan value from a data reader
|
||||
let getTimeSpan col (rdr : SqliteDataReader) = rdr.GetTimeSpan (rdr.GetOrdinal col)
|
||||
|
||||
@@ -96,6 +137,14 @@ module Map =
|
||||
let tryString col (rdr : SqliteDataReader) =
|
||||
if rdr.IsDBNull (rdr.GetOrdinal col) then None else Some (getString col rdr)
|
||||
|
||||
/// Get a possibly null Duration value from a data reader
|
||||
let tryDuration col rdr =
|
||||
tryString col rdr |> Option.map parseDuration
|
||||
|
||||
/// Get a possibly null Instant value from a data reader
|
||||
let tryInstant col rdr =
|
||||
tryString col rdr |> Option.map parseInstant
|
||||
|
||||
/// Get a possibly null timespan value from a data reader
|
||||
let tryTimeSpan col (rdr : SqliteDataReader) =
|
||||
if rdr.IsDBNull (rdr.GetOrdinal col) then None else Some (getTimeSpan col rdr)
|
||||
@@ -114,60 +163,36 @@ module Map =
|
||||
}
|
||||
|
||||
/// Create a custom feed from the current row in the given data reader
|
||||
let toCustomFeed rdr : CustomFeed =
|
||||
let toCustomFeed ser rdr : CustomFeed =
|
||||
{ Id = getString "id" rdr |> CustomFeedId
|
||||
Source = getString "source" rdr |> CustomFeedSource.parse
|
||||
Path = getString "path" rdr |> Permalink
|
||||
Podcast =
|
||||
if rdr.IsDBNull (rdr.GetOrdinal "title") then
|
||||
None
|
||||
else
|
||||
Some {
|
||||
Title = getString "title" rdr
|
||||
Subtitle = tryString "subtitle" rdr
|
||||
ItemsInFeed = getInt "items_in_feed" rdr
|
||||
Summary = getString "summary" rdr
|
||||
DisplayedAuthor = getString "displayed_author" rdr
|
||||
Email = getString "email" rdr
|
||||
ImageUrl = getString "image_url" rdr |> Permalink
|
||||
AppleCategory = getString "apple_category" rdr
|
||||
AppleSubcategory = tryString "apple_subcategory" rdr
|
||||
Explicit = getString "explicit" rdr |> ExplicitRating.parse
|
||||
DefaultMediaType = tryString "default_media_type" rdr
|
||||
MediaBaseUrl = tryString "media_base_url" rdr
|
||||
PodcastGuid = tryGuid "podcast_guid" rdr
|
||||
FundingUrl = tryString "funding_url" rdr
|
||||
FundingText = tryString "funding_text" rdr
|
||||
Medium = tryString "medium" rdr |> Option.map PodcastMedium.parse
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a meta item from the current row in the given data reader
|
||||
let toMetaItem rdr : MetaItem =
|
||||
{ Name = getString "name" rdr
|
||||
Value = getString "value" rdr
|
||||
Podcast = tryString "podcast" rdr |> Option.map (Utils.deserialize ser)
|
||||
}
|
||||
|
||||
/// Create a permalink from the current row in the given data reader
|
||||
let toPermalink rdr = getString "permalink" rdr |> Permalink
|
||||
|
||||
/// Create a page from the current row in the given data reader
|
||||
let toPage rdr : Page =
|
||||
let toPage ser rdr : Page =
|
||||
{ Page.empty with
|
||||
Id = getString "id" rdr |> PageId
|
||||
WebLogId = getString "web_log_id" rdr |> WebLogId
|
||||
AuthorId = getString "author_id" rdr |> WebLogUserId
|
||||
Title = getString "title" rdr
|
||||
Permalink = toPermalink rdr
|
||||
PublishedOn = getDateTime "published_on" rdr
|
||||
UpdatedOn = getDateTime "updated_on" rdr
|
||||
PublishedOn = getInstant "published_on" rdr
|
||||
UpdatedOn = getInstant "updated_on" rdr
|
||||
IsInPageList = getBoolean "is_in_page_list" rdr
|
||||
Template = tryString "template" rdr
|
||||
Text = getString "page_text" rdr
|
||||
Metadata = tryString "meta_items" rdr
|
||||
|> Option.map (Utils.deserialize ser)
|
||||
|> Option.defaultValue []
|
||||
}
|
||||
|
||||
/// Create a post from the current row in the given data reader
let toPost rdr : Post =
let toPost ser rdr : Post =
    { Post.empty with
        Id = getString "id" rdr |> PostId
        WebLogId = getString "web_log_id" rdr |> WebLogId
@@ -175,38 +200,19 @@ module Map =
        Status = getString "status" rdr |> PostStatus.parse
        Title = getString "title" rdr
        Permalink = toPermalink rdr
        PublishedOn = tryDateTime "published_on" rdr
        UpdatedOn = getDateTime "updated_on" rdr
        PublishedOn = tryInstant "published_on" rdr
        UpdatedOn = getInstant "updated_on" rdr
        Template = tryString "template" rdr
        Text = getString "post_text" rdr
        Episode =
            match tryString "media" rdr with
            | Some media ->
                Some {
                    Media = media
                    Length = getLong "length" rdr
                    Duration = tryTimeSpan "duration" rdr
                    MediaType = tryString "media_type" rdr
                    ImageUrl = tryString "image_url" rdr
                    Subtitle = tryString "subtitle" rdr
                    Explicit = tryString "explicit" rdr |> Option.map ExplicitRating.parse
                    ChapterFile = tryString "chapter_file" rdr
                    ChapterType = tryString "chapter_type" rdr
                    TranscriptUrl = tryString "transcript_url" rdr
                    TranscriptType = tryString "transcript_type" rdr
                    TranscriptLang = tryString "transcript_lang" rdr
                    TranscriptCaptions = tryBoolean "transcript_captions" rdr
                    SeasonNumber = tryInt "season_number" rdr
                    SeasonDescription = tryString "season_description" rdr
                    EpisodeNumber = tryString "episode_number" rdr |> Option.map Double.Parse
                    EpisodeDescription = tryString "episode_description" rdr
                }
            | None -> None
        Episode = tryString "episode" rdr |> Option.map (Utils.deserialize ser)
        Metadata = tryString "meta_items" rdr
                   |> Option.map (Utils.deserialize ser)
                   |> Option.defaultValue []
    }

/// Create a revision from the current row in the given data reader
let toRevision rdr : Revision =
    { AsOf = getDateTime "as_of" rdr
    { AsOf = getInstant "as_of" rdr
      Text = getString "revision_text" rdr |> MarkupText.parse
    }

@@ -237,7 +243,7 @@ module Map =
|
||||
else
|
||||
[||]
|
||||
{ Id = ThemeAssetId (ThemeId (getString "theme_id" rdr), getString "path" rdr)
|
||||
UpdatedOn = getDateTime "updated_on" rdr
|
||||
UpdatedOn = getInstant "updated_on" rdr
|
||||
Data = assetData
|
||||
}
|
||||
|
||||
@@ -260,7 +266,7 @@ module Map =
|
||||
{ Id = getString "id" rdr |> UploadId
|
||||
WebLogId = getString "web_log_id" rdr |> WebLogId
|
||||
Path = getString "path" rdr |> Permalink
|
||||
UpdatedOn = getDateTime "updated_on" rdr
|
||||
UpdatedOn = getInstant "updated_on" rdr
|
||||
Data = data
|
||||
}
|
||||
|
||||
@@ -297,16 +303,12 @@ module Map =
|
||||
LastName = getString "last_name" rdr
|
||||
PreferredName = getString "preferred_name" rdr
|
||||
PasswordHash = getString "password_hash" rdr
|
||||
Salt = getGuid "salt" rdr
|
||||
Url = tryString "url" rdr
|
||||
AccessLevel = getString "access_level" rdr |> AccessLevel.parse
|
||||
CreatedOn = getDateTime "created_on" rdr
|
||||
LastSeenOn = tryDateTime "last_seen_on" rdr
|
||||
CreatedOn = getInstant "created_on" rdr
|
||||
LastSeenOn = tryInstant "last_seen_on" rdr
|
||||
}
|
||||
|
||||
/// Add a possibly-missing parameter, substituting null for None
let maybe<'T> (it : 'T option) : obj = match it with Some x -> x :> obj | None -> DBNull.Value

/// Add a web log ID parameter
let addWebLogId (cmd : SqliteCommand) webLogId =
    cmd.Parameters.AddWithValue ("@webLogId", WebLogId.toString webLogId) |> ignore

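A quick, self-contained illustration of how maybe behaves when binding parameters (the connection string and values here are made up for the example):

open Microsoft.Data.Sqlite

let demoMaybe () =
    use conn = new SqliteConnection ("Data Source=:memory:")
    conn.Open ()
    use cmd = conn.CreateCommand ()
    cmd.CommandText <- "SELECT @template, @subtitle"
    // Some "single-column" binds the string itself...
    cmd.Parameters.AddWithValue ("@template", maybe (Some "single-column")) |> ignore
    // ...while None binds DBNull.Value, which SQLite stores as NULL
    cmd.Parameters.AddWithValue ("@subtitle", maybe (None : string option)) |> ignore
    cmd.ExecuteScalar ()
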
@@ -21,12 +21,12 @@ type SQLiteCategoryData (conn : SqliteConnection) =
    /// Add a category
    let add cat = backgroundTask {
        use cmd = conn.CreateCommand ()
        cmd.CommandText <- """
            INSERT INTO category (
        cmd.CommandText <-
            "INSERT INTO category (
                id, web_log_id, name, slug, description, parent_id
            ) VALUES (
                @id, @webLogId, @name, @slug, @description, @parentId
            )"""
            )"
        addCategoryParameters cmd cat
        let! _ = cmd.ExecuteNonQueryAsync ()
        ()
@@ -68,24 +68,23 @@ type SQLiteCategoryData (conn : SqliteConnection) =
            ordered
            |> Seq.map (fun it -> backgroundTask {
                // Parent category post counts include posts in subcategories
                let catSql, catParams =
                    ordered
                    |> Seq.filter (fun cat -> cat.ParentNames |> Array.contains it.Name)
                    |> Seq.map (fun cat -> cat.Id)
                    |> Seq.append (Seq.singleton it.Id)
                    |> List.ofSeq
                    |> inClause "AND pc.category_id" "catId" id
                cmd.Parameters.Clear ()
                addWebLogId cmd webLogId
                cmd.CommandText <- """
                cmd.Parameters.AddRange catParams
                cmd.CommandText <- $"
                    SELECT COUNT(DISTINCT p.id)
                    FROM post p
                    INNER JOIN post_category pc ON pc.post_id = p.id
                    WHERE p.web_log_id = @webLogId
                      AND p.status = 'Published'
                      AND pc.category_id IN ("""
                ordered
                |> Seq.filter (fun cat -> cat.ParentNames |> Array.contains it.Name)
                |> Seq.map (fun cat -> cat.Id)
                |> Seq.append (Seq.singleton it.Id)
                |> Seq.iteri (fun idx item ->
                    if idx > 0 then cmd.CommandText <- $"{cmd.CommandText}, "
                    cmd.CommandText <- $"{cmd.CommandText}@catId{idx}"
                    cmd.Parameters.AddWithValue ($"@catId{idx}", item) |> ignore)
                cmd.CommandText <- $"{cmd.CommandText})"
                      {catSql}"
                let! postCount = count cmd
                return it.Id, postCount
            })
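The refactor above (and the similar ones later in this diff) relies on an inClause helper that builds the IN (...) fragment and its parameters in one pass. That helper is not shown here; the sketch below is consistent with how it is called, assuming Microsoft.Data.Sqlite is open as in these files, though the real implementation may differ:

/// Sketch: build "<prefix> IN (@name0, @name1, ...)" plus the matching parameters
let inClause<'T> (prefix : string) (paramName : string) (valueFunc : 'T -> string) (items : 'T list) =
    match items with
    | [] -> "", []
    | _  ->
        let names = items |> List.mapi (fun idx _ -> $"@{paramName}{idx}")
        let sql   = $"""{prefix} IN ({names |> String.concat ", "})"""
        let ps    = items |> List.mapi (fun idx it -> SqliteParameter ($"@{paramName}{idx}", valueFunc it))
        sql, ps

The caller then appends the returned fragment to its command text and hands the parameter list to cmd.Parameters.AddRange, as the rewritten post-count query above does.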
@@ -133,19 +132,15 @@ type SQLiteCategoryData (conn : SqliteConnection) =
|
||||
cmd.Parameters.AddWithValue ("@newParentId", maybe (cat.ParentId |> Option.map CategoryId.toString))
|
||||
|> ignore
|
||||
do! write cmd
|
||||
// Delete the category off all posts where it is assigned
|
||||
cmd.CommandText <- """
|
||||
DELETE FROM post_category
|
||||
// Delete the category off all posts where it is assigned, and the category itself
|
||||
cmd.CommandText <-
|
||||
"DELETE FROM post_category
|
||||
WHERE category_id = @id
|
||||
AND post_id IN (SELECT id FROM post WHERE web_log_id = @webLogId)"""
|
||||
AND post_id IN (SELECT id FROM post WHERE web_log_id = @webLogId);
|
||||
DELETE FROM category WHERE id = @id"
|
||||
cmd.Parameters.Clear ()
|
||||
let catIdParameter = cmd.Parameters.AddWithValue ("@id", CategoryId.toString catId)
|
||||
cmd.Parameters.AddWithValue ("@webLogId", WebLogId.toString webLogId) |> ignore
|
||||
do! write cmd
|
||||
// Delete the category itself
|
||||
cmd.CommandText <- "DELETE FROM category WHERE id = @id"
|
||||
cmd.Parameters.Clear ()
|
||||
cmd.Parameters.Add catIdParameter |> ignore
|
||||
let _ = cmd.Parameters.AddWithValue ("@id", CategoryId.toString catId)
|
||||
addWebLogId cmd webLogId
|
||||
do! write cmd
|
||||
return if children = 0 then CategoryDeleted else ReassignedChildCategories
|
||||
| None -> return CategoryNotFound
|
||||
@@ -160,14 +155,14 @@ type SQLiteCategoryData (conn : SqliteConnection) =
|
||||
/// Update a category
|
||||
let update cat = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
UPDATE category
|
||||
cmd.CommandText <-
|
||||
"UPDATE category
|
||||
SET name = @name,
|
||||
slug = @slug,
|
||||
description = @description,
|
||||
parent_id = @parentId
|
||||
WHERE id = @id
|
||||
AND web_log_id = @webLogId"""
|
||||
AND web_log_id = @webLogId"
|
||||
addCategoryParameters cmd cat
|
||||
do! write cmd
|
||||
}
|
||||
|
||||
@@ -4,9 +4,10 @@ open System.Threading.Tasks
|
||||
open Microsoft.Data.Sqlite
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
open Newtonsoft.Json
|
||||
|
||||
/// SQLite myWebLog page data implementation
|
||||
type SQLitePageData (conn : SqliteConnection) =
|
||||
type SQLitePageData (conn : SqliteConnection, ser : JsonSerializer) =
|
||||
|
||||
// SUPPORT FUNCTIONS
|
||||
|
||||
@@ -17,22 +18,15 @@ type SQLitePageData (conn : SqliteConnection) =
|
||||
cmd.Parameters.AddWithValue ("@authorId", WebLogUserId.toString page.AuthorId)
|
||||
cmd.Parameters.AddWithValue ("@title", page.Title)
|
||||
cmd.Parameters.AddWithValue ("@permalink", Permalink.toString page.Permalink)
|
||||
cmd.Parameters.AddWithValue ("@publishedOn", page.PublishedOn)
|
||||
cmd.Parameters.AddWithValue ("@updatedOn", page.UpdatedOn)
|
||||
cmd.Parameters.AddWithValue ("@publishedOn", instantParam page.PublishedOn)
|
||||
cmd.Parameters.AddWithValue ("@updatedOn", instantParam page.UpdatedOn)
|
||||
cmd.Parameters.AddWithValue ("@isInPageList", page.IsInPageList)
|
||||
cmd.Parameters.AddWithValue ("@template", maybe page.Template)
|
||||
cmd.Parameters.AddWithValue ("@text", page.Text)
|
||||
cmd.Parameters.AddWithValue ("@metaItems", maybe (if List.isEmpty page.Metadata then None
|
||||
else Some (Utils.serialize ser page.Metadata)))
|
||||
] |> ignore
|
||||
|
||||
/// Append meta items to a page
|
||||
let appendPageMeta (page : Page) = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- "SELECT name, value FROM page_meta WHERE page_id = @id"
|
||||
cmd.Parameters.AddWithValue ("@id", PageId.toString page.Id) |> ignore
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
return { page with Metadata = toList Map.toMetaItem rdr }
|
||||
}
|
||||
|
||||
/// Append revisions and permalinks to a page
|
||||
let appendPageRevisionsAndPermalinks (page : Page) = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
@@ -48,41 +42,17 @@ type SQLitePageData (conn : SqliteConnection) =
|
||||
return { page with Revisions = toList Map.toRevision rdr }
|
||||
}
|
||||
|
||||
/// Return a page with no text (or meta items, prior permalinks, or revisions)
|
||||
let pageWithoutTextOrMeta rdr =
|
||||
{ Map.toPage rdr with Text = "" }
|
||||
/// Shorthand for mapping a data reader to a page
|
||||
let toPage =
|
||||
Map.toPage ser
|
||||
|
||||
/// Update a page's metadata items
|
||||
let updatePageMeta pageId oldItems newItems = backgroundTask {
|
||||
let toDelete, toAdd = diffMetaItems oldItems newItems
|
||||
if List.isEmpty toDelete && List.isEmpty toAdd then
|
||||
return ()
|
||||
else
|
||||
use cmd = conn.CreateCommand ()
|
||||
[ cmd.Parameters.AddWithValue ("@pageId", PageId.toString pageId)
|
||||
cmd.Parameters.Add ("@name", SqliteType.Text)
|
||||
cmd.Parameters.Add ("@value", SqliteType.Text)
|
||||
] |> ignore
|
||||
let runCmd (item : MetaItem) = backgroundTask {
|
||||
cmd.Parameters["@name" ].Value <- item.Name
|
||||
cmd.Parameters["@value"].Value <- item.Value
|
||||
do! write cmd
|
||||
}
|
||||
cmd.CommandText <- "DELETE FROM page_meta WHERE page_id = @pageId AND name = @name AND value = @value"
|
||||
toDelete
|
||||
|> List.map runCmd
|
||||
|> Task.WhenAll
|
||||
|> ignore
|
||||
cmd.CommandText <- "INSERT INTO page_meta VALUES (@pageId, @name, @value)"
|
||||
toAdd
|
||||
|> List.map runCmd
|
||||
|> Task.WhenAll
|
||||
|> ignore
|
||||
}
|
||||
/// Return a page with no text (or prior permalinks or revisions)
|
||||
let pageWithoutText rdr =
|
||||
{ toPage rdr with Text = "" }
|
||||
|
||||
/// Update a page's prior permalinks
|
||||
let updatePagePermalinks pageId oldLinks newLinks = backgroundTask {
|
||||
let toDelete, toAdd = diffPermalinks oldLinks newLinks
|
||||
let toDelete, toAdd = Utils.diffPermalinks oldLinks newLinks
|
||||
if List.isEmpty toDelete && List.isEmpty toAdd then
|
||||
return ()
|
||||
else
|
||||
@@ -108,7 +78,7 @@ type SQLitePageData (conn : SqliteConnection) =
|
||||
|
||||
/// Update a page's revisions
|
||||
let updatePageRevisions pageId oldRevs newRevs = backgroundTask {
|
||||
let toDelete, toAdd = diffRevisions oldRevs newRevs
|
||||
let toDelete, toAdd = Utils.diffRevisions oldRevs newRevs
|
||||
if List.isEmpty toDelete && List.isEmpty toAdd then
|
||||
return ()
|
||||
else
|
||||
@@ -116,7 +86,7 @@ type SQLitePageData (conn : SqliteConnection) =
|
||||
let runCmd withText rev = backgroundTask {
|
||||
cmd.Parameters.Clear ()
|
||||
[ cmd.Parameters.AddWithValue ("@pageId", PageId.toString pageId)
|
||||
cmd.Parameters.AddWithValue ("@asOf", rev.AsOf)
|
||||
cmd.Parameters.AddWithValue ("@asOf", instantParam rev.AsOf)
|
||||
] |> ignore
|
||||
if withText then cmd.Parameters.AddWithValue ("@text", MarkupText.toString rev.Text) |> ignore
|
||||
do! write cmd
|
||||
@@ -139,17 +109,16 @@ type SQLitePageData (conn : SqliteConnection) =
|
||||
let add page = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
// The page itself
|
||||
cmd.CommandText <- """
|
||||
INSERT INTO page (
|
||||
cmd.CommandText <-
|
||||
"INSERT INTO page (
|
||||
id, web_log_id, author_id, title, permalink, published_on, updated_on, is_in_page_list, template,
|
||||
page_text
|
||||
page_text, meta_items
|
||||
) VALUES (
|
||||
@id, @webLogId, @authorId, @title, @permalink, @publishedOn, @updatedOn, @isInPageList, @template,
|
||||
@text
|
||||
)"""
|
||||
@text, @metaItems
|
||||
)"
|
||||
addPageParameters cmd page
|
||||
do! write cmd
|
||||
do! updatePageMeta page.Id [] page.Metadata
|
||||
do! updatePagePermalinks page.Id [] page.PriorPermalinks
|
||||
do! updatePageRevisions page.Id [] page.Revisions
|
||||
}
|
||||
@@ -160,7 +129,7 @@ type SQLitePageData (conn : SqliteConnection) =
|
||||
cmd.CommandText <- "SELECT * FROM page WHERE web_log_id = @webLogId ORDER BY LOWER(title)"
|
||||
addWebLogId cmd webLogId
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
return toList pageWithoutTextOrMeta rdr
|
||||
return toList pageWithoutText rdr
|
||||
}
|
||||
|
||||
/// Count all pages for the given web log
|
||||
@@ -174,11 +143,11 @@ type SQLitePageData (conn : SqliteConnection) =
|
||||
/// Count all pages shown in the page list for the given web log
|
||||
let countListed webLogId = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
SELECT COUNT(id)
|
||||
cmd.CommandText <-
|
||||
"SELECT COUNT(id)
|
||||
FROM page
|
||||
WHERE web_log_id = @webLogId
|
||||
AND is_in_page_list = @isInPageList"""
|
||||
AND is_in_page_list = @isInPageList"
|
||||
addWebLogId cmd webLogId
|
||||
cmd.Parameters.AddWithValue ("@isInPageList", true) |> ignore
|
||||
return! count cmd
|
||||
@@ -190,11 +159,7 @@ type SQLitePageData (conn : SqliteConnection) =
|
||||
cmd.CommandText <- "SELECT * FROM page WHERE id = @id"
|
||||
cmd.Parameters.AddWithValue ("@id", PageId.toString pageId) |> ignore
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
match Helpers.verifyWebLog<Page> webLogId (fun it -> it.WebLogId) Map.toPage rdr with
|
||||
| Some page ->
|
||||
let! page = appendPageMeta page
|
||||
return Some page
|
||||
| None -> return None
|
||||
return Helpers.verifyWebLog<Page> webLogId (fun it -> it.WebLogId) (Map.toPage ser) rdr
|
||||
}
|
||||
|
||||
/// Find a complete page by its ID
|
||||
@@ -211,11 +176,10 @@ type SQLitePageData (conn : SqliteConnection) =
|
||||
| Some _ ->
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.Parameters.AddWithValue ("@id", PageId.toString pageId) |> ignore
|
||||
cmd.CommandText <- """
|
||||
DELETE FROM page_revision WHERE page_id = @id;
|
||||
cmd.CommandText <-
|
||||
"DELETE FROM page_revision WHERE page_id = @id;
|
||||
DELETE FROM page_permalink WHERE page_id = @id;
|
||||
DELETE FROM page_meta WHERE page_id = @id;
|
||||
DELETE FROM page WHERE id = @id"""
|
||||
DELETE FROM page WHERE id = @id"
|
||||
do! write cmd
|
||||
return true
|
||||
| None -> return false
|
||||
@@ -228,29 +192,21 @@ type SQLitePageData (conn : SqliteConnection) =
|
||||
addWebLogId cmd webLogId
|
||||
cmd.Parameters.AddWithValue ("@link", Permalink.toString permalink) |> ignore
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
if rdr.Read () then
|
||||
let! page = appendPageMeta (Map.toPage rdr)
|
||||
return Some page
|
||||
else
|
||||
return None
|
||||
return if rdr.Read () then Some (toPage rdr) else None
|
||||
}
|
||||
|
||||
/// Find the current permalink within a set of potential prior permalinks for the given web log
|
||||
let findCurrentPermalink permalinks webLogId = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
let linkSql, linkParams = inClause "AND pp.permalink" "link" Permalink.toString permalinks
|
||||
cmd.CommandText <- $"
|
||||
SELECT p.permalink
|
||||
FROM page p
|
||||
INNER JOIN page_permalink pp ON pp.page_id = p.id
|
||||
WHERE p.web_log_id = @webLogId
|
||||
AND pp.permalink IN ("""
|
||||
permalinks
|
||||
|> List.iteri (fun idx link ->
|
||||
if idx > 0 then cmd.CommandText <- $"{cmd.CommandText}, "
|
||||
cmd.CommandText <- $"{cmd.CommandText}@link{idx}"
|
||||
cmd.Parameters.AddWithValue ($"@link{idx}", Permalink.toString link) |> ignore)
|
||||
cmd.CommandText <- $"{cmd.CommandText})"
|
||||
{linkSql}"
|
||||
addWebLogId cmd webLogId
|
||||
cmd.Parameters.AddRange linkParams
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
return if rdr.Read () then Some (Map.toPermalink rdr) else None
|
||||
}
|
||||
@@ -262,11 +218,8 @@ type SQLitePageData (conn : SqliteConnection) =
|
||||
addWebLogId cmd webLogId
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
let! pages =
|
||||
toList Map.toPage rdr
|
||||
|> List.map (fun page -> backgroundTask {
|
||||
let! page = appendPageMeta page
|
||||
return! appendPageRevisionsAndPermalinks page
|
||||
})
|
||||
toList toPage rdr
|
||||
|> List.map (fun page -> backgroundTask { return! appendPageRevisionsAndPermalinks page })
|
||||
|> Task.WhenAll
|
||||
return List.ofArray pages
|
||||
}
|
||||
@@ -274,37 +227,33 @@ type SQLitePageData (conn : SqliteConnection) =
|
||||
/// Get all listed pages for the given web log (without revisions, prior permalinks, or text)
|
||||
let findListed webLogId = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
SELECT *
|
||||
cmd.CommandText <-
|
||||
"SELECT *
|
||||
FROM page
|
||||
WHERE web_log_id = @webLogId
|
||||
AND is_in_page_list = @isInPageList
|
||||
ORDER BY LOWER(title)"""
|
||||
ORDER BY LOWER(title)"
|
||||
addWebLogId cmd webLogId
|
||||
cmd.Parameters.AddWithValue ("@isInPageList", true) |> ignore
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
let! pages =
|
||||
toList pageWithoutTextOrMeta rdr
|
||||
|> List.map (fun page -> backgroundTask { return! appendPageMeta page })
|
||||
|> Task.WhenAll
|
||||
return List.ofArray pages
|
||||
return toList pageWithoutText rdr
|
||||
}
|
||||
|
||||
/// Get a page of pages for the given web log (without revisions, prior permalinks, or metadata)
|
||||
let findPageOfPages webLogId pageNbr = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
SELECT *
|
||||
cmd.CommandText <-
|
||||
"SELECT *
|
||||
FROM page
|
||||
WHERE web_log_id = @webLogId
|
||||
ORDER BY LOWER(title)
|
||||
LIMIT @pageSize OFFSET @toSkip"""
|
||||
LIMIT @pageSize OFFSET @toSkip"
|
||||
addWebLogId cmd webLogId
|
||||
[ cmd.Parameters.AddWithValue ("@pageSize", 26)
|
||||
cmd.Parameters.AddWithValue ("@toSkip", (pageNbr - 1) * 25)
|
||||
] |> ignore
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
return toList Map.toPage rdr
|
||||
return toList toPage rdr
|
||||
}
|
||||
|
||||
/// Restore pages from a backup
|
||||
@@ -318,8 +267,8 @@ type SQLitePageData (conn : SqliteConnection) =
|
||||
match! findFullById page.Id page.WebLogId with
|
||||
| Some oldPage ->
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
UPDATE page
|
||||
cmd.CommandText <-
|
||||
"UPDATE page
|
||||
SET author_id = @authorId,
|
||||
title = @title,
|
||||
permalink = @permalink,
|
||||
@@ -327,12 +276,12 @@ type SQLitePageData (conn : SqliteConnection) =
|
||||
updated_on = @updatedOn,
|
||||
is_in_page_list = @isInPageList,
|
||||
template = @template,
|
||||
page_text = @text
|
||||
page_text = @text,
|
||||
meta_items = @metaItems
|
||||
WHERE id = @id
|
||||
AND web_log_id = @webLogId"""
|
||||
AND web_log_id = @webLogId"
|
||||
addPageParameters cmd page
|
||||
do! write cmd
|
||||
do! updatePageMeta page.Id oldPage.Metadata page.Metadata
|
||||
do! updatePagePermalinks page.Id oldPage.PriorPermalinks page.PriorPermalinks
|
||||
do! updatePageRevisions page.Id oldPage.Revisions page.Revisions
|
||||
return ()
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
namespace MyWebLog.Data.SQLite
|
||||
|
||||
open System
|
||||
open System.Threading.Tasks
|
||||
open Microsoft.Data.Sqlite
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
open Newtonsoft.Json
|
||||
open NodaTime
|
||||
|
||||
/// SQLite myWebLog post data implementation
|
||||
type SQLitePostData (conn : SqliteConnection) =
|
||||
type SQLitePostData (conn : SqliteConnection, ser : JsonSerializer) =
|
||||
|
||||
// SUPPORT FUNCTIONS
|
||||
|
||||
@@ -19,35 +20,19 @@ type SQLitePostData (conn : SqliteConnection) =
|
||||
cmd.Parameters.AddWithValue ("@status", PostStatus.toString post.Status)
|
||||
cmd.Parameters.AddWithValue ("@title", post.Title)
|
||||
cmd.Parameters.AddWithValue ("@permalink", Permalink.toString post.Permalink)
|
||||
cmd.Parameters.AddWithValue ("@publishedOn", maybe post.PublishedOn)
|
||||
cmd.Parameters.AddWithValue ("@updatedOn", post.UpdatedOn)
|
||||
cmd.Parameters.AddWithValue ("@publishedOn", maybeInstant post.PublishedOn)
|
||||
cmd.Parameters.AddWithValue ("@updatedOn", instantParam post.UpdatedOn)
|
||||
cmd.Parameters.AddWithValue ("@template", maybe post.Template)
|
||||
cmd.Parameters.AddWithValue ("@text", post.Text)
|
||||
cmd.Parameters.AddWithValue ("@episode", maybe (if Option.isSome post.Episode then
|
||||
Some (Utils.serialize ser post.Episode)
|
||||
else None))
|
||||
cmd.Parameters.AddWithValue ("@metaItems", maybe (if List.isEmpty post.Metadata then None
|
||||
else Some (Utils.serialize ser post.Metadata)))
|
||||
] |> ignore
|
||||
|
||||
/// Add parameters for episode INSERT or UPDATE statements
|
||||
let addEpisodeParameters (cmd : SqliteCommand) (ep : Episode) =
|
||||
[ cmd.Parameters.AddWithValue ("@media", ep.Media)
|
||||
cmd.Parameters.AddWithValue ("@length", ep.Length)
|
||||
cmd.Parameters.AddWithValue ("@duration", maybe ep.Duration)
|
||||
cmd.Parameters.AddWithValue ("@mediaType", maybe ep.MediaType)
|
||||
cmd.Parameters.AddWithValue ("@imageUrl", maybe ep.ImageUrl)
|
||||
cmd.Parameters.AddWithValue ("@subtitle", maybe ep.Subtitle)
|
||||
cmd.Parameters.AddWithValue ("@explicit", maybe (ep.Explicit |> Option.map ExplicitRating.toString))
|
||||
cmd.Parameters.AddWithValue ("@chapterFile", maybe ep.ChapterFile)
|
||||
cmd.Parameters.AddWithValue ("@chapterType", maybe ep.ChapterType)
|
||||
cmd.Parameters.AddWithValue ("@transcriptUrl", maybe ep.TranscriptUrl)
|
||||
cmd.Parameters.AddWithValue ("@transcriptType", maybe ep.TranscriptType)
|
||||
cmd.Parameters.AddWithValue ("@transcriptLang", maybe ep.TranscriptLang)
|
||||
cmd.Parameters.AddWithValue ("@transcriptCaptions", maybe ep.TranscriptCaptions)
|
||||
cmd.Parameters.AddWithValue ("@seasonNumber", maybe ep.SeasonNumber)
|
||||
cmd.Parameters.AddWithValue ("@seasonDescription", maybe ep.SeasonDescription)
|
||||
cmd.Parameters.AddWithValue ("@episodeNumber", maybe (ep.EpisodeNumber |> Option.map string))
|
||||
cmd.Parameters.AddWithValue ("@episodeDescription", maybe ep.EpisodeDescription)
|
||||
] |> ignore
|
||||
|
||||
/// Append category IDs, tags, and meta items to a post
|
||||
let appendPostCategoryTagAndMeta (post : Post) = backgroundTask {
|
||||
/// Append category IDs and tags to a post
|
||||
let appendPostCategoryAndTag (post : Post) = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.Parameters.AddWithValue ("@id", PostId.toString post.Id) |> ignore
|
||||
|
||||
@@ -58,12 +43,7 @@ type SQLitePostData (conn : SqliteConnection) =
|
||||
|
||||
cmd.CommandText <- "SELECT tag FROM post_tag WHERE post_id = @id"
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
let post = { post with Tags = toList (Map.getString "tag") rdr }
|
||||
do! rdr.CloseAsync ()
|
||||
|
||||
cmd.CommandText <- "SELECT name, value FROM post_meta WHERE post_id = @id"
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
return { post with Metadata = toList Map.toMetaItem rdr }
|
||||
return { post with Tags = toList (Map.getString "tag") rdr }
|
||||
}
|
||||
|
||||
/// Append revisions and permalinks to a post
|
||||
@@ -82,7 +62,11 @@ type SQLitePostData (conn : SqliteConnection) =
|
||||
}
|
||||
|
||||
    /// The SELECT statement for a post that will include episode data, if it exists
    let selectPost = "SELECT p.*, e.* FROM post p LEFT JOIN post_episode e ON e.post_id = p.id"
    let selectPost = "SELECT p.* FROM post p"

    /// Shorthand for mapping a data reader to a post
    let toPost =
        Map.toPost ser

    /// Find just-the-post by its ID for the given web log (excludes category, tag, meta, revisions, and permalinks)
    let findPostById postId webLogId = backgroundTask {
@@ -90,16 +74,16 @@ type SQLitePostData (conn : SqliteConnection) =
        cmd.CommandText <- $"{selectPost} WHERE p.id = @id"
        cmd.Parameters.AddWithValue ("@id", PostId.toString postId) |> ignore
        use! rdr = cmd.ExecuteReaderAsync ()
        return Helpers.verifyWebLog<Post> webLogId (fun p -> p.WebLogId) Map.toPost rdr
        return Helpers.verifyWebLog<Post> webLogId (fun p -> p.WebLogId) toPost rdr
    }

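findPostById (and the page equivalent earlier) funnels the reader through Helpers.verifyWebLog, which is not part of this change set. From the call sites it appears to map the first row, if any, and only return it when it belongs to the requested web log; a sketch under that assumption:

module Helpers =

    open Microsoft.Data.Sqlite
    open MyWebLog

    /// Sketch: map the first row and verify that it belongs to the given web log
    let verifyWebLog<'T> webLogId (getWebLogId : 'T -> WebLogId) (toItem : SqliteDataReader -> 'T) (rdr : SqliteDataReader) =
        if rdr.Read () then
            let item = toItem rdr
            if getWebLogId item = webLogId then Some item else None
        else
            None
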
    /// Return a post with no revisions, prior permalinks, or text
    let postWithoutText rdr =
        { Map.toPost rdr with Text = "" }
        { toPost rdr with Text = "" }

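The updatePostCategories/updatePostTags/updatePostPermalinks/updatePostRevisions functions below, like their page counterparts above, now call Utils.diffLists and its variants. The diff helpers themselves are outside this diff; a sketch of the general shape inferred from how they are used (returning the items to delete and the items to add), with the revision key being an assumption:

/// Sketch: compare old and new lists by a key and return (itemsToDelete, itemsToAdd)
let diffLists<'T, 'U when 'U : equality> (oldItems : 'T list) (newItems : 'T list) (keyFunc : 'T -> 'U) =
    let oldKeys  = oldItems |> List.map keyFunc
    let newKeys  = newItems |> List.map keyFunc
    let toDelete = oldItems |> List.filter (fun it -> not (newKeys |> List.contains (keyFunc it)))
    let toAdd    = newItems |> List.filter (fun it -> not (oldKeys |> List.contains (keyFunc it)))
    toDelete, toAdd

/// Sketch: the permalink and revision variants could simply specialize diffLists
let diffPermalinks oldLinks newLinks = diffLists oldLinks newLinks Permalink.toString
let diffRevisions  oldRevs  newRevs  = diffLists oldRevs  newRevs  (fun (r : Revision) -> r.AsOf, r.Text)
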
/// Update a post's assigned categories
|
||||
let updatePostCategories postId oldCats newCats = backgroundTask {
|
||||
let toDelete, toAdd = diffLists oldCats newCats CategoryId.toString
|
||||
let toDelete, toAdd = Utils.diffLists oldCats newCats CategoryId.toString
|
||||
if List.isEmpty toDelete && List.isEmpty toAdd then
|
||||
return ()
|
||||
else
|
||||
@@ -125,7 +109,7 @@ type SQLitePostData (conn : SqliteConnection) =
|
||||
|
||||
/// Update a post's assigned categories
|
||||
let updatePostTags postId (oldTags : string list) newTags = backgroundTask {
|
||||
let toDelete, toAdd = diffLists oldTags newTags id
|
||||
let toDelete, toAdd = Utils.diffLists oldTags newTags id
|
||||
if List.isEmpty toDelete && List.isEmpty toAdd then
|
||||
return ()
|
||||
else
|
||||
@@ -149,89 +133,9 @@ type SQLitePostData (conn : SqliteConnection) =
|
||||
|> ignore
|
||||
}
|
||||
|
||||
/// Update an episode
|
||||
let updatePostEpisode (post : Post) = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- "SELECT COUNT(post_id) FROM post_episode WHERE post_id = @postId"
|
||||
cmd.Parameters.AddWithValue ("@postId", PostId.toString post.Id) |> ignore
|
||||
let! count = count cmd
|
||||
if count = 1 then
|
||||
match post.Episode with
|
||||
| Some ep ->
|
||||
cmd.CommandText <- """
|
||||
UPDATE post_episode
|
||||
SET media = @media,
|
||||
length = @length,
|
||||
duration = @duration,
|
||||
media_type = @mediaType,
|
||||
image_url = @imageUrl,
|
||||
subtitle = @subtitle,
|
||||
explicit = @explicit,
|
||||
chapter_file = @chapterFile,
|
||||
chapter_type = @chapterType,
|
||||
transcript_url = @transcriptUrl,
|
||||
transcript_type = @transcriptType,
|
||||
transcript_lang = @transcriptLang,
|
||||
transcript_captions = @transcriptCaptions,
|
||||
season_number = @seasonNumber,
|
||||
season_description = @seasonDescription,
|
||||
episode_number = @episodeNumber,
|
||||
episode_description = @episodeDescription
|
||||
WHERE post_id = @postId"""
|
||||
addEpisodeParameters cmd ep
|
||||
do! write cmd
|
||||
| None ->
|
||||
cmd.CommandText <- "DELETE FROM post_episode WHERE post_id = @postId"
|
||||
do! write cmd
|
||||
else
|
||||
match post.Episode with
|
||||
| Some ep ->
|
||||
cmd.CommandText <- """
|
||||
INSERT INTO post_episode (
|
||||
post_id, media, length, duration, media_type, image_url, subtitle, explicit, chapter_file,
|
||||
chapter_type, transcript_url, transcript_type, transcript_lang, transcript_captions,
|
||||
season_number, season_description, episode_number, episode_description
|
||||
) VALUES (
|
||||
@postId, @media, @length, @duration, @mediaType, @imageUrl, @subtitle, @explicit, @chapterFile,
|
||||
@chapterType, @transcriptUrl, @transcriptType, @transcriptLang, @transcriptCaptions,
|
||||
@seasonNumber, @seasonDescription, @episodeNumber, @episodeDescription
|
||||
)"""
|
||||
addEpisodeParameters cmd ep
|
||||
do! write cmd
|
||||
| None -> ()
|
||||
}
|
||||
|
||||
/// Update a post's metadata items
|
||||
let updatePostMeta postId oldItems newItems = backgroundTask {
|
||||
let toDelete, toAdd = diffMetaItems oldItems newItems
|
||||
if List.isEmpty toDelete && List.isEmpty toAdd then
|
||||
return ()
|
||||
else
|
||||
use cmd = conn.CreateCommand ()
|
||||
[ cmd.Parameters.AddWithValue ("@postId", PostId.toString postId)
|
||||
cmd.Parameters.Add ("@name", SqliteType.Text)
|
||||
cmd.Parameters.Add ("@value", SqliteType.Text)
|
||||
] |> ignore
|
||||
let runCmd (item : MetaItem) = backgroundTask {
|
||||
cmd.Parameters["@name" ].Value <- item.Name
|
||||
cmd.Parameters["@value"].Value <- item.Value
|
||||
do! write cmd
|
||||
}
|
||||
cmd.CommandText <- "DELETE FROM post_meta WHERE post_id = @postId AND name = @name AND value = @value"
|
||||
toDelete
|
||||
|> List.map runCmd
|
||||
|> Task.WhenAll
|
||||
|> ignore
|
||||
cmd.CommandText <- "INSERT INTO post_meta VALUES (@postId, @name, @value)"
|
||||
toAdd
|
||||
|> List.map runCmd
|
||||
|> Task.WhenAll
|
||||
|> ignore
|
||||
}
|
||||
|
||||
/// Update a post's prior permalinks
|
||||
let updatePostPermalinks postId oldLinks newLinks = backgroundTask {
|
||||
let toDelete, toAdd = diffPermalinks oldLinks newLinks
|
||||
let toDelete, toAdd = Utils.diffPermalinks oldLinks newLinks
|
||||
if List.isEmpty toDelete && List.isEmpty toAdd then
|
||||
return ()
|
||||
else
|
||||
@@ -257,7 +161,7 @@ type SQLitePostData (conn : SqliteConnection) =
|
||||
|
||||
/// Update a post's revisions
|
||||
let updatePostRevisions postId oldRevs newRevs = backgroundTask {
|
||||
let toDelete, toAdd = diffRevisions oldRevs newRevs
|
||||
let toDelete, toAdd = Utils.diffRevisions oldRevs newRevs
|
||||
if List.isEmpty toDelete && List.isEmpty toAdd then
|
||||
return ()
|
||||
else
|
||||
@@ -265,7 +169,7 @@ type SQLitePostData (conn : SqliteConnection) =
|
||||
let runCmd withText rev = backgroundTask {
|
||||
cmd.Parameters.Clear ()
|
||||
[ cmd.Parameters.AddWithValue ("@postId", PostId.toString postId)
|
||||
cmd.Parameters.AddWithValue ("@asOf", rev.AsOf)
|
||||
cmd.Parameters.AddWithValue ("@asOf", instantParam rev.AsOf)
|
||||
] |> ignore
|
||||
if withText then cmd.Parameters.AddWithValue ("@text", MarkupText.toString rev.Text) |> ignore
|
||||
do! write cmd
|
||||
@@ -287,18 +191,18 @@ type SQLitePostData (conn : SqliteConnection) =
|
||||
/// Add a post
|
||||
let add post = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
INSERT INTO post (
|
||||
id, web_log_id, author_id, status, title, permalink, published_on, updated_on, template, post_text
|
||||
cmd.CommandText <-
|
||||
"INSERT INTO post (
|
||||
id, web_log_id, author_id, status, title, permalink, published_on, updated_on, template, post_text,
|
||||
episode, meta_items
|
||||
) VALUES (
|
||||
@id, @webLogId, @authorId, @status, @title, @permalink, @publishedOn, @updatedOn, @template, @text
|
||||
)"""
|
||||
@id, @webLogId, @authorId, @status, @title, @permalink, @publishedOn, @updatedOn, @template, @text,
|
||||
@episode, @metaItems
|
||||
)"
|
||||
addPostParameters cmd post
|
||||
do! write cmd
|
||||
do! updatePostCategories post.Id [] post.CategoryIds
|
||||
do! updatePostTags post.Id [] post.Tags
|
||||
do! updatePostEpisode post
|
||||
do! updatePostMeta post.Id [] post.Metadata
|
||||
do! updatePostPermalinks post.Id [] post.PriorPermalinks
|
||||
do! updatePostRevisions post.Id [] post.Revisions
|
||||
}
|
||||
@@ -316,7 +220,7 @@ type SQLitePostData (conn : SqliteConnection) =
|
||||
let findById postId webLogId = backgroundTask {
|
||||
match! findPostById postId webLogId with
|
||||
| Some post ->
|
||||
let! post = appendPostCategoryTagAndMeta post
|
||||
let! post = appendPostCategoryAndTag post
|
||||
return Some post
|
||||
| None -> return None
|
||||
}
|
||||
@@ -329,7 +233,7 @@ type SQLitePostData (conn : SqliteConnection) =
|
||||
cmd.Parameters.AddWithValue ("@link", Permalink.toString permalink) |> ignore
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
if rdr.Read () then
|
||||
let! post = appendPostCategoryTagAndMeta (Map.toPost rdr)
|
||||
let! post = appendPostCategoryAndTag (toPost rdr)
|
||||
return Some post
|
||||
else
|
||||
return None
|
||||
@@ -350,14 +254,13 @@ type SQLitePostData (conn : SqliteConnection) =
|
||||
| Some _ ->
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.Parameters.AddWithValue ("@id", PostId.toString postId) |> ignore
|
||||
cmd.CommandText <- """
|
||||
DELETE FROM post_revision WHERE post_id = @id;
|
||||
cmd.CommandText <-
|
||||
"DELETE FROM post_revision WHERE post_id = @id;
|
||||
DELETE FROM post_permalink WHERE post_id = @id;
|
||||
DELETE FROM post_meta WHERE post_id = @id;
|
||||
DELETE FROM post_episode WHERE post_id = @id;
|
||||
DELETE FROM post_tag WHERE post_id = @id;
|
||||
DELETE FROM post_category WHERE post_id = @id;
|
||||
DELETE FROM post WHERE id = @id"""
|
||||
DELETE FROM post_comment WHERE post_id = @id;
|
||||
DELETE FROM post WHERE id = @id"
|
||||
do! write cmd
|
||||
return true
|
||||
| None -> return false
|
||||
@@ -366,19 +269,15 @@ type SQLitePostData (conn : SqliteConnection) =
|
||||
/// Find the current permalink from a list of potential prior permalinks for the given web log
|
||||
let findCurrentPermalink permalinks webLogId = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
let linkSql, linkParams = inClause "AND pp.permalink" "link" Permalink.toString permalinks
|
||||
cmd.CommandText <- $"
|
||||
SELECT p.permalink
|
||||
FROM post p
|
||||
INNER JOIN post_permalink pp ON pp.post_id = p.id
|
||||
WHERE p.web_log_id = @webLogId
|
||||
AND pp.permalink IN ("""
|
||||
permalinks
|
||||
|> List.iteri (fun idx link ->
|
||||
if idx > 0 then cmd.CommandText <- $"{cmd.CommandText}, "
|
||||
cmd.CommandText <- $"{cmd.CommandText}@link{idx}"
|
||||
cmd.Parameters.AddWithValue ($"@link{idx}", Permalink.toString link) |> ignore)
|
||||
cmd.CommandText <- $"{cmd.CommandText})"
|
||||
{linkSql}"
|
||||
addWebLogId cmd webLogId
|
||||
cmd.Parameters.AddRange linkParams
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
return if rdr.Read () then Some (Map.toPermalink rdr) else None
|
||||
}
|
||||
@@ -390,9 +289,9 @@ type SQLitePostData (conn : SqliteConnection) =
|
||||
addWebLogId cmd webLogId
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
let! posts =
|
||||
toList Map.toPost rdr
|
||||
toList toPost rdr
|
||||
|> List.map (fun post -> backgroundTask {
|
||||
let! post = appendPostCategoryTagAndMeta post
|
||||
let! post = appendPostCategoryAndTag post
|
||||
return! appendPostRevisionsAndPermalinks post
|
||||
})
|
||||
|> Task.WhenAll
|
||||
@@ -402,27 +301,22 @@ type SQLitePostData (conn : SqliteConnection) =
|
||||
/// Get a page of categorized posts for the given web log (excludes revisions and prior permalinks)
|
||||
let findPageOfCategorizedPosts webLogId categoryIds pageNbr postsPerPage = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- $"""
|
||||
let catSql, catParams = inClause "AND pc.category_id" "catId" CategoryId.toString categoryIds
|
||||
cmd.CommandText <- $"
|
||||
{selectPost}
|
||||
INNER JOIN post_category pc ON pc.post_id = p.id
|
||||
WHERE p.web_log_id = @webLogId
|
||||
AND p.status = @status
|
||||
AND pc.category_id IN ("""
|
||||
categoryIds
|
||||
|> List.iteri (fun idx catId ->
|
||||
if idx > 0 then cmd.CommandText <- $"{cmd.CommandText}, "
|
||||
cmd.CommandText <- $"{cmd.CommandText}@catId{idx}"
|
||||
cmd.Parameters.AddWithValue ($"@catId{idx}", CategoryId.toString catId) |> ignore)
|
||||
cmd.CommandText <-
|
||||
$"""{cmd.CommandText})
|
||||
{catSql}
|
||||
ORDER BY published_on DESC
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"""
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
addWebLogId cmd webLogId
|
||||
cmd.Parameters.AddWithValue ("@status", PostStatus.toString Published) |> ignore
|
||||
cmd.Parameters.AddRange catParams
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
let! posts =
|
||||
toList Map.toPost rdr
|
||||
|> List.map (fun post -> backgroundTask { return! appendPostCategoryTagAndMeta post })
|
||||
toList toPost rdr
|
||||
|> List.map (fun post -> backgroundTask { return! appendPostCategoryAndTag post })
|
||||
|> Task.WhenAll
|
||||
return List.ofArray posts
|
||||
}
|
||||
@@ -430,16 +324,16 @@ type SQLitePostData (conn : SqliteConnection) =
|
||||
/// Get a page of posts for the given web log (excludes text, revisions, and prior permalinks)
|
||||
let findPageOfPosts webLogId pageNbr postsPerPage = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- $"""
|
||||
cmd.CommandText <- $"
|
||||
{selectPost}
|
||||
WHERE p.web_log_id = @webLogId
|
||||
ORDER BY p.published_on DESC NULLS FIRST, p.updated_on
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"""
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
addWebLogId cmd webLogId
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
let! posts =
|
||||
toList postWithoutText rdr
|
||||
|> List.map (fun post -> backgroundTask { return! appendPostCategoryTagAndMeta post })
|
||||
|> List.map (fun post -> backgroundTask { return! appendPostCategoryAndTag post })
|
||||
|> Task.WhenAll
|
||||
return List.ofArray posts
|
||||
}
|
||||
@@ -447,18 +341,18 @@ type SQLitePostData (conn : SqliteConnection) =
|
||||
/// Get a page of published posts for the given web log (excludes revisions and prior permalinks)
|
||||
let findPageOfPublishedPosts webLogId pageNbr postsPerPage = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- $"""
|
||||
cmd.CommandText <- $"
|
||||
{selectPost}
|
||||
WHERE p.web_log_id = @webLogId
|
||||
AND p.status = @status
|
||||
ORDER BY p.published_on DESC
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"""
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
addWebLogId cmd webLogId
|
||||
cmd.Parameters.AddWithValue ("@status", PostStatus.toString Published) |> ignore
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
let! posts =
|
||||
toList Map.toPost rdr
|
||||
|> List.map (fun post -> backgroundTask { return! appendPostCategoryTagAndMeta post })
|
||||
toList toPost rdr
|
||||
|> List.map (fun post -> backgroundTask { return! appendPostCategoryAndTag post })
|
||||
|> Task.WhenAll
|
||||
return List.ofArray posts
|
||||
}
|
||||
@@ -466,60 +360,60 @@ type SQLitePostData (conn : SqliteConnection) =
|
||||
/// Get a page of tagged posts for the given web log (excludes revisions and prior permalinks)
|
||||
let findPageOfTaggedPosts webLogId (tag : string) pageNbr postsPerPage = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- $"""
|
||||
cmd.CommandText <- $"
|
||||
{selectPost}
|
||||
INNER JOIN post_tag pt ON pt.post_id = p.id
|
||||
WHERE p.web_log_id = @webLogId
|
||||
AND p.status = @status
|
||||
AND pt.tag = @tag
|
||||
ORDER BY p.published_on DESC
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"""
|
||||
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|
||||
addWebLogId cmd webLogId
|
||||
[ cmd.Parameters.AddWithValue ("@status", PostStatus.toString Published)
|
||||
cmd.Parameters.AddWithValue ("@tag", tag)
|
||||
] |> ignore
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
let! posts =
|
||||
toList Map.toPost rdr
|
||||
|> List.map (fun post -> backgroundTask { return! appendPostCategoryTagAndMeta post })
|
||||
toList toPost rdr
|
||||
|> List.map (fun post -> backgroundTask { return! appendPostCategoryAndTag post })
|
||||
|> Task.WhenAll
|
||||
return List.ofArray posts
|
||||
}
|
||||
|
||||
/// Find the next newest and oldest post from a publish date for the given web log
|
||||
let findSurroundingPosts webLogId (publishedOn : DateTime) = backgroundTask {
|
||||
let findSurroundingPosts webLogId (publishedOn : Instant) = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- $"""
|
||||
cmd.CommandText <- $"
|
||||
{selectPost}
|
||||
WHERE p.web_log_id = @webLogId
|
||||
AND p.status = @status
|
||||
AND p.published_on < @publishedOn
|
||||
ORDER BY p.published_on DESC
|
||||
LIMIT 1"""
|
||||
LIMIT 1"
|
||||
addWebLogId cmd webLogId
|
||||
[ cmd.Parameters.AddWithValue ("@status", PostStatus.toString Published)
|
||||
cmd.Parameters.AddWithValue ("@publishedOn", publishedOn)
|
||||
cmd.Parameters.AddWithValue ("@publishedOn", instantParam publishedOn)
|
||||
] |> ignore
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
let! older = backgroundTask {
|
||||
if rdr.Read () then
|
||||
let! post = appendPostCategoryTagAndMeta (postWithoutText rdr)
|
||||
let! post = appendPostCategoryAndTag (postWithoutText rdr)
|
||||
return Some post
|
||||
else
|
||||
return None
|
||||
}
|
||||
do! rdr.CloseAsync ()
|
||||
cmd.CommandText <- $"""
|
||||
cmd.CommandText <- $"
|
||||
{selectPost}
|
||||
WHERE p.web_log_id = @webLogId
|
||||
AND p.status = @status
|
||||
AND p.published_on > @publishedOn
|
||||
ORDER BY p.published_on
|
||||
LIMIT 1"""
|
||||
LIMIT 1"
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
let! newer = backgroundTask {
|
||||
if rdr.Read () then
|
||||
let! post = appendPostCategoryTagAndMeta (postWithoutText rdr)
|
||||
let! post = appendPostCategoryAndTag (postWithoutText rdr)
|
||||
return Some post
|
||||
else
|
||||
return None
|
||||
@@ -538,8 +432,8 @@ type SQLitePostData (conn : SqliteConnection) =
|
||||
match! findFullById post.Id post.WebLogId with
|
||||
| Some oldPost ->
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
UPDATE post
|
||||
cmd.CommandText <-
|
||||
"UPDATE post
|
||||
SET author_id = @authorId,
|
||||
status = @status,
|
||||
title = @title,
|
||||
@@ -547,15 +441,15 @@ type SQLitePostData (conn : SqliteConnection) =
|
||||
published_on = @publishedOn,
|
||||
updated_on = @updatedOn,
|
||||
template = @template,
|
||||
post_text = @text
|
||||
post_text = @text,
|
||||
episode = @episode,
|
||||
meta_items = @metaItems
|
||||
WHERE id = @id
|
||||
AND web_log_id = @webLogId"""
|
||||
AND web_log_id = @webLogId"
|
||||
addPostParameters cmd post
|
||||
do! write cmd
|
||||
do! updatePostCategories post.Id oldPost.CategoryIds post.CategoryIds
|
||||
do! updatePostTags post.Id oldPost.Tags post.Tags
|
||||
do! updatePostEpisode post
|
||||
do! updatePostMeta post.Id oldPost.Metadata post.Metadata
|
||||
do! updatePostPermalinks post.Id oldPost.PriorPermalinks post.PriorPermalinks
|
||||
do! updatePostRevisions post.Id oldPost.Revisions post.Revisions
|
||||
| None -> return ()
|
||||
|
||||
@@ -50,18 +50,14 @@ type SQLiteTagMapData (conn : SqliteConnection) =
|
||||
/// Find any tag mappings in a list of tags for the given web log
|
||||
let findMappingForTags (tags : string list) webLogId = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
let mapSql, mapParams = inClause "AND tag" "tag" id tags
|
||||
cmd.CommandText <- $"
|
||||
SELECT *
|
||||
FROM tag_map
|
||||
WHERE web_log_id = @webLogId
|
||||
AND tag IN ("""
|
||||
tags
|
||||
|> List.iteri (fun idx tag ->
|
||||
if idx > 0 then cmd.CommandText <- $"{cmd.CommandText}, "
|
||||
cmd.CommandText <- $"{cmd.CommandText}@tag{idx}"
|
||||
cmd.Parameters.AddWithValue ($"@tag{idx}", tag) |> ignore)
|
||||
cmd.CommandText <- $"{cmd.CommandText})"
|
||||
{mapSql}"
|
||||
addWebLogId cmd webLogId
|
||||
cmd.Parameters.AddRange mapParams
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
return toList Map.toTagMap rdr
|
||||
}
|
||||
@@ -71,19 +67,19 @@ type SQLiteTagMapData (conn : SqliteConnection) =
|
||||
use cmd = conn.CreateCommand ()
|
||||
match! findById tagMap.Id tagMap.WebLogId with
|
||||
| Some _ ->
|
||||
cmd.CommandText <- """
|
||||
UPDATE tag_map
|
||||
cmd.CommandText <-
|
||||
"UPDATE tag_map
|
||||
SET tag = @tag,
|
||||
url_value = @urlValue
|
||||
WHERE id = @id
|
||||
AND web_log_id = @webLogId"""
|
||||
AND web_log_id = @webLogId"
|
||||
| None ->
|
||||
cmd.CommandText <- """
|
||||
INSERT INTO tag_map (
|
||||
cmd.CommandText <-
|
||||
"INSERT INTO tag_map (
|
||||
id, web_log_id, tag, url_value
|
||||
) VALUES (
|
||||
@id, @webLogId, @tag, @urlValue
|
||||
)"""
|
||||
)"
|
||||
addWebLogId cmd tagMap.WebLogId
|
||||
[ cmd.Parameters.AddWithValue ("@id", TagMapId.toString tagMap.Id)
|
||||
cmd.Parameters.AddWithValue ("@tag", tagMap.Tag)
|
||||
|
||||
@@ -17,13 +17,13 @@ type SQLiteThemeData (conn : SqliteConnection) =
|
||||
do! rdr.CloseAsync ()
|
||||
cmd.CommandText <- "SELECT name, theme_id FROM theme_template WHERE theme_id <> 'admin' ORDER BY name"
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
        let mutable templates = []
        while rdr.Read () do
            templates <- (ThemeId (Map.getString "theme_id" rdr), Map.toThemeTemplate false rdr) :: templates
        let templates =
            seq { while rdr.Read () do ThemeId (Map.getString "theme_id" rdr), Map.toThemeTemplate false rdr }
            |> List.ofSeq
        return
            themes
            |> List.map (fun t ->
                { t with Templates = templates |> List.filter (fun tt -> fst tt = t.Id) |> List.map snd })
                { t with Templates = templates |> List.filter (fun (themeId, _) -> themeId = t.Id) |> List.map snd })
    }
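The seq { while rdr.Read () ... } rewrite above follows the same pattern as the shared toList helper used throughout these data classes. That helper is not shown in this diff; a sketch of its assumed shape (Microsoft.Data.Sqlite open, as in these files):

/// Sketch: map every remaining row of a data reader into a list
let toList<'T> (toItem : SqliteDataReader -> 'T) (rdr : SqliteDataReader) =
    seq { while rdr.Read () do toItem rdr }
    |> List.ofSeq
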
|
||||
/// Does a given theme exist?
|
||||
@@ -67,10 +67,10 @@ type SQLiteThemeData (conn : SqliteConnection) =
|
||||
match! findByIdWithoutText themeId with
|
||||
| Some _ ->
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
DELETE FROM theme_asset WHERE theme_id = @id;
|
||||
cmd.CommandText <-
|
||||
"DELETE FROM theme_asset WHERE theme_id = @id;
|
||||
DELETE FROM theme_template WHERE theme_id = @id;
|
||||
DELETE FROM theme WHERE id = @id"""
|
||||
DELETE FROM theme WHERE id = @id"
|
||||
cmd.Parameters.AddWithValue ("@id", ThemeId.toString themeId) |> ignore
|
||||
do! write cmd
|
||||
return true
|
||||
@@ -92,7 +92,7 @@ type SQLiteThemeData (conn : SqliteConnection) =
|
||||
do! write cmd
|
||||
|
||||
let toDelete, toAdd =
|
||||
diffLists (oldTheme |> Option.map (fun t -> t.Templates) |> Option.defaultValue [])
|
||||
Utils.diffLists (oldTheme |> Option.map (fun t -> t.Templates) |> Option.defaultValue [])
|
||||
theme.Templates (fun t -> t.Name)
|
||||
let toUpdate =
|
||||
theme.Templates
|
||||
@@ -208,20 +208,20 @@ type SQLiteThemeAssetData (conn : SqliteConnection) =
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <-
|
||||
if exists = 1 then
|
||||
"""UPDATE theme_asset
|
||||
"UPDATE theme_asset
|
||||
SET updated_on = @updatedOn,
|
||||
data = ZEROBLOB(@dataLength)
|
||||
WHERE theme_id = @themeId
|
||||
AND path = @path"""
|
||||
AND path = @path"
|
||||
else
|
||||
"""INSERT INTO theme_asset (
|
||||
"INSERT INTO theme_asset (
|
||||
theme_id, path, updated_on, data
|
||||
) VALUES (
|
||||
@themeId, @path, @updatedOn, ZEROBLOB(@dataLength)
|
||||
)"""
|
||||
)"
|
||||
[ cmd.Parameters.AddWithValue ("@themeId", themeId)
|
||||
cmd.Parameters.AddWithValue ("@path", path)
|
||||
cmd.Parameters.AddWithValue ("@updatedOn", asset.UpdatedOn)
|
||||
cmd.Parameters.AddWithValue ("@updatedOn", instantParam asset.UpdatedOn)
|
||||
cmd.Parameters.AddWithValue ("@dataLength", asset.Data.Length)
|
||||
] |> ignore
|
||||
do! write cmd
|
||||
|
||||
@@ -13,19 +13,19 @@ type SQLiteUploadData (conn : SqliteConnection) =
|
||||
[ cmd.Parameters.AddWithValue ("@id", UploadId.toString upload.Id)
|
||||
cmd.Parameters.AddWithValue ("@webLogId", WebLogId.toString upload.WebLogId)
|
||||
cmd.Parameters.AddWithValue ("@path", Permalink.toString upload.Path)
|
||||
cmd.Parameters.AddWithValue ("@updatedOn", upload.UpdatedOn)
|
||||
cmd.Parameters.AddWithValue ("@updatedOn", instantParam upload.UpdatedOn)
|
||||
cmd.Parameters.AddWithValue ("@dataLength", upload.Data.Length)
|
||||
] |> ignore
|
||||
|
||||
/// Save an uploaded file
|
||||
let add upload = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
INSERT INTO upload (
|
||||
cmd.CommandText <-
|
||||
"INSERT INTO upload (
|
||||
id, web_log_id, path, updated_on, data
|
||||
) VALUES (
|
||||
@id, @webLogId, @path, @updatedOn, ZEROBLOB(@dataLength)
|
||||
)"""
|
||||
)"
|
||||
addUploadParameters cmd upload
|
||||
do! write cmd
|
||||
|
||||
@@ -40,11 +40,11 @@ type SQLiteUploadData (conn : SqliteConnection) =
|
||||
/// Delete an uploaded file by its ID
|
||||
let delete uploadId webLogId = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
SELECT id, web_log_id, path, updated_on
|
||||
cmd.CommandText <-
|
||||
"SELECT id, web_log_id, path, updated_on
|
||||
FROM upload
|
||||
WHERE id = @id
|
||||
AND web_log_id = @webLogId"""
|
||||
AND web_log_id = @webLogId"
|
||||
addWebLogId cmd webLogId
|
||||
cmd.Parameters.AddWithValue ("@id", UploadId.toString uploadId) |> ignore
|
||||
let! rdr = cmd.ExecuteReaderAsync ()
|
||||
|
||||
@@ -4,12 +4,13 @@ open System.Threading.Tasks
|
||||
open Microsoft.Data.Sqlite
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
open Newtonsoft.Json
|
||||
|
||||
// The web log podcast insert loop is not statically compilable; this is OK
|
||||
#nowarn "3511"
|
||||
|
||||
/// SQLite myWebLog web log data implementation
|
||||
type SQLiteWebLogData (conn : SqliteConnection) =
|
||||
type SQLiteWebLogData (conn : SqliteConnection, ser : JsonSerializer) =
|
||||
|
||||
// SUPPORT FUNCTIONS
|
||||
|
||||
@@ -45,40 +46,22 @@ type SQLiteWebLogData (conn : SqliteConnection) =
|
||||
cmd.Parameters.AddWithValue ("@webLogId", WebLogId.toString webLogId)
|
||||
cmd.Parameters.AddWithValue ("@source", CustomFeedSource.toString feed.Source)
|
||||
cmd.Parameters.AddWithValue ("@path", Permalink.toString feed.Path)
|
||||
cmd.Parameters.AddWithValue ("@podcast", maybe (if Option.isSome feed.Podcast then
|
||||
Some (Utils.serialize ser feed.Podcast)
|
||||
else None))
|
||||
] |> ignore
|
||||
|
||||
/// Add parameters for podcast INSERT or UPDATE statements
|
||||
let addPodcastParameters (cmd : SqliteCommand) feedId (podcast : PodcastOptions) =
|
||||
[ cmd.Parameters.AddWithValue ("@feedId", CustomFeedId.toString feedId)
|
||||
cmd.Parameters.AddWithValue ("@title", podcast.Title)
|
||||
cmd.Parameters.AddWithValue ("@subtitle", maybe podcast.Subtitle)
|
||||
cmd.Parameters.AddWithValue ("@itemsInFeed", podcast.ItemsInFeed)
|
||||
cmd.Parameters.AddWithValue ("@summary", podcast.Summary)
|
||||
cmd.Parameters.AddWithValue ("@displayedAuthor", podcast.DisplayedAuthor)
|
||||
cmd.Parameters.AddWithValue ("@email", podcast.Email)
|
||||
cmd.Parameters.AddWithValue ("@imageUrl", Permalink.toString podcast.ImageUrl)
|
||||
cmd.Parameters.AddWithValue ("@appleCategory", podcast.AppleCategory)
|
||||
cmd.Parameters.AddWithValue ("@appleSubcategory", maybe podcast.AppleSubcategory)
|
||||
cmd.Parameters.AddWithValue ("@explicit", ExplicitRating.toString podcast.Explicit)
|
||||
cmd.Parameters.AddWithValue ("@defaultMediaType", maybe podcast.DefaultMediaType)
|
||||
cmd.Parameters.AddWithValue ("@mediaBaseUrl", maybe podcast.MediaBaseUrl)
|
||||
cmd.Parameters.AddWithValue ("@podcastGuid", maybe podcast.PodcastGuid)
|
||||
cmd.Parameters.AddWithValue ("@fundingUrl", maybe podcast.FundingUrl)
|
||||
cmd.Parameters.AddWithValue ("@fundingText", maybe podcast.FundingText)
|
||||
cmd.Parameters.AddWithValue ("@medium", maybe (podcast.Medium |> Option.map PodcastMedium.toString))
|
||||
] |> ignore
|
||||
/// Shorthand to map a data reader to a custom feed
|
||||
let toCustomFeed =
|
||||
Map.toCustomFeed ser
|
||||
|
||||
/// Get the current custom feeds for a web log
|
||||
let getCustomFeeds (webLog : WebLog) = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
SELECT f.*, p.*
|
||||
FROM web_log_feed f
|
||||
LEFT JOIN web_log_feed_podcast p ON p.feed_id = f.id
|
||||
WHERE f.web_log_id = @webLogId"""
|
||||
cmd.CommandText <- "SELECT * FROM web_log_feed WHERE web_log_id = @webLogId"
|
||||
addWebLogId cmd webLog.Id
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
return toList Map.toCustomFeed rdr
|
||||
return toList toCustomFeed rdr
|
||||
}
|
||||
|
||||
/// Append custom feeds to a web log
|
||||
@@ -87,27 +70,10 @@ type SQLiteWebLogData (conn : SqliteConnection) =
|
||||
return { webLog with Rss = { webLog.Rss with CustomFeeds = feeds } }
|
||||
}
|
||||
|
||||
/// Add a podcast to a custom feed
|
||||
let addPodcast feedId (podcast : PodcastOptions) = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
INSERT INTO web_log_feed_podcast (
|
||||
feed_id, title, subtitle, items_in_feed, summary, displayed_author, email, image_url,
|
||||
apple_category, apple_subcategory, explicit, default_media_type, media_base_url, podcast_guid,
|
||||
funding_url, funding_text, medium
|
||||
) VALUES (
|
||||
@feedId, @title, @subtitle, @itemsInFeed, @summary, @displayedAuthor, @email, @imageUrl,
|
||||
@appleCategory, @appleSubcategory, @explicit, @defaultMediaType, @mediaBaseUrl, @podcastGuid,
|
||||
@fundingUrl, @fundingText, @medium
|
||||
)"""
|
||||
addPodcastParameters cmd feedId podcast
|
||||
do! write cmd
|
||||
}
|
||||
|
||||
/// Update the custom feeds for a web log
|
||||
let updateCustomFeeds (webLog : WebLog) = backgroundTask {
|
||||
let! feeds = getCustomFeeds webLog
|
||||
let toDelete, toAdd = diffLists feeds webLog.Rss.CustomFeeds (fun it -> $"{CustomFeedId.toString it.Id}")
|
||||
let toDelete, toAdd = Utils.diffLists feeds webLog.Rss.CustomFeeds (fun it -> $"{CustomFeedId.toString it.Id}")
|
||||
let toId (feed : CustomFeed) = feed.Id
|
||||
let toUpdate =
|
||||
webLog.Rss.CustomFeeds
|
||||
@@ -117,9 +83,7 @@ type SQLiteWebLogData (conn : SqliteConnection) =
|
||||
cmd.Parameters.Add ("@id", SqliteType.Text) |> ignore
|
||||
toDelete
|
||||
|> List.map (fun it -> backgroundTask {
|
||||
cmd.CommandText <- """
|
||||
DELETE FROM web_log_feed_podcast WHERE feed_id = @id;
|
||||
DELETE FROM web_log_feed WHERE id = @id"""
|
||||
cmd.CommandText <- "DELETE FROM web_log_feed WHERE id = @id"
|
||||
cmd.Parameters["@id"].Value <- CustomFeedId.toString it.Id
|
||||
do! write cmd
|
||||
})
|
||||
@@ -128,68 +92,30 @@ type SQLiteWebLogData (conn : SqliteConnection) =
|
||||
cmd.Parameters.Clear ()
|
||||
toAdd
|
||||
|> List.map (fun it -> backgroundTask {
|
||||
cmd.CommandText <- """
|
||||
INSERT INTO web_log_feed (
|
||||
id, web_log_id, source, path
|
||||
cmd.CommandText <-
|
||||
"INSERT INTO web_log_feed (
|
||||
id, web_log_id, source, path, podcast
|
||||
) VALUES (
|
||||
@id, @webLogId, @source, @path
|
||||
)"""
|
||||
@id, @webLogId, @source, @path, @podcast
|
||||
)"
|
||||
cmd.Parameters.Clear ()
|
||||
addCustomFeedParameters cmd webLog.Id it
|
||||
do! write cmd
|
||||
match it.Podcast with
|
||||
| Some podcast -> do! addPodcast it.Id podcast
|
||||
| None -> ()
|
||||
})
|
||||
|> Task.WhenAll
|
||||
|> ignore
|
||||
toUpdate
|
||||
|> List.map (fun it -> backgroundTask {
|
||||
cmd.CommandText <- """
|
||||
UPDATE web_log_feed
|
||||
cmd.CommandText <-
|
||||
"UPDATE web_log_feed
|
||||
SET source = @source,
|
||||
path = @path
|
||||
path = @path,
|
||||
podcast = @podcast
|
||||
WHERE id = @id
|
||||
AND web_log_id = @webLogId"""
|
||||
AND web_log_id = @webLogId"
|
||||
cmd.Parameters.Clear ()
|
||||
addCustomFeedParameters cmd webLog.Id it
|
||||
do! write cmd
|
||||
let hadPodcast = Option.isSome (feeds |> List.find (fun f -> f.Id = it.Id)).Podcast
|
||||
match it.Podcast with
|
||||
| Some podcast ->
|
||||
if hadPodcast then
|
||||
cmd.CommandText <- """
|
||||
UPDATE web_log_feed_podcast
|
||||
SET title = @title,
|
||||
subtitle = @subtitle,
|
||||
items_in_feed = @itemsInFeed,
|
||||
summary = @summary,
|
||||
displayed_author = @displayedAuthor,
|
||||
email = @email,
|
||||
image_url = @imageUrl,
|
||||
apple_category = @appleCategory,
|
||||
apple_subcategory = @appleSubcategory,
|
||||
explicit = @explicit,
|
||||
default_media_type = @defaultMediaType,
|
||||
media_base_url = @mediaBaseUrl,
|
||||
podcast_guid = @podcastGuid,
|
||||
funding_url = @fundingUrl,
|
||||
funding_text = @fundingText,
|
||||
medium = @medium
|
||||
WHERE feed_id = @feedId"""
|
||||
cmd.Parameters.Clear ()
|
||||
addPodcastParameters cmd it.Id podcast
|
||||
do! write cmd
|
||||
else
|
||||
do! addPodcast it.Id podcast
|
||||
| None ->
|
||||
if hadPodcast then
|
||||
cmd.CommandText <- "DELETE FROM web_log_feed_podcast WHERE feed_id = @id"
|
||||
cmd.Parameters.Clear ()
|
||||
cmd.Parameters.AddWithValue ("@id", CustomFeedId.toString it.Id) |> ignore
|
||||
do! write cmd
|
||||
else
|
||||
()
|
||||
})
|
||||
|> Task.WhenAll
|
||||
|> ignore
|
||||
@@ -200,14 +126,14 @@ type SQLiteWebLogData (conn : SqliteConnection) =
|
||||
/// Add a web log
|
||||
let add webLog = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
INSERT INTO web_log (
|
||||
cmd.CommandText <-
|
||||
"INSERT INTO web_log (
|
||||
id, name, slug, subtitle, default_page, posts_per_page, theme_id, url_base, time_zone, auto_htmx,
|
||||
uploads, is_feed_enabled, feed_name, items_in_feed, is_category_enabled, is_tag_enabled, copyright
|
||||
) VALUES (
|
||||
@id, @name, @slug, @subtitle, @defaultPage, @postsPerPage, @themeId, @urlBase, @timeZone, @autoHtmx,
|
||||
@uploads, @isFeedEnabled, @feedName, @itemsInFeed, @isCategoryEnabled, @isTagEnabled, @copyright
|
||||
)"""
|
||||
)"
|
||||
addWebLogParameters cmd webLog
|
||||
do! write cmd
|
||||
do! updateCustomFeeds webLog
|
||||
@@ -232,26 +158,22 @@ type SQLiteWebLogData (conn : SqliteConnection) =
|
||||
let subQuery table = $"(SELECT id FROM {table} WHERE web_log_id = @webLogId)"
|
||||
let postSubQuery = subQuery "post"
|
||||
let pageSubQuery = subQuery "page"
|
||||
cmd.CommandText <- $"""
|
||||
cmd.CommandText <- $"
|
||||
DELETE FROM post_comment WHERE post_id IN {postSubQuery};
|
||||
DELETE FROM post_revision WHERE post_id IN {postSubQuery};
|
||||
DELETE FROM post_permalink WHERE post_id IN {postSubQuery};
|
||||
DELETE FROM post_episode WHERE post_id IN {postSubQuery};
|
||||
DELETE FROM post_tag WHERE post_id IN {postSubQuery};
|
||||
DELETE FROM post_category WHERE post_id IN {postSubQuery};
|
||||
DELETE FROM post_meta WHERE post_id IN {postSubQuery};
|
||||
DELETE FROM post WHERE web_log_id = @webLogId;
|
||||
DELETE FROM page_revision WHERE page_id IN {pageSubQuery};
|
||||
DELETE FROM page_permalink WHERE page_id IN {pageSubQuery};
|
||||
DELETE FROM page_meta WHERE page_id IN {pageSubQuery};
|
||||
DELETE FROM page WHERE web_log_id = @webLogId;
|
||||
DELETE FROM category WHERE web_log_id = @webLogId;
|
||||
DELETE FROM tag_map WHERE web_log_id = @webLogId;
|
||||
DELETE FROM upload WHERE web_log_id = @webLogId;
|
||||
DELETE FROM web_log_user WHERE web_log_id = @webLogId;
|
||||
DELETE FROM web_log_feed_podcast WHERE feed_id IN {subQuery "web_log_feed"};
|
||||
DELETE FROM web_log_feed WHERE web_log_id = @webLogId;
|
||||
DELETE FROM web_log WHERE id = @webLogId"""
|
||||
DELETE FROM web_log WHERE id = @webLogId"
|
||||
do! write cmd
|
||||
}
|
||||
|
||||
@@ -284,8 +206,8 @@ type SQLiteWebLogData (conn : SqliteConnection) =
|
||||
/// Update settings for a web log
|
||||
let updateSettings webLog = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
UPDATE web_log
|
||||
cmd.CommandText <-
|
||||
"UPDATE web_log
|
||||
SET name = @name,
|
||||
slug = @slug,
|
||||
subtitle = @subtitle,
|
||||
@@ -302,7 +224,7 @@ type SQLiteWebLogData (conn : SqliteConnection) =
|
||||
is_category_enabled = @isCategoryEnabled,
|
||||
is_tag_enabled = @isTagEnabled,
|
||||
copyright = @copyright
|
||||
WHERE id = @id"""
|
||||
WHERE id = @id"
|
||||
addWebLogParameters cmd webLog
|
||||
do! write cmd
|
||||
}
|
||||
@@ -310,15 +232,15 @@ type SQLiteWebLogData (conn : SqliteConnection) =
|
||||
/// Update RSS options for a web log
|
||||
let updateRssOptions webLog = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
UPDATE web_log
|
||||
cmd.CommandText <-
|
||||
"UPDATE web_log
|
||||
SET is_feed_enabled = @isFeedEnabled,
|
||||
feed_name = @feedName,
|
||||
items_in_feed = @itemsInFeed,
|
||||
is_category_enabled = @isCategoryEnabled,
|
||||
is_tag_enabled = @isTagEnabled,
|
||||
copyright = @copyright
|
||||
WHERE id = @id"""
|
||||
WHERE id = @id"
|
||||
addWebLogRssParameters cmd webLog
|
||||
cmd.Parameters.AddWithValue ("@id", WebLogId.toString webLog.Id) |> ignore
|
||||
do! write cmd
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
namespace MyWebLog.Data.SQLite
|
||||
|
||||
open System
|
||||
open Microsoft.Data.Sqlite
|
||||
open MyWebLog
|
||||
open MyWebLog.Data
|
||||
@@ -19,11 +18,10 @@ type SQLiteWebLogUserData (conn : SqliteConnection) =
|
||||
cmd.Parameters.AddWithValue ("@lastName", user.LastName)
|
||||
cmd.Parameters.AddWithValue ("@preferredName", user.PreferredName)
|
||||
cmd.Parameters.AddWithValue ("@passwordHash", user.PasswordHash)
|
||||
cmd.Parameters.AddWithValue ("@salt", user.Salt)
|
||||
cmd.Parameters.AddWithValue ("@url", maybe user.Url)
|
||||
cmd.Parameters.AddWithValue ("@accessLevel", AccessLevel.toString user.AccessLevel)
|
||||
cmd.Parameters.AddWithValue ("@createdOn", user.CreatedOn)
|
||||
cmd.Parameters.AddWithValue ("@lastSeenOn", maybe user.LastSeenOn)
|
||||
cmd.Parameters.AddWithValue ("@createdOn", instantParam user.CreatedOn)
|
||||
cmd.Parameters.AddWithValue ("@lastSeenOn", maybeInstant user.LastSeenOn)
|
||||
] |> ignore
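// The instantParam and maybeInstant helpers used above are not defined anywhere in this diff;
// the lines below are only a minimal sketch of the shape they could take, assuming Instants are
// stored as ISO-8601 text and missing values map to DBNull. The real definitions may differ.
open System
open NodaTime
open NodaTime.Text

/// Render an Instant as sortable ISO-8601 text for a TEXT column (assumed shape)
let instantParam (value : Instant) : obj =
    box (InstantPattern.General.Format value)

/// Render an optional Instant, storing DBNull for None (assumed shape)
let maybeInstant (value : Instant option) : obj =
    value |> Option.map instantParam |> Option.defaultValue (box DBNull.Value)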
|
||||
|
||||
// IMPLEMENTATION FUNCTIONS
|
||||
@@ -31,14 +29,14 @@ type SQLiteWebLogUserData (conn : SqliteConnection) =
|
||||
/// Add a user
|
||||
let add user = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
INSERT INTO web_log_user (
|
||||
id, web_log_id, email, first_name, last_name, preferred_name, password_hash, salt, url, access_level,
|
||||
cmd.CommandText <-
|
||||
"INSERT INTO web_log_user (
|
||||
id, web_log_id, email, first_name, last_name, preferred_name, password_hash, url, access_level,
|
||||
created_on, last_seen_on
|
||||
) VALUES (
|
||||
@id, @webLogId, @email, @firstName, @lastName, @preferredName, @passwordHash, @salt, @url, @accessLevel,
|
||||
@id, @webLogId, @email, @firstName, @lastName, @preferredName, @passwordHash, @url, @accessLevel,
|
||||
@createdOn, @lastSeenOn
|
||||
)"""
|
||||
)"
|
||||
addWebLogUserParameters cmd user
|
||||
do! write cmd
|
||||
}
|
||||
@@ -93,14 +91,10 @@ type SQLiteWebLogUserData (conn : SqliteConnection) =
|
||||
/// Find the names of users by their IDs for the given web log
|
||||
let findNames webLogId userIds = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- "SELECT * FROM web_log_user WHERE web_log_id = @webLogId AND id IN ("
|
||||
userIds
|
||||
|> List.iteri (fun idx userId ->
|
||||
if idx > 0 then cmd.CommandText <- $"{cmd.CommandText}, "
|
||||
cmd.CommandText <- $"{cmd.CommandText}@id{idx}"
|
||||
cmd.Parameters.AddWithValue ($"@id{idx}", WebLogUserId.toString userId) |> ignore)
|
||||
cmd.CommandText <- $"{cmd.CommandText})"
|
||||
let nameSql, nameParams = inClause "AND id" "id" WebLogUserId.toString userIds
|
||||
cmd.CommandText <- $"SELECT * FROM web_log_user WHERE web_log_id = @webLogId {nameSql}"
|
||||
addWebLogId cmd webLogId
|
||||
cmd.Parameters.AddRange nameParams
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
return
|
||||
toList Map.toWebLogUser rdr
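// inClause is likewise not shown in this diff; this is a sketch of a helper matching the call
// shape used above, returning the SQL fragment plus the parameters passed to
// cmd.Parameters.AddRange. The real implementation may differ, and this sketch does not
// special-case an empty list.
open Microsoft.Data.Sqlite

let inClause<'T> prefix paramName (valueFunc : 'T -> string) (items : 'T list) =
    let sql =
        items
        |> List.mapi (fun idx _ -> $"@%s{paramName}{idx}")
        |> String.concat ", "
        |> sprintf "%s IN (%s)" prefix
    let parameters =
        items |> List.mapi (fun idx it -> SqliteParameter ($"@%s{paramName}{idx}", box (valueFunc it)))
    sql, parameters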
|
||||
@@ -116,14 +110,14 @@ type SQLiteWebLogUserData (conn : SqliteConnection) =
|
||||
/// Set a user's last seen date/time to now
|
||||
let setLastSeen userId webLogId = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
UPDATE web_log_user
|
||||
cmd.CommandText <-
|
||||
"UPDATE web_log_user
|
||||
SET last_seen_on = @lastSeenOn
|
||||
WHERE id = @id
|
||||
AND web_log_id = @webLogId"""
|
||||
AND web_log_id = @webLogId"
|
||||
addWebLogId cmd webLogId
|
||||
[ cmd.Parameters.AddWithValue ("@id", WebLogUserId.toString userId)
|
||||
cmd.Parameters.AddWithValue ("@lastSeenOn", DateTime.UtcNow)
|
||||
cmd.Parameters.AddWithValue ("@lastSeenOn", instantParam (Noda.now ()))
|
||||
] |> ignore
|
||||
let! _ = cmd.ExecuteNonQueryAsync ()
|
||||
()
|
||||
@@ -132,20 +126,19 @@ type SQLiteWebLogUserData (conn : SqliteConnection) =
|
||||
/// Update a user
|
||||
let update user = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- """
|
||||
UPDATE web_log_user
|
||||
cmd.CommandText <-
|
||||
"UPDATE web_log_user
|
||||
SET email = @email,
|
||||
first_name = @firstName,
|
||||
last_name = @lastName,
|
||||
preferred_name = @preferredName,
|
||||
password_hash = @passwordHash,
|
||||
salt = @salt,
|
||||
url = @url,
|
||||
access_level = @accessLevel,
|
||||
created_on = @createdOn,
|
||||
last_seen_on = @lastSeenOn
|
||||
WHERE id = @id
|
||||
AND web_log_id = @webLogId"""
|
||||
AND web_log_id = @webLogId"
|
||||
addWebLogUserParameters cmd user
|
||||
do! write cmd
|
||||
}
|
||||
|
||||
@@ -2,90 +2,53 @@ namespace MyWebLog.Data
|
||||
|
||||
open Microsoft.Data.Sqlite
|
||||
open Microsoft.Extensions.Logging
|
||||
open MyWebLog
|
||||
open MyWebLog.Data.SQLite
|
||||
open Newtonsoft.Json
|
||||
open NodaTime
|
||||
|
||||
/// SQLite myWebLog data implementation
|
||||
type SQLiteData (conn : SqliteConnection, log : ILogger<SQLiteData>) =
|
||||
type SQLiteData (conn : SqliteConnection, log : ILogger<SQLiteData>, ser : JsonSerializer) =
|
||||
|
||||
let ensureTables () = backgroundTask {
|
||||
|
||||
/// Determine if the given table exists
|
||||
let tableExists (table : string) = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- "SELECT COUNT(*) FROM sqlite_master WHERE type = 'table' AND name = @table"
|
||||
cmd.Parameters.AddWithValue ("@table", table) |> ignore
|
||||
let! count = count cmd
|
||||
return count = 1
|
||||
|
||||
let! tables = backgroundTask {
|
||||
cmd.CommandText <- "SELECT name FROM sqlite_master WHERE type = 'table'"
|
||||
let! rdr = cmd.ExecuteReaderAsync ()
|
||||
let mutable tableList = []
|
||||
while rdr.Read() do
|
||||
tableList <- Map.getString "name" rdr :: tableList
|
||||
do! rdr.CloseAsync ()
|
||||
return tableList
|
||||
}
|
||||
|
||||
/// The connection for this instance
|
||||
member _.Conn = conn
|
||||
|
||||
/// Make a SQLite connection ready to execute commands
|
||||
static member setUpConnection (conn : SqliteConnection) = backgroundTask {
|
||||
do! conn.OpenAsync ()
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- "PRAGMA foreign_keys = TRUE"
|
||||
let! _ = cmd.ExecuteNonQueryAsync ()
|
||||
()
|
||||
}
|
||||
|
||||
interface IData with
|
||||
|
||||
member _.Category = SQLiteCategoryData conn
|
||||
member _.Page = SQLitePageData conn
|
||||
member _.Post = SQLitePostData conn
|
||||
member _.TagMap = SQLiteTagMapData conn
|
||||
member _.Theme = SQLiteThemeData conn
|
||||
member _.ThemeAsset = SQLiteThemeAssetData conn
|
||||
member _.Upload = SQLiteUploadData conn
|
||||
member _.WebLog = SQLiteWebLogData conn
|
||||
member _.WebLogUser = SQLiteWebLogUserData conn
|
||||
|
||||
member _.StartUp () = backgroundTask {
|
||||
|
||||
use cmd = conn.CreateCommand ()
|
||||
|
||||
let needsTable table =
|
||||
not (List.contains table tables)
|
||||
seq {
|
||||
// Theme tables
|
||||
match! tableExists "theme" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating theme table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE theme (
|
||||
if needsTable "theme" then
|
||||
"CREATE TABLE theme (
|
||||
id TEXT PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
version TEXT NOT NULL)"""
|
||||
do! write cmd
|
||||
match! tableExists "theme_template" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating theme_template table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE theme_template (
|
||||
version TEXT NOT NULL)"
|
||||
if needsTable "theme_template" then
|
||||
"CREATE TABLE theme_template (
|
||||
theme_id TEXT NOT NULL REFERENCES theme (id),
|
||||
name TEXT NOT NULL,
|
||||
template TEXT NOT NULL,
|
||||
PRIMARY KEY (theme_id, name))"""
|
||||
do! write cmd
|
||||
match! tableExists "theme_asset" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating theme_asset table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE theme_asset (
|
||||
PRIMARY KEY (theme_id, name))"
|
||||
if needsTable "theme_asset" then
|
||||
"CREATE TABLE theme_asset (
|
||||
theme_id TEXT NOT NULL REFERENCES theme (id),
|
||||
path TEXT NOT NULL,
|
||||
updated_on TEXT NOT NULL,
|
||||
data BLOB NOT NULL,
|
||||
PRIMARY KEY (theme_id, path))"""
|
||||
do! write cmd
|
||||
PRIMARY KEY (theme_id, path))"
|
||||
|
||||
// Web log tables
|
||||
match! tableExists "web_log" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating web_log table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE web_log (
|
||||
if needsTable "web_log" then
|
||||
"CREATE TABLE web_log (
|
||||
id TEXT PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
slug TEXT NOT NULL,
|
||||
@@ -103,68 +66,30 @@ type SQLiteData (conn : SqliteConnection, log : ILogger<SQLiteData>) =
|
||||
is_category_enabled INTEGER NOT NULL DEFAULT 0,
|
||||
is_tag_enabled INTEGER NOT NULL DEFAULT 0,
|
||||
copyright TEXT);
|
||||
CREATE INDEX web_log_theme_idx ON web_log (theme_id)"""
|
||||
do! write cmd
|
||||
match! tableExists "web_log_feed" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating web_log_feed table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE web_log_feed (
|
||||
CREATE INDEX web_log_theme_idx ON web_log (theme_id)"
|
||||
if needsTable "web_log_feed" then
|
||||
"CREATE TABLE web_log_feed (
|
||||
id TEXT PRIMARY KEY,
|
||||
web_log_id TEXT NOT NULL REFERENCES web_log (id),
|
||||
source TEXT NOT NULL,
|
||||
path TEXT NOT NULL);
|
||||
CREATE INDEX web_log_feed_web_log_idx ON web_log_feed (web_log_id)"""
|
||||
do! write cmd
|
||||
match! tableExists "web_log_feed_podcast" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating web_log_feed_podcast table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE web_log_feed_podcast (
|
||||
feed_id TEXT PRIMARY KEY REFERENCES web_log_feed (id),
|
||||
title TEXT NOT NULL,
|
||||
subtitle TEXT,
|
||||
items_in_feed INTEGER NOT NULL,
|
||||
summary TEXT NOT NULL,
|
||||
displayed_author TEXT NOT NULL,
|
||||
email TEXT NOT NULL,
|
||||
image_url TEXT NOT NULL,
|
||||
apple_category TEXT NOT NULL,
|
||||
apple_subcategory TEXT,
|
||||
explicit TEXT NOT NULL,
|
||||
default_media_type TEXT,
|
||||
media_base_url TEXT,
|
||||
podcast_guid TEXT,
|
||||
funding_url TEXT,
|
||||
funding_text TEXT,
|
||||
medium TEXT)"""
|
||||
do! write cmd
|
||||
path TEXT NOT NULL,
|
||||
podcast TEXT);
|
||||
CREATE INDEX web_log_feed_web_log_idx ON web_log_feed (web_log_id)"
|
||||
|
||||
// Category table
|
||||
match! tableExists "category" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating category table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE category (
|
||||
if needsTable "category" then
|
||||
"CREATE TABLE category (
|
||||
id TEXT PRIMARY KEY,
|
||||
web_log_id TEXT NOT NULL REFERENCES web_log (id),
|
||||
name TEXT NOT NULL,
|
||||
slug TEXT NOT NULL,
|
||||
description TEXT,
|
||||
parent_id TEXT);
|
||||
CREATE INDEX category_web_log_idx ON category (web_log_id)"""
|
||||
do! write cmd
|
||||
CREATE INDEX category_web_log_idx ON category (web_log_id)"
|
||||
|
||||
// Web log user table
|
||||
match! tableExists "web_log_user" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating web_log_user table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE web_log_user (
|
||||
if needsTable "web_log_user" then
|
||||
"CREATE TABLE web_log_user (
|
||||
id TEXT PRIMARY KEY,
|
||||
web_log_id TEXT NOT NULL REFERENCES web_log (id),
|
||||
email TEXT NOT NULL,
|
||||
@@ -172,22 +97,16 @@ type SQLiteData (conn : SqliteConnection, log : ILogger<SQLiteData>) =
|
||||
last_name TEXT NOT NULL,
|
||||
preferred_name TEXT NOT NULL,
|
||||
password_hash TEXT NOT NULL,
|
||||
salt TEXT NOT NULL,
|
||||
url TEXT,
|
||||
access_level TEXT NOT NULL,
|
||||
created_on TEXT NOT NULL,
|
||||
last_seen_on TEXT);
|
||||
CREATE INDEX web_log_user_web_log_idx ON web_log_user (web_log_id);
|
||||
CREATE INDEX web_log_user_email_idx ON web_log_user (web_log_id, email)"""
|
||||
do! write cmd
|
||||
CREATE INDEX web_log_user_email_idx ON web_log_user (web_log_id, email)"
|
||||
|
||||
// Page tables
|
||||
match! tableExists "page" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating page table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE page (
|
||||
if needsTable "page" then
|
||||
"CREATE TABLE page (
|
||||
id TEXT PRIMARY KEY,
|
||||
web_log_id TEXT NOT NULL REFERENCES web_log (id),
|
||||
author_id TEXT NOT NULL REFERENCES web_log_user (id),
|
||||
@@ -197,51 +116,26 @@ type SQLiteData (conn : SqliteConnection, log : ILogger<SQLiteData>) =
|
||||
updated_on TEXT NOT NULL,
|
||||
is_in_page_list INTEGER NOT NULL DEFAULT 0,
|
||||
template TEXT,
|
||||
page_text TEXT NOT NULL);
|
||||
page_text TEXT NOT NULL,
|
||||
meta_items TEXT);
|
||||
CREATE INDEX page_web_log_idx ON page (web_log_id);
|
||||
CREATE INDEX page_author_idx ON page (author_id);
|
||||
CREATE INDEX page_permalink_idx ON page (web_log_id, permalink)"""
|
||||
do! write cmd
|
||||
match! tableExists "page_meta" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating page_meta table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE page_meta (
|
||||
page_id TEXT NOT NULL REFERENCES page (id),
|
||||
name TEXT NOT NULL,
|
||||
value TEXT NOT NULL,
|
||||
PRIMARY KEY (page_id, name, value))"""
|
||||
do! write cmd
|
||||
match! tableExists "page_permalink" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating page_permalink table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE page_permalink (
|
||||
CREATE INDEX page_permalink_idx ON page (web_log_id, permalink)"
|
||||
if needsTable "page_permalink" then
|
||||
"CREATE TABLE page_permalink (
|
||||
page_id TEXT NOT NULL REFERENCES page (id),
|
||||
permalink TEXT NOT NULL,
|
||||
PRIMARY KEY (page_id, permalink))"""
|
||||
do! write cmd
|
||||
match! tableExists "page_revision" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating page_revision table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE page_revision (
|
||||
PRIMARY KEY (page_id, permalink))"
|
||||
if needsTable "page_revision" then
|
||||
"CREATE TABLE page_revision (
|
||||
page_id TEXT NOT NULL REFERENCES page (id),
|
||||
as_of TEXT NOT NULL,
|
||||
revision_text TEXT NOT NULL,
|
||||
PRIMARY KEY (page_id, as_of))"""
|
||||
do! write cmd
|
||||
PRIMARY KEY (page_id, as_of))"
|
||||
|
||||
// Post tables
|
||||
match! tableExists "post" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating post table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE post (
|
||||
if needsTable "post" then
|
||||
"CREATE TABLE post (
|
||||
id TEXT PRIMARY KEY,
|
||||
web_log_id TEXT NOT NULL REFERENCES web_log (id),
|
||||
author_id TEXT NOT NULL REFERENCES web_log_user (id),
|
||||
@@ -251,96 +145,37 @@ type SQLiteData (conn : SqliteConnection, log : ILogger<SQLiteData>) =
|
||||
published_on TEXT,
|
||||
updated_on TEXT NOT NULL,
|
||||
template TEXT,
|
||||
post_text TEXT NOT NULL);
|
||||
post_text TEXT NOT NULL,
|
||||
meta_items TEXT,
|
||||
episode TEXT);
|
||||
CREATE INDEX post_web_log_idx ON post (web_log_id);
|
||||
CREATE INDEX post_author_idx ON post (author_id);
|
||||
CREATE INDEX post_status_idx ON post (web_log_id, status, updated_on);
|
||||
CREATE INDEX post_permalink_idx ON post (web_log_id, permalink)"""
|
||||
do! write cmd
|
||||
match! tableExists "post_category" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating post_category table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE post_category (
|
||||
CREATE INDEX post_permalink_idx ON post (web_log_id, permalink)"
|
||||
if needsTable "post_category" then
|
||||
"CREATE TABLE post_category (
|
||||
post_id TEXT NOT NULL REFERENCES post (id),
|
||||
category_id TEXT NOT NULL REFERENCES category (id),
|
||||
PRIMARY KEY (post_id, category_id));
|
||||
CREATE INDEX post_category_category_idx ON post_category (category_id)"""
|
||||
do! write cmd
|
||||
match! tableExists "post_episode" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating post_episode table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE post_episode (
|
||||
post_id TEXT PRIMARY KEY REFERENCES post(id),
|
||||
media TEXT NOT NULL,
|
||||
length INTEGER NOT NULL,
|
||||
duration TEXT,
|
||||
media_type TEXT,
|
||||
image_url TEXT,
|
||||
subtitle TEXT,
|
||||
explicit TEXT,
|
||||
chapter_file TEXT,
|
||||
chapter_type TEXT,
|
||||
transcript_url TEXT,
|
||||
transcript_type TEXT,
|
||||
transcript_lang TEXT,
|
||||
transcript_captions INTEGER,
|
||||
season_number INTEGER,
|
||||
season_description TEXT,
|
||||
episode_number TEXT,
|
||||
episode_description TEXT)"""
|
||||
do! write cmd
|
||||
match! tableExists "post_tag" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating post_tag table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE post_tag (
|
||||
CREATE INDEX post_category_category_idx ON post_category (category_id)"
|
||||
if needsTable "post_tag" then
|
||||
"CREATE TABLE post_tag (
|
||||
post_id TEXT NOT NULL REFERENCES post (id),
|
||||
tag TEXT NOT NULL,
|
||||
PRIMARY KEY (post_id, tag))"""
|
||||
do! write cmd
|
||||
match! tableExists "post_meta" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating post_meta table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE post_meta (
|
||||
post_id TEXT NOT NULL REFERENCES post (id),
|
||||
name TEXT NOT NULL,
|
||||
value TEXT NOT NULL,
|
||||
PRIMARY KEY (post_id, name, value))"""
|
||||
do! write cmd
|
||||
match! tableExists "post_permalink" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating post_permalink table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE post_permalink (
|
||||
PRIMARY KEY (post_id, tag))"
|
||||
if needsTable "post_permalink" then
|
||||
"CREATE TABLE post_permalink (
|
||||
post_id TEXT NOT NULL REFERENCES post (id),
|
||||
permalink TEXT NOT NULL,
|
||||
PRIMARY KEY (post_id, permalink))"""
|
||||
do! write cmd
|
||||
match! tableExists "post_revision" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating post_revision table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE post_revision (
|
||||
PRIMARY KEY (post_id, permalink))"
|
||||
if needsTable "post_revision" then
|
||||
"CREATE TABLE post_revision (
|
||||
post_id TEXT NOT NULL REFERENCES post (id),
|
||||
as_of TEXT NOT NULL,
|
||||
revision_text TEXT NOT NULL,
|
||||
PRIMARY KEY (post_id, as_of))"""
|
||||
do! write cmd
|
||||
match! tableExists "post_comment" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating post_comment table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE post_comment (
|
||||
PRIMARY KEY (post_id, as_of))"
|
||||
if needsTable "post_comment" then
|
||||
"CREATE TABLE post_comment (
|
||||
id TEXT PRIMARY KEY,
|
||||
post_id TEXT NOT NULL REFERENCES post(id),
|
||||
in_reply_to_id TEXT,
|
||||
@@ -350,36 +185,404 @@ type SQLiteData (conn : SqliteConnection, log : ILogger<SQLiteData>) =
|
||||
status TEXT NOT NULL,
|
||||
posted_on TEXT NOT NULL,
|
||||
comment_text TEXT NOT NULL);
|
||||
CREATE INDEX post_comment_post_idx ON post_comment (post_id)"""
|
||||
do! write cmd
|
||||
CREATE INDEX post_comment_post_idx ON post_comment (post_id)"
|
||||
|
||||
// Tag map table
|
||||
match! tableExists "tag_map" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating tag_map table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE tag_map (
|
||||
if needsTable "tag_map" then
|
||||
"CREATE TABLE tag_map (
|
||||
id TEXT PRIMARY KEY,
|
||||
web_log_id TEXT NOT NULL REFERENCES web_log (id),
|
||||
tag TEXT NOT NULL,
|
||||
url_value TEXT NOT NULL);
|
||||
CREATE INDEX tag_map_web_log_idx ON tag_map (web_log_id)"""
|
||||
do! write cmd
|
||||
CREATE INDEX tag_map_web_log_idx ON tag_map (web_log_id)"
|
||||
|
||||
// Uploaded file table
|
||||
match! tableExists "upload" with
|
||||
| true -> ()
|
||||
| false ->
|
||||
log.LogInformation "Creating upload table..."
|
||||
cmd.CommandText <- """
|
||||
CREATE TABLE upload (
|
||||
if needsTable "upload" then
|
||||
"CREATE TABLE upload (
|
||||
id TEXT PRIMARY KEY,
|
||||
web_log_id TEXT NOT NULL REFERENCES web_log (id),
|
||||
path TEXT NOT NULL,
|
||||
updated_on TEXT NOT NULL,
|
||||
data BLOB NOT NULL);
|
||||
CREATE INDEX upload_web_log_idx ON upload (web_log_id);
|
||||
CREATE INDEX upload_path_idx ON upload (web_log_id, path)"""
|
||||
CREATE INDEX upload_path_idx ON upload (web_log_id, path)"
|
||||
|
||||
// Database version table
|
||||
if needsTable "db_version" then
|
||||
"CREATE TABLE db_version (id TEXT PRIMARY KEY);
|
||||
INSERT INTO db_version VALUES ('v2-rc1')"
|
||||
}
|
||||
|> Seq.map (fun sql ->
|
||||
log.LogInformation $"Creating {(sql.Split ' ')[2]} table..."
|
||||
cmd.CommandText <- sql
|
||||
write cmd |> Async.AwaitTask |> Async.RunSynchronously)
|
||||
|> List.ofSeq
|
||||
|> ignore
|
||||
}
|
||||
|
||||
/// Set the database version to the specified version
|
||||
let setDbVersion version = backgroundTask {
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- $"DELETE FROM db_version; INSERT INTO db_version VALUES ('%s{version}')"
|
||||
do! write cmd
|
||||
}
|
||||
|
||||
/// Implement the changes between v2-rc1 and v2-rc2
|
||||
let migrateV2Rc1ToV2Rc2 () = backgroundTask {
|
||||
let logStep = Utils.logMigrationStep log "v2-rc1 to v2-rc2"
|
||||
// Move meta items, podcast settings, and episode details to JSON-encoded text fields
|
||||
use cmd = conn.CreateCommand ()
|
||||
logStep "Adding new columns"
|
||||
cmd.CommandText <-
|
||||
"ALTER TABLE web_log_feed ADD COLUMN podcast TEXT;
|
||||
ALTER TABLE page ADD COLUMN meta_items TEXT;
|
||||
ALTER TABLE post ADD COLUMN meta_items TEXT;
|
||||
ALTER TABLE post ADD COLUMN episode TEXT"
|
||||
do! write cmd
|
||||
logStep "Migrating meta items"
|
||||
let migrateMeta entity = backgroundTask {
|
||||
cmd.CommandText <- $"SELECT * FROM %s{entity}_meta"
|
||||
use! metaRdr = cmd.ExecuteReaderAsync ()
|
||||
let allMetas =
|
||||
seq {
|
||||
while metaRdr.Read () do
|
||||
Map.getString $"{entity}_id" metaRdr,
|
||||
{ Name = Map.getString "name" metaRdr; Value = Map.getString "value" metaRdr }
|
||||
} |> List.ofSeq
|
||||
metaRdr.Close ()
|
||||
let metas =
|
||||
allMetas
|
||||
|> List.map fst
|
||||
|> List.distinct
|
||||
|> List.map (fun it -> it, allMetas |> List.filter (fun meta -> fst meta = it))
|
||||
metas
|
||||
|> List.iter (fun (entityId, items) ->
|
||||
cmd.CommandText <-
|
||||
$"UPDATE %s{entity}
|
||||
SET meta_items = @metaItems
|
||||
WHERE id = @id"
|
||||
[ cmd.Parameters.AddWithValue ("@metaItems", Utils.serialize ser items)
|
||||
cmd.Parameters.AddWithValue ("@id", entityId) ] |> ignore
|
||||
let _ = cmd.ExecuteNonQuery ()
|
||||
cmd.Parameters.Clear ())
|
||||
}
|
||||
do! migrateMeta "page"
|
||||
do! migrateMeta "post"
|
||||
logStep "Migrating podcasts and episodes"
|
||||
cmd.CommandText <- "SELECT * FROM web_log_feed_podcast"
|
||||
use! podcastRdr = cmd.ExecuteReaderAsync ()
|
||||
let podcasts =
|
||||
seq {
|
||||
while podcastRdr.Read () do
|
||||
CustomFeedId (Map.getString "feed_id" podcastRdr),
|
||||
{ Title = Map.getString "title" podcastRdr
|
||||
Subtitle = Map.tryString "subtitle" podcastRdr
|
||||
ItemsInFeed = Map.getInt "items_in_feed" podcastRdr
|
||||
Summary = Map.getString "summary" podcastRdr
|
||||
DisplayedAuthor = Map.getString "displayed_author" podcastRdr
|
||||
Email = Map.getString "email" podcastRdr
|
||||
ImageUrl = Map.getString "image_url" podcastRdr |> Permalink
|
||||
AppleCategory = Map.getString "apple_category" podcastRdr
|
||||
AppleSubcategory = Map.tryString "apple_subcategory" podcastRdr
|
||||
Explicit = Map.getString "explicit" podcastRdr |> ExplicitRating.parse
|
||||
DefaultMediaType = Map.tryString "default_media_type" podcastRdr
|
||||
MediaBaseUrl = Map.tryString "media_base_url" podcastRdr
|
||||
PodcastGuid = Map.tryGuid "podcast_guid" podcastRdr
|
||||
FundingUrl = Map.tryString "funding_url" podcastRdr
|
||||
FundingText = Map.tryString "funding_text" podcastRdr
|
||||
Medium = Map.tryString "medium" podcastRdr
|
||||
|> Option.map PodcastMedium.parse
|
||||
}
|
||||
} |> List.ofSeq
|
||||
podcastRdr.Close ()
|
||||
podcasts
|
||||
|> List.iter (fun (feedId, podcast) ->
|
||||
cmd.CommandText <- "UPDATE web_log_feed SET podcast = @podcast WHERE id = @id"
|
||||
[ cmd.Parameters.AddWithValue ("@podcast", Utils.serialize ser podcast)
|
||||
cmd.Parameters.AddWithValue ("@id", CustomFeedId.toString feedId) ] |> ignore
|
||||
let _ = cmd.ExecuteNonQuery ()
|
||||
cmd.Parameters.Clear ())
|
||||
cmd.CommandText <- "SELECT * FROM post_episode"
|
||||
use! epRdr = cmd.ExecuteReaderAsync ()
|
||||
let episodes =
|
||||
seq {
|
||||
while epRdr.Read () do
|
||||
PostId (Map.getString "post_id" epRdr),
|
||||
{ Media = Map.getString "media" epRdr
|
||||
Length = Map.getLong "length" epRdr
|
||||
Duration = Map.tryTimeSpan "duration" epRdr
|
||||
|> Option.map Duration.FromTimeSpan
|
||||
MediaType = Map.tryString "media_type" epRdr
|
||||
ImageUrl = Map.tryString "image_url" epRdr
|
||||
Subtitle = Map.tryString "subtitle" epRdr
|
||||
Explicit = Map.tryString "explicit" epRdr
|
||||
|> Option.map ExplicitRating.parse
|
||||
ChapterFile = Map.tryString "chapter_file" epRdr
|
||||
ChapterType = Map.tryString "chapter_type" epRdr
|
||||
TranscriptUrl = Map.tryString "transcript_url" epRdr
|
||||
TranscriptType = Map.tryString "transcript_type" epRdr
|
||||
TranscriptLang = Map.tryString "transcript_lang" epRdr
|
||||
TranscriptCaptions = Map.tryBoolean "transcript_captions" epRdr
|
||||
SeasonNumber = Map.tryInt "season_number" epRdr
|
||||
SeasonDescription = Map.tryString "season_description" epRdr
|
||||
EpisodeNumber = Map.tryString "episode_number" epRdr
|
||||
|> Option.map System.Double.Parse
|
||||
EpisodeDescription = Map.tryString "episode_description" epRdr
|
||||
}
|
||||
} |> List.ofSeq
|
||||
epRdr.Close ()
|
||||
episodes
|
||||
|> List.iter (fun (postId, episode) ->
|
||||
cmd.CommandText <- "UPDATE post SET episode = @episode WHERE id = @id"
|
||||
[ cmd.Parameters.AddWithValue ("@episode", Utils.serialize ser episode)
|
||||
cmd.Parameters.AddWithValue ("@id", PostId.toString postId) ] |> ignore
|
||||
let _ = cmd.ExecuteNonQuery ()
|
||||
cmd.Parameters.Clear ())
|
||||
|
||||
logStep "Migrating dates/times"
|
||||
let inst (dt : System.DateTime) =
|
||||
System.DateTime (dt.Ticks, System.DateTimeKind.Utc)
|
||||
|> (Instant.FromDateTimeUtc >> Noda.toSecondsPrecision)
|
||||
// page.updated_on, page.published_on
|
||||
cmd.CommandText <- "SELECT id, updated_on, published_on FROM page"
|
||||
use! pageRdr = cmd.ExecuteReaderAsync ()
|
||||
let toUpdate =
|
||||
seq {
|
||||
while pageRdr.Read () do
|
||||
Map.getString "id" pageRdr,
|
||||
inst (Map.getDateTime "updated_on" pageRdr),
|
||||
inst (Map.getDateTime "published_on" pageRdr)
|
||||
} |> List.ofSeq
|
||||
pageRdr.Close ()
|
||||
cmd.CommandText <- "UPDATE page SET updated_on = @updatedOn, published_on = @publishedOn WHERE id = @id"
|
||||
[ cmd.Parameters.Add ("@id", SqliteType.Text)
|
||||
cmd.Parameters.Add ("@updatedOn", SqliteType.Text)
|
||||
cmd.Parameters.Add ("@publishedOn", SqliteType.Text)
|
||||
] |> ignore
|
||||
toUpdate
|
||||
|> List.iter (fun (pageId, updatedOn, publishedOn) ->
|
||||
cmd.Parameters["@id" ].Value <- pageId
|
||||
cmd.Parameters["@updatedOn" ].Value <- instantParam updatedOn
|
||||
cmd.Parameters["@publishedOn"].Value <- instantParam publishedOn
|
||||
let _ = cmd.ExecuteNonQuery ()
|
||||
())
|
||||
cmd.Parameters.Clear ()
|
||||
// page_revision.as_of
|
||||
cmd.CommandText <- "SELECT * FROM page_revision"
|
||||
use! pageRevRdr = cmd.ExecuteReaderAsync ()
|
||||
let toUpdate =
|
||||
seq {
|
||||
while pageRevRdr.Read () do
|
||||
let asOf = Map.getDateTime "as_of" pageRevRdr
|
||||
Map.getString "page_id" pageRevRdr, asOf, inst asOf, Map.getString "revision_text" pageRevRdr
|
||||
} |> List.ofSeq
|
||||
pageRevRdr.Close ()
|
||||
cmd.CommandText <-
|
||||
"DELETE FROM page_revision WHERE page_id = @pageId AND as_of = @oldAsOf;
|
||||
INSERT INTO page_revision (page_id, as_of, revision_text) VALUES (@pageId, @asOf, @text)"
|
||||
[ cmd.Parameters.Add ("@pageId", SqliteType.Text)
|
||||
cmd.Parameters.Add ("@oldAsOf", SqliteType.Text)
|
||||
cmd.Parameters.Add ("@asOf", SqliteType.Text)
|
||||
cmd.Parameters.Add ("@text", SqliteType.Text)
|
||||
] |> ignore
|
||||
toUpdate
|
||||
|> List.iter (fun (pageId, oldAsOf, asOf, text) ->
|
||||
cmd.Parameters["@pageId" ].Value <- pageId
|
||||
cmd.Parameters["@oldAsOf"].Value <- oldAsOf
|
||||
cmd.Parameters["@asOf" ].Value <- instantParam asOf
|
||||
cmd.Parameters["@text" ].Value <- text
|
||||
let _ = cmd.ExecuteNonQuery ()
|
||||
())
|
||||
cmd.Parameters.Clear ()
|
||||
// post.updated_on, post.published_on (opt)
|
||||
cmd.CommandText <- "SELECT id, updated_on, published_on FROM post"
|
||||
use! postRdr = cmd.ExecuteReaderAsync ()
|
||||
let toUpdate =
|
||||
seq {
|
||||
while postRdr.Read () do
|
||||
Map.getString "id" postRdr,
|
||||
inst (Map.getDateTime "updated_on" postRdr),
|
||||
(Map.tryDateTime "published_on" postRdr |> Option.map inst)
|
||||
} |> List.ofSeq
|
||||
postRdr.Close ()
|
||||
cmd.CommandText <- "UPDATE post SET updated_on = @updatedOn, published_on = @publishedOn WHERE id = @id"
|
||||
[ cmd.Parameters.Add ("@id", SqliteType.Text)
|
||||
cmd.Parameters.Add ("@updatedOn", SqliteType.Text)
|
||||
cmd.Parameters.Add ("@publishedOn", SqliteType.Text)
|
||||
] |> ignore
|
||||
toUpdate
|
||||
|> List.iter (fun (postId, updatedOn, publishedOn) ->
|
||||
cmd.Parameters["@id" ].Value <- postId
|
||||
cmd.Parameters["@updatedOn" ].Value <- instantParam updatedOn
|
||||
cmd.Parameters["@publishedOn"].Value <- maybeInstant publishedOn
|
||||
let _ = cmd.ExecuteNonQuery ()
|
||||
())
|
||||
cmd.Parameters.Clear ()
|
||||
// post_revision.as_of
|
||||
cmd.CommandText <- "SELECT * FROM post_revision"
|
||||
use! postRevRdr = cmd.ExecuteReaderAsync ()
|
||||
let toUpdate =
|
||||
seq {
|
||||
while postRevRdr.Read () do
|
||||
let asOf = Map.getDateTime "as_of" postRevRdr
|
||||
Map.getString "post_id" postRevRdr, asOf, inst asOf, Map.getString "revision_text" postRevRdr
|
||||
} |> List.ofSeq
|
||||
postRevRdr.Close ()
|
||||
cmd.CommandText <-
|
||||
"DELETE FROM post_revision WHERE post_id = @postId AND as_of = @oldAsOf;
|
||||
INSERT INTO post_revision (post_id, as_of, revision_text) VALUES (@postId, @asOf, @text)"
|
||||
[ cmd.Parameters.Add ("@postId", SqliteType.Text)
|
||||
cmd.Parameters.Add ("@oldAsOf", SqliteType.Text)
|
||||
cmd.Parameters.Add ("@asOf", SqliteType.Text)
|
||||
cmd.Parameters.Add ("@text", SqliteType.Text)
|
||||
] |> ignore
|
||||
toUpdate
|
||||
|> List.iter (fun (postId, oldAsOf, asOf, text) ->
|
||||
cmd.Parameters["@postId" ].Value <- postId
|
||||
cmd.Parameters["@oldAsOf"].Value <- oldAsOf
|
||||
cmd.Parameters["@asOf" ].Value <- instantParam asOf
|
||||
cmd.Parameters["@text" ].Value <- text
|
||||
let _ = cmd.ExecuteNonQuery ()
|
||||
())
|
||||
cmd.Parameters.Clear ()
|
||||
// theme_asset.updated_on
|
||||
cmd.CommandText <- "SELECT theme_id, path, updated_on FROM theme_asset"
|
||||
use! assetRdr = cmd.ExecuteReaderAsync ()
|
||||
let toUpdate =
|
||||
seq {
|
||||
while assetRdr.Read () do
|
||||
Map.getString "theme_id" assetRdr, Map.getString "path" assetRdr,
|
||||
inst (Map.getDateTime "updated_on" assetRdr)
|
||||
} |> List.ofSeq
|
||||
assetRdr.Close ()
|
||||
cmd.CommandText <- "UPDATE theme_asset SET updated_on = @updatedOn WHERE theme_id = @themeId AND path = @path"
|
||||
[ cmd.Parameters.Add ("@updatedOn", SqliteType.Text)
|
||||
cmd.Parameters.Add ("@themeId", SqliteType.Text)
|
||||
cmd.Parameters.Add ("@path", SqliteType.Text)
|
||||
] |> ignore
|
||||
toUpdate
|
||||
|> List.iter (fun (themeId, path, updatedOn) ->
|
||||
cmd.Parameters["@themeId" ].Value <- themeId
|
||||
cmd.Parameters["@path" ].Value <- path
|
||||
cmd.Parameters["@updatedOn"].Value <- instantParam updatedOn
|
||||
let _ = cmd.ExecuteNonQuery ()
|
||||
())
|
||||
cmd.Parameters.Clear ()
|
||||
// upload.updated_on
|
||||
cmd.CommandText <- "SELECT id, updated_on FROM upload"
|
||||
use! upRdr = cmd.ExecuteReaderAsync ()
|
||||
let toUpdate =
|
||||
seq {
|
||||
while upRdr.Read () do
|
||||
Map.getString "id" upRdr, inst (Map.getDateTime "updated_on" upRdr)
|
||||
} |> List.ofSeq
|
||||
upRdr.Close ()
|
||||
cmd.CommandText <- "UPDATE upload SET updated_on = @updatedOn WHERE id = @id"
|
||||
[ cmd.Parameters.Add ("@updatedOn", SqliteType.Text)
|
||||
cmd.Parameters.Add ("@id", SqliteType.Text)
|
||||
] |> ignore
|
||||
toUpdate
|
||||
|> List.iter (fun (upId, updatedOn) ->
|
||||
cmd.Parameters["@id" ].Value <- upId
|
||||
cmd.Parameters["@updatedOn"].Value <- instantParam updatedOn
|
||||
let _ = cmd.ExecuteNonQuery ()
|
||||
())
|
||||
cmd.Parameters.Clear ()
|
||||
// web_log_user.created_on, web_log_user.last_seen_on (opt)
|
||||
cmd.CommandText <- "SELECT id, created_on, last_seen_on FROM web_log_user"
|
||||
use! userRdr = cmd.ExecuteReaderAsync ()
|
||||
let toUpdate =
|
||||
seq {
|
||||
while userRdr.Read () do
|
||||
Map.getString "id" userRdr,
|
||||
inst (Map.getDateTime "created_on" userRdr),
|
||||
(Map.tryDateTime "last_seen_on" userRdr |> Option.map inst)
|
||||
} |> List.ofSeq
|
||||
userRdr.Close ()
|
||||
cmd.CommandText <- "UPDATE web_log_user SET created_on = @createdOn, last_seen_on = @lastSeenOn WHERE id = @id"
|
||||
[ cmd.Parameters.Add ("@id", SqliteType.Text)
|
||||
cmd.Parameters.Add ("@createdOn", SqliteType.Text)
|
||||
cmd.Parameters.Add ("@lastSeenOn", SqliteType.Text)
|
||||
] |> ignore
|
||||
toUpdate
|
||||
|> List.iter (fun (userId, createdOn, lastSeenOn) ->
|
||||
cmd.Parameters["@id" ].Value <- userId
|
||||
cmd.Parameters["@createdOn" ].Value <- instantParam createdOn
|
||||
cmd.Parameters["@lastSeenOn"].Value <- maybeInstant lastSeenOn
|
||||
let _ = cmd.ExecuteNonQuery ()
|
||||
())
|
||||
cmd.Parameters.Clear ()
|
||||
|
||||
conn.Close ()
|
||||
conn.Open ()
|
||||
|
||||
logStep "Dropping old tables and columns"
|
||||
cmd.CommandText <-
|
||||
"ALTER TABLE web_log_user DROP COLUMN salt;
|
||||
DROP TABLE post_episode;
|
||||
DROP TABLE post_meta;
|
||||
DROP TABLE page_meta;
|
||||
DROP TABLE web_log_feed_podcast"
|
||||
do! write cmd
|
||||
|
||||
logStep "Setting database version to v2-rc2"
|
||||
do! setDbVersion "v2-rc2"
|
||||
}
|
||||
|
||||
/// Migrate from v2-rc2 to v2
|
||||
let migrateV2Rc2ToV2 () = backgroundTask {
|
||||
Utils.logMigrationStep log "v2-rc2 to v2" "Setting database version; no migration required"
|
||||
do! setDbVersion "v2"
|
||||
}
|
||||
|
||||
/// Migrate data among versions (up only)
|
||||
let migrate version = backgroundTask {
|
||||
|
||||
match version with
|
||||
| Some v when v = "v2" -> ()
|
||||
| Some v when v = "v2-rc2" -> do! migrateV2Rc2ToV2 ()
|
||||
| Some v when v = "v2-rc1" -> do! migrateV2Rc1ToV2Rc2 ()
|
||||
| Some _
|
||||
| None ->
|
||||
log.LogWarning $"Unknown database version; assuming {Utils.currentDbVersion}"
|
||||
do! setDbVersion Utils.currentDbVersion
|
||||
}
|
||||
|
||||
/// The connection for this instance
|
||||
member _.Conn = conn
|
||||
|
||||
/// Make a SQLite connection ready to execute commands
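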
|
||||
static member setUpConnection (conn : SqliteConnection) = backgroundTask {
|
||||
do! conn.OpenAsync ()
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- "PRAGMA foreign_keys = TRUE"
|
||||
let! _ = cmd.ExecuteNonQueryAsync ()
|
||||
()
|
||||
}
|
||||
|
||||
interface IData with
|
||||
|
||||
member _.Category = SQLiteCategoryData conn
|
||||
member _.Page = SQLitePageData (conn, ser)
|
||||
member _.Post = SQLitePostData (conn, ser)
|
||||
member _.TagMap = SQLiteTagMapData conn
|
||||
member _.Theme = SQLiteThemeData conn
|
||||
member _.ThemeAsset = SQLiteThemeAssetData conn
|
||||
member _.Upload = SQLiteUploadData conn
|
||||
member _.WebLog = SQLiteWebLogData (conn, ser)
|
||||
member _.WebLogUser = SQLiteWebLogUserData conn
|
||||
|
||||
member _.Serializer = ser
|
||||
|
||||
member _.StartUp () = backgroundTask {
|
||||
do! ensureTables ()
|
||||
|
||||
use cmd = conn.CreateCommand ()
|
||||
cmd.CommandText <- "SELECT id FROM db_version"
|
||||
use! rdr = cmd.ExecuteReaderAsync ()
|
||||
let version = if rdr.Read () then Some (Map.getString "id" rdr) else None
|
||||
match version with
|
||||
| Some v when v = "v2-rc2" -> ()
|
||||
| Some _
|
||||
| None -> do! migrate version
|
||||
}
|
||||
|
||||
@@ -5,6 +5,9 @@ module internal MyWebLog.Data.Utils
|
||||
open MyWebLog
|
||||
open MyWebLog.ViewModels
|
||||
|
||||
/// The current database version
|
||||
let currentDbVersion = "v2"
|
||||
|
||||
/// Create a category hierarchy from the given list of categories
|
||||
let rec orderByHierarchy (cats : Category list) parentId slugBase parentNames = seq {
|
||||
for cat in cats |> List.filter (fun c -> c.ParentId = parentId) do
|
||||
@@ -20,3 +23,36 @@ let rec orderByHierarchy (cats : Category list) parentId slugBase parentNames =
|
||||
yield! orderByHierarchy cats (Some cat.Id) (Some fullSlug) ([ cat.Name ] |> List.append parentNames)
|
||||
}
|
||||
|
||||
/// Get lists of items removed from and added to the given lists
|
||||
let diffLists<'T, 'U when 'U : equality> oldItems newItems (f : 'T -> 'U) =
|
||||
let diff compList = fun item -> not (compList |> List.exists (fun other -> f item = f other))
|
||||
List.filter (diff newItems) oldItems, List.filter (diff oldItems) newItems
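// A quick illustration of diffLists: items are compared via the projection, and the result is
// the pair (removed from the old list, added in the new list). Sample values are illustrative.
let oldTags = [ "f#"; "sqlite" ]
let newTags = [ "sqlite"; "nodatime" ]
let removed, added = diffLists oldTags newTags id
// removed = [ "f#" ]; added = [ "nodatime" ]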
|
||||
|
||||
/// Find meta items added and removed
|
||||
let diffMetaItems (oldItems : MetaItem list) newItems =
|
||||
diffLists oldItems newItems (fun item -> $"{item.Name}|{item.Value}")
|
||||
|
||||
/// Find the permalinks added and removed
|
||||
let diffPermalinks oldLinks newLinks =
|
||||
diffLists oldLinks newLinks Permalink.toString
|
||||
|
||||
/// Find the revisions added and removed
|
||||
let diffRevisions oldRevs newRevs =
|
||||
diffLists oldRevs newRevs (fun (rev : Revision) -> $"{rev.AsOf.ToUnixTimeTicks ()}|{MarkupText.toString rev.Text}")
|
||||
|
||||
open MyWebLog.Converters
|
||||
open Newtonsoft.Json
|
||||
|
||||
/// Serialize an object to JSON
|
||||
let serialize<'T> ser (item : 'T) =
|
||||
JsonConvert.SerializeObject (item, Json.settings ser)
|
||||
|
||||
/// Deserialize a JSON string
|
||||
let deserialize<'T> (ser : JsonSerializer) value =
|
||||
JsonConvert.DeserializeObject<'T> (value, Json.settings ser)
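// A usage sketch: the v2-rc1 to v2-rc2 migration round-trips lists like this through the new
// meta_items TEXT columns. It assumes a JsonSerializer configured via Json.settings is in scope
// as ser; the sample values are illustrative, and the exact JSON shape depends on those settings.
let items : MetaItem list = [ { Name = "subtitle"; Value = "An example" } ]
let json = serialize ser items
let back = deserialize<MetaItem list> ser json   // back carries the same name/value pairs as items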
|
||||
|
||||
open Microsoft.Extensions.Logging
|
||||
|
||||
/// Log a migration step
|
||||
let logMigrationStep<'T> (log : ILogger<'T>) migration message =
|
||||
log.LogInformation $"Migrating %s{migration}: %s{message}"
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
open System
|
||||
open MyWebLog
|
||||
open NodaTime
|
||||
|
||||
/// A category under which a post may be identified
|
||||
[<CLIMutable; NoComparison; NoEquality>]
|
||||
@@ -64,7 +65,7 @@ type Comment =
|
||||
Status : CommentStatus
|
||||
|
||||
/// When the comment was posted
|
||||
PostedOn : DateTime
|
||||
PostedOn : Instant
|
||||
|
||||
/// The text of the comment
|
||||
Text : string
|
||||
@@ -82,7 +83,7 @@ module Comment =
|
||||
Email = ""
|
||||
Url = None
|
||||
Status = Pending
|
||||
PostedOn = DateTime.UtcNow
|
||||
PostedOn = Noda.epoch
|
||||
Text = ""
|
||||
}
|
||||
|
||||
@@ -106,10 +107,10 @@ type Page =
|
||||
Permalink : Permalink
|
||||
|
||||
/// When this page was published
|
||||
PublishedOn : DateTime
|
||||
PublishedOn : Instant
|
||||
|
||||
/// When this page was last updated
|
||||
UpdatedOn : DateTime
|
||||
UpdatedOn : Instant
|
||||
|
||||
/// Whether this page shows as part of the web log's navigation
|
||||
IsInPageList : bool
|
||||
@@ -140,8 +141,8 @@ module Page =
|
||||
AuthorId = WebLogUserId.empty
|
||||
Title = ""
|
||||
Permalink = Permalink.empty
|
||||
PublishedOn = DateTime.MinValue
|
||||
UpdatedOn = DateTime.MinValue
|
||||
PublishedOn = Noda.epoch
|
||||
UpdatedOn = Noda.epoch
|
||||
IsInPageList = false
|
||||
Template = None
|
||||
Text = ""
|
||||
@@ -173,10 +174,10 @@ type Post =
|
||||
Permalink : Permalink
|
||||
|
||||
/// The instant on which the post was originally published
|
||||
PublishedOn : DateTime option
|
||||
PublishedOn : Instant option
|
||||
|
||||
/// The instant on which the post was last updated
|
||||
UpdatedOn : DateTime
|
||||
UpdatedOn : Instant
|
||||
|
||||
/// The template to use in displaying the post
|
||||
Template : string option
|
||||
@@ -215,7 +216,7 @@ module Post =
|
||||
Title = ""
|
||||
Permalink = Permalink.empty
|
||||
PublishedOn = None
|
||||
UpdatedOn = DateTime.MinValue
|
||||
UpdatedOn = Noda.epoch
|
||||
Text = ""
|
||||
Template = None
|
||||
CategoryIds = []
|
||||
@@ -288,7 +289,7 @@ type ThemeAsset =
|
||||
Id : ThemeAssetId
|
||||
|
||||
/// The updated date (set from the file date from the ZIP archive)
|
||||
UpdatedOn : DateTime
|
||||
UpdatedOn : Instant
|
||||
|
||||
/// The data for the asset
|
||||
Data : byte[]
|
||||
@@ -300,7 +301,7 @@ module ThemeAsset =
|
||||
/// An empty theme asset
|
||||
let empty =
|
||||
{ Id = ThemeAssetId (ThemeId "", "")
|
||||
UpdatedOn = DateTime.MinValue
|
||||
UpdatedOn = Noda.epoch
|
||||
Data = [||]
|
||||
}
|
||||
|
||||
@@ -317,7 +318,7 @@ type Upload =
|
||||
Path : Permalink
|
||||
|
||||
/// The updated date/time for this upload
|
||||
UpdatedOn : DateTime
|
||||
UpdatedOn : Instant
|
||||
|
||||
/// The data for the upload
|
||||
Data : byte[]
|
||||
@@ -331,7 +332,7 @@ module Upload =
|
||||
{ Id = UploadId.empty
|
||||
WebLogId = WebLogId.empty
|
||||
Path = Permalink.empty
|
||||
UpdatedOn = DateTime.MinValue
|
||||
UpdatedOn = Noda.epoch
|
||||
Data = [||]
|
||||
}
|
||||
|
||||
@@ -410,10 +411,11 @@ module WebLog =
|
||||
let _, leadPath = hostAndPath webLog
|
||||
$"{leadPath}/{Permalink.toString permalink}"
|
||||
|
||||
/// Convert a UTC date/time to the web log's local date/time
|
||||
let localTime webLog (date : DateTime) =
|
||||
TimeZoneInfo.ConvertTimeFromUtc
|
||||
(DateTime (date.Ticks, DateTimeKind.Utc), TimeZoneInfo.FindSystemTimeZoneById webLog.TimeZone)
|
||||
/// Convert an Instant (UTC reference) to the web log's local date/time
|
||||
let localTime webLog (date : Instant) =
|
||||
match DateTimeZoneProviders.Tzdb[webLog.TimeZone] with
|
||||
| null -> date.ToDateTimeUtc ()
|
||||
| tz -> date.InZone(tz).ToDateTimeUnspecified ()
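// A small sketch of the reworked localTime: an Instant from the data layer rendered in the web
// log's IANA time zone, falling back to UTC when the zone ID is not found. The web log value is
// illustrative and assumes a WebLog.empty value like the other empty values in this module.
let chicagoLog = { WebLog.empty with TimeZone = "America/Chicago" }
let postedAt   = Instant.FromUtc (2023, 1, 1, 15, 0)
let localDate  = WebLog.localTime chicagoLog postedAt   // 2023-01-01 09:00 (CST)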
|
||||
|
||||
|
||||
/// A user of the web log
|
||||
@@ -440,9 +442,6 @@ type WebLogUser =
|
||||
/// The hash of the user's password
|
||||
PasswordHash : string
|
||||
|
||||
/// Salt used to calculate the user's password hash
|
||||
Salt : Guid
|
||||
|
||||
/// The URL of the user's personal site
|
||||
Url : string option
|
||||
|
||||
@@ -450,10 +449,10 @@ type WebLogUser =
|
||||
AccessLevel : AccessLevel
|
||||
|
||||
/// When the user was created
|
||||
CreatedOn : DateTime
|
||||
CreatedOn : Instant
|
||||
|
||||
/// When the user last logged on
|
||||
LastSeenOn : DateTime option
|
||||
LastSeenOn : Instant option
|
||||
}
|
||||
|
||||
/// Functions to support web log users
|
||||
@@ -468,10 +467,9 @@ module WebLogUser =
|
||||
LastName = ""
|
||||
PreferredName = ""
|
||||
PasswordHash = ""
|
||||
Salt = Guid.Empty
|
||||
Url = None
|
||||
AccessLevel = Author
|
||||
CreatedOn = DateTime.UnixEpoch
|
||||
CreatedOn = Noda.epoch
|
||||
LastSeenOn = None
|
||||
}
|
||||
|
||||
|
||||
@@ -7,9 +7,9 @@
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Markdig" Version="0.30.2" />
|
||||
<PackageReference Update="FSharp.Core" Version="6.0.5" />
|
||||
<PackageReference Include="Markdown.ColorCode" Version="1.0.1" />
|
||||
<PackageReference Include="Markdig" Version="0.30.4" />
|
||||
<PackageReference Include="Markdown.ColorCode" Version="1.0.2" />
|
||||
<PackageReference Include="NodaTime" Version="3.1.6" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
namespace MyWebLog
|
||||
|
||||
open System
|
||||
open NodaTime
|
||||
|
||||
/// Support functions for domain definition
|
||||
[<AutoOpen>]
|
||||
@@ -12,6 +13,29 @@ module private Helpers =
|
||||
Convert.ToBase64String(Guid.NewGuid().ToByteArray ()).Replace('/', '_').Replace('+', '-').Substring (0, 22)
|
||||
|
||||
|
||||
/// Functions to support NodaTime manipulation
|
||||
module Noda =
|
||||
|
||||
/// The clock to use when getting "now" (will make mutable for testing)
|
||||
let clock : IClock = SystemClock.Instance
|
||||
|
||||
/// The Unix epoch
|
||||
let epoch = Instant.FromUnixTimeSeconds 0L
|
||||
|
||||
|
||||
/// Truncate an instant to remove fractional seconds
|
||||
let toSecondsPrecision (value : Instant) =
|
||||
Instant.FromUnixTimeSeconds (value.ToUnixTimeSeconds ())
|
||||
|
||||
/// The current Instant, with fractional seconds truncated
|
||||
let now () =
|
||||
toSecondsPrecision (clock.GetCurrentInstant ())
|
||||
|
||||
/// Convert a date/time to an Instant with whole seconds
|
||||
let fromDateTime (dt : DateTime) =
|
||||
toSecondsPrecision (Instant.FromDateTimeUtc (DateTime (dt.Ticks, DateTimeKind.Utc)))
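// A brief sketch of the Noda helpers in use; every value is truncated to whole seconds so it
// round-trips cleanly through the TEXT columns used by the SQLite data store.
let stamp        = Noda.now ()                                        // current instant, seconds precision
let fromLegacy   = Noda.fromDateTime (DateTime (2023, 1, 1, 0, 0, 0)) // legacy DateTime -> Instant
let isAfterEpoch = fromLegacy >= Noda.epoch                           // true; Instants compare chronologically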
|
||||
|
||||
|
||||
/// A user's access level
|
||||
type AccessLevel =
|
||||
/// The user may create and publish posts and edit the ones they have created
|
||||
@@ -137,6 +161,8 @@ module ExplicitRating =
|
||||
| x -> raise (invalidArg "rating" $"{x} is not a valid explicit rating")
|
||||
|
||||
|
||||
open NodaTime.Text
|
||||
|
||||
/// A podcast episode
|
||||
type Episode =
|
||||
{ /// The URL to the media file for the episode (may be permalink)
|
||||
@@ -146,7 +172,7 @@ type Episode =
|
||||
Length : int64
|
||||
|
||||
/// The duration of the episode
|
||||
Duration : TimeSpan option
|
||||
Duration : Duration option
|
||||
|
||||
/// The media type of the file (overrides podcast default if present)
|
||||
MediaType : string option
|
||||
@@ -215,6 +241,10 @@ module Episode =
|
||||
EpisodeDescription = None
|
||||
}
|
||||
|
||||
/// Format a duration for an episode
|
||||
let formatDuration ep =
|
||||
ep.Duration |> Option.map (DurationPattern.CreateWithInvariantCulture("H:mm:ss").Format)
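// An example of formatDuration output, assuming the Episode.empty value defined above: a
// NodaTime Duration of 1 hour, 23 minutes, 5 seconds renders as "1:23:05" for the feed.
let withDuration = { Episode.empty with Duration = Some (Duration.FromMinutes 83L + Duration.FromSeconds 5L) }
let formatted    = Episode.formatDuration withDuration   // Some "1:23:05"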
|
||||
|
||||
|
||||
open Markdig
|
||||
open Markdown.ColorCode
|
||||
@@ -269,12 +299,11 @@ module MetaItem =
|
||||
let empty =
|
||||
{ Name = ""; Value = "" }
|
||||
|
||||
|
||||
/// A revision of a page or post
|
||||
[<CLIMutable; NoComparison; NoEquality>]
|
||||
type Revision =
|
||||
{ /// When this revision was saved
|
||||
AsOf : DateTime
|
||||
AsOf : Instant
|
||||
|
||||
/// The text of the revision
|
||||
Text : MarkupText
|
||||
@@ -285,7 +314,7 @@ module Revision =
|
||||
|
||||
/// An empty revision
|
||||
let empty =
|
||||
{ AsOf = DateTime.UtcNow
|
||||
{ AsOf = Noda.epoch
|
||||
Text = Html ""
|
||||
}
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
open System
|
||||
open MyWebLog
|
||||
open NodaTime
|
||||
|
||||
/// Helper functions for view models
|
||||
[<AutoOpen>]
|
||||
@@ -138,8 +139,8 @@ type DisplayPage =
|
||||
AuthorId = WebLogUserId.toString page.AuthorId
|
||||
Title = page.Title
|
||||
Permalink = Permalink.toString page.Permalink
|
||||
PublishedOn = page.PublishedOn
|
||||
UpdatedOn = page.UpdatedOn
|
||||
PublishedOn = WebLog.localTime webLog page.PublishedOn
|
||||
UpdatedOn = WebLog.localTime webLog page.UpdatedOn
|
||||
IsInPageList = page.IsInPageList
|
||||
IsDefault = pageId = webLog.DefaultPage
|
||||
Text = ""
|
||||
@@ -154,8 +155,8 @@ type DisplayPage =
|
||||
AuthorId = WebLogUserId.toString page.AuthorId
|
||||
Title = page.Title
|
||||
Permalink = Permalink.toString page.Permalink
|
||||
PublishedOn = page.PublishedOn
|
||||
UpdatedOn = page.UpdatedOn
|
||||
PublishedOn = WebLog.localTime webLog page.PublishedOn
|
||||
UpdatedOn = WebLog.localTime webLog page.UpdatedOn
|
||||
IsInPageList = page.IsInPageList
|
||||
IsDefault = pageId = webLog.DefaultPage
|
||||
Text = addBaseToRelativeUrls extra page.Text
|
||||
@@ -179,7 +180,7 @@ with
|
||||
|
||||
/// Create a display revision from an actual revision
|
||||
static member fromRevision webLog (rev : Revision) =
|
||||
{ AsOf = rev.AsOf
|
||||
{ AsOf = rev.AsOf.ToDateTimeUtc ()
|
||||
AsOfLocal = WebLog.localTime webLog rev.AsOf
|
||||
Format = MarkupText.sourceType rev.Text
|
||||
}
|
||||
@@ -723,7 +724,7 @@ type EditPostModel =
|
||||
IsEpisode = Option.isSome post.Episode
|
||||
Media = episode.Media
|
||||
Length = episode.Length
|
||||
Duration = defaultArg (episode.Duration |> Option.map (fun it -> it.ToString """hh\:mm\:ss""")) ""
|
||||
Duration = defaultArg (Episode.formatDuration episode) ""
|
||||
MediaType = defaultArg episode.MediaType ""
|
||||
ImageUrl = defaultArg episode.ImageUrl ""
|
||||
Subtitle = defaultArg episode.Subtitle ""
|
||||
@@ -781,7 +782,8 @@ type EditPostModel =
|
||||
Some {
|
||||
Media = this.Media
|
||||
Length = this.Length
|
||||
Duration = noneIfBlank this.Duration |> Option.map TimeSpan.Parse
|
||||
Duration = noneIfBlank this.Duration
|
||||
|> Option.map (TimeSpan.Parse >> Duration.FromTimeSpan)
|
||||
MediaType = noneIfBlank this.MediaType
|
||||
ImageUrl = noneIfBlank this.ImageUrl
|
||||
Subtitle = noneIfBlank this.Subtitle
|
||||
|
||||
@@ -56,7 +56,6 @@ module Extensions =
|
||||
defaultArg (this.UserAccessLevel |> Option.map (AccessLevel.hasAccess level)) false
|
||||
|
||||
|
||||
|
||||
open System.Collections.Concurrent
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -5,6 +5,7 @@ open System.Threading.Tasks
|
||||
open Giraffe
|
||||
open MyWebLog
|
||||
open MyWebLog.ViewModels
|
||||
open NodaTime
|
||||
|
||||
/// ~~ DASHBOARDS ~~
|
||||
module Dashboard =
|
||||
@@ -13,22 +14,21 @@ module Dashboard =
|
||||
let user : HttpHandler = requireAccess Author >=> fun next ctx -> task {
|
||||
let getCount (f : WebLogId -> Task<int>) = f ctx.WebLog.Id
|
||||
let data = ctx.Data
|
||||
let posts = getCount (data.Post.CountByStatus Published)
|
||||
let drafts = getCount (data.Post.CountByStatus Draft)
|
||||
let pages = getCount data.Page.CountAll
|
||||
let listed = getCount data.Page.CountListed
|
||||
let cats = getCount data.Category.CountAll
|
||||
let topCats = getCount data.Category.CountTopLevel
|
||||
let! _ = Task.WhenAll (posts, drafts, pages, listed, cats, topCats)
|
||||
let! posts = getCount (data.Post.CountByStatus Published)
|
||||
let! drafts = getCount (data.Post.CountByStatus Draft)
|
||||
let! pages = getCount data.Page.CountAll
|
||||
let! listed = getCount data.Page.CountListed
|
||||
let! cats = getCount data.Category.CountAll
|
||||
let! topCats = getCount data.Category.CountTopLevel
|
||||
return!
|
||||
hashForPage "Dashboard"
|
||||
|> addToHash ViewContext.Model {
|
||||
Posts = posts.Result
|
||||
Drafts = drafts.Result
|
||||
Pages = pages.Result
|
||||
ListedPages = listed.Result
|
||||
Categories = cats.Result
|
||||
TopLevelCategories = topCats.Result
|
||||
Posts = posts
|
||||
Drafts = drafts
|
||||
Pages = pages
|
||||
ListedPages = listed
|
||||
Categories = cats
|
||||
TopLevelCategories = topCats
|
||||
}
|
||||
|> adminView "dashboard" next ctx
|
||||
}
|
||||
@@ -344,7 +344,8 @@ module Theme =
|
||||
do! asset.Open().CopyToAsync stream
|
||||
do! data.ThemeAsset.Save
|
||||
{ Id = ThemeAssetId (themeId, assetName)
|
||||
UpdatedOn = asset.LastWriteTime.DateTime
|
||||
UpdatedOn = LocalDateTime.FromDateTime(asset.LastWriteTime.DateTime)
|
||||
.InZoneLeniently(DateTimeZone.Utc).ToInstant ()
|
||||
Data = stream.ToArray ()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -95,8 +95,8 @@ let private toFeedItem webLog (authors : MetaItem list) (cats : DisplayCategory[
|
||||
let item = SyndicationItem (
|
||||
Id = WebLog.absoluteUrl webLog post.Permalink,
|
||||
Title = TextSyndicationContent.CreateHtmlContent post.Title,
|
||||
PublishDate = DateTimeOffset post.PublishedOn.Value,
|
||||
LastUpdatedTime = DateTimeOffset post.UpdatedOn,
|
||||
PublishDate = post.PublishedOn.Value.ToDateTimeOffset (),
|
||||
LastUpdatedTime = post.UpdatedOn.ToDateTimeOffset (),
|
||||
Content = TextSyndicationContent.CreatePlaintextContent plainText)
|
||||
item.AddPermalink (Uri item.Id)
|
||||
|
||||
@@ -163,8 +163,8 @@ let private addEpisode webLog (podcast : PodcastOptions) (episode : Episode) (po
|
||||
item.ElementExtensions.Add ("author", Namespace.iTunes, podcast.DisplayedAuthor)
|
||||
item.ElementExtensions.Add ("explicit", Namespace.iTunes, epExplicit)
|
||||
episode.Subtitle |> Option.iter (fun it -> item.ElementExtensions.Add ("subtitle", Namespace.iTunes, it))
|
||||
episode.Duration
|
||||
|> Option.iter (fun it -> item.ElementExtensions.Add ("duration", Namespace.iTunes, it.ToString """hh\:mm\:ss"""))
|
||||
Episode.formatDuration episode
|
||||
|> Option.iter (fun it -> item.ElementExtensions.Add ("duration", Namespace.iTunes, it))
|
||||
|
||||
match episode.ChapterFile with
|
||||
| Some chapters ->
|
||||
@@ -381,7 +381,7 @@ let createFeed (feedType : FeedType) posts : HttpHandler = fun next ctx -> backg
|
||||
addNamespace feed "content" Namespace.content
|
||||
setTitleAndDescription feedType webLog cats feed
|
||||
|
||||
feed.LastUpdatedTime <- (List.head posts).UpdatedOn |> DateTimeOffset
|
||||
feed.LastUpdatedTime <- (List.head posts).UpdatedOn.ToDateTimeOffset ()
|
||||
feed.Generator <- ctx.Generator
|
||||
feed.Items <- posts |> Seq.ofList |> Seq.map toItem
|
||||
feed.Language <- "en"

@@ -122,7 +122,6 @@ module ViewContext =
let WebLog = "web_log"



/// The HTTP item key for loading the session
let private sessionLoadedKey = "session-loaded"

@@ -419,10 +418,11 @@ let getCategoryIds slug ctx =

open System
open System.Globalization
open NodaTime

/// Parse a date/time to UTC
let parseToUtc (date : string) =
DateTime.Parse (date, null, DateTimeStyles.AdjustToUniversal)
Instant.FromDateTimeUtc (DateTime.Parse (date, null, DateTimeStyles.AdjustToUniversal))
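With this change parseToUtc yields a NodaTime Instant instead of a DateTime. A quick usage sketch under that assumption (the date string is arbitrary):

let imported = parseToUtc "2022-07-04 12:30:00Z"
// imported : Instant, suitable for the Instant-typed fields introduced by the NodaTime migration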

open Microsoft.Extensions.DependencyInjection
open Microsoft.Extensions.Logging

@@ -12,9 +12,14 @@ let all pageNbr : HttpHandler = requireAccess Author >=> fun next ctx -> task {
return!
hashForPage "Pages"
|> withAntiCsrf ctx
|> addToHash "pages" (pages |> List.map (DisplayPage.fromPageMinimal ctx.WebLog))
|> addToHash "pages" (pages
|> Seq.ofList
|> Seq.truncate 25
|> Seq.map (DisplayPage.fromPageMinimal ctx.WebLog)
|> List.ofSeq)
|> addToHash "page_nbr" pageNbr
|> addToHash "prev_page" (if pageNbr = 2 then "" else $"/page/{pageNbr - 1}")
|> addToHash "has_next" (List.length pages > 25)
|> addToHash "next_page" $"/page/{pageNbr + 1}"
|> adminView "page-list" next ctx
}
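The page list above now fetches one row beyond the page size and truncates, so "has_next" can be computed without a second query. A generic version of that look-ahead idea (names are illustrative):

let pageWithLookAhead pageSize (items : 'T list) =
    let pageItems = items |> List.truncate pageSize
    pageItems, List.length items > pageSize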
@@ -139,15 +144,13 @@ let previewRevision (pgId, revDate) : HttpHandler = requireAccess Author >=> fun
| _, None -> return! Error.notFound next ctx
}

open System

// POST /admin/page/{id}/revision/{revision-date}/restore
let restoreRevision (pgId, revDate) : HttpHandler = requireAccess Author >=> fun next ctx -> task {
match! findPageRevision pgId revDate ctx with
| Some pg, Some rev when canEdit pg.AuthorId ctx ->
do! ctx.Data.Page.Update
{ pg with
Revisions = { rev with AsOf = DateTime.UtcNow }
Revisions = { rev with AsOf = Noda.now () }
:: (pg.Revisions |> List.filter (fun r -> r.AsOf <> rev.AsOf))
}
do! addMessage ctx { UserMessage.success with Message = "Revision restored successfully" }
@@ -173,7 +176,7 @@ let deleteRevision (pgId, revDate) : HttpHandler = requireAccess Author >=> fun
let save : HttpHandler = requireAccess Author >=> fun next ctx -> task {
let! model = ctx.BindFormAsync<EditPageModel> ()
let data = ctx.Data
let now = DateTime.UtcNow
let now = Noda.now ()
let tryPage =
if model.IsNew then
{ Page.empty with

@@ -53,8 +53,8 @@ let preparePostList webLog posts listType (url : string) pageNbr perPage (data :
match listType with
| SinglePost ->
let post = List.head posts
let dateTime = defaultArg post.PublishedOn post.UpdatedOn
data.Post.FindSurroundingPosts webLog.Id dateTime
let target = defaultArg post.PublishedOn post.UpdatedOn
data.Post.FindSurroundingPosts webLog.Id target
| _ -> Task.FromResult (None, None)
let newerLink =
match listType, pageNbr with
@@ -350,7 +350,7 @@ let restoreRevision (postId, revDate) : HttpHandler = requireAccess Author >=> f
| Some post, Some rev when canEdit post.AuthorId ctx ->
do! ctx.Data.Post.Update
{ post with
Revisions = { rev with AsOf = DateTime.UtcNow }
Revisions = { rev with AsOf = Noda.now () }
:: (post.Revisions |> List.filter (fun r -> r.AsOf <> rev.AsOf))
}
do! addMessage ctx { UserMessage.success with Message = "Revision restored successfully" }
@@ -376,7 +376,6 @@ let deleteRevision (postId, revDate) : HttpHandler = requireAccess Author >=> fu
let save : HttpHandler = requireAccess Author >=> fun next ctx -> task {
let! model = ctx.BindFormAsync<EditPostModel> ()
let data = ctx.Data
let now = DateTime.UtcNow
let tryPost =
if model.IsNew then
{ Post.empty with
@@ -389,7 +388,7 @@ let save : HttpHandler = requireAccess Author >=> fun next ctx -> task {
| Some post when canEdit post.AuthorId ctx ->
let priorCats = post.CategoryIds
let updatedPost =
model.UpdatePost post now
model.UpdatePost post (Noda.now ())
|> function
| post ->
if model.SetPublished then

@@ -94,7 +94,7 @@ module Asset =
| Some asset ->
match Upload.checkModified asset.UpdatedOn ctx with
| Some threeOhFour -> return! threeOhFour next ctx
| None -> return! Upload.sendFile asset.UpdatedOn path asset.Data next ctx
| None -> return! Upload.sendFile (asset.UpdatedOn.ToDateTimeUtc ()) path asset.Data next ctx
| None -> return! Error.notFound next ctx
}


@@ -29,15 +29,17 @@ module private Helpers =

// ~~ SERVING UPLOADS ~~

open System.Globalization
open Giraffe
open Microsoft.AspNetCore.Http
open NodaTime

/// Determine if the file has been modified since the date/time specified by the If-Modified-Since header
let checkModified since (ctx : HttpContext) : HttpHandler option =
match ctx.Request.Headers.IfModifiedSince with
| it when it.Count < 1 -> None
| it when since > DateTime.Parse it[0] -> None
| _ -> Some (setStatusCode 304 >=> setBodyFromString "Not Modified")
| it when since > Instant.FromDateTimeUtc (DateTime.Parse (it[0], null, DateTimeStyles.AdjustToUniversal)) -> None
| _ -> Some (setStatusCode 304)
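checkModified now parses the If-Modified-Since header to an Instant before comparing it with the stored timestamp. A standalone sketch of that comparison, assuming the stored value is already an Instant:

open System
open System.Globalization
open NodaTime

let isModifiedSince (updatedOn : Instant) (header : string) =
    updatedOn > Instant.FromDateTimeUtc (DateTime.Parse (header, null, DateTimeStyles.AdjustToUniversal))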


open Microsoft.AspNetCore.Http.Headers
@@ -73,7 +75,7 @@ let serve (urlParts : string seq) : HttpHandler = fun next ctx -> task {
| Some upload ->
match checkModified upload.UpdatedOn ctx with
| Some threeOhFour -> return! threeOhFour next ctx
| None -> return! sendFile upload.UpdatedOn path upload.Data next ctx
| None -> return! sendFile (upload.UpdatedOn.ToDateTimeUtc ()) path upload.Data next ctx
| None -> return! Error.notFound next ctx
else
return! Error.notFound next ctx
@@ -143,7 +145,8 @@ let save : HttpHandler = requireAccess Author >=> fun next ctx -> task {
let upload = Seq.head ctx.Request.Form.Files
let fileName = String.Concat (makeSlug (Path.GetFileNameWithoutExtension upload.FileName),
Path.GetExtension(upload.FileName).ToLowerInvariant ())
let localNow = WebLog.localTime ctx.WebLog DateTime.Now
let now = Noda.now ()
let localNow = WebLog.localTime ctx.WebLog now
let year = localNow.ToString "yyyy"
let month = localNow.ToString "MM"
let! form = ctx.BindFormAsync<UploadFileModel> ()
@@ -156,7 +159,7 @@ let save : HttpHandler = requireAccess Author >=> fun next ctx -> task {
{ Id = UploadId.create ()
WebLogId = ctx.WebLog.Id
Path = Permalink $"{year}/{month}/{fileName}"
UpdatedOn = DateTime.UtcNow
UpdatedOn = now
Data = stream.ToArray ()
}
do! ctx.Data.Upload.Add file

@@ -2,19 +2,32 @@
module MyWebLog.Handlers.User

open System
open System.Security.Cryptography
open System.Text
open Microsoft.AspNetCore.Http
open Microsoft.AspNetCore.Identity
open MyWebLog
open NodaTime

// ~~ LOG ON / LOG OFF ~~

/// Hash a password for a given user
let hashedPassword (plainText : string) (email : string) (salt : Guid) =
let allSalt = Array.concat [ salt.ToByteArray (); Encoding.UTF8.GetBytes email ]
use alg = new Rfc2898DeriveBytes (plainText, allSalt, 2_048)
Convert.ToBase64String (alg.GetBytes 64)
/// Create a password hash a password for a given user
let createPasswordHash user password =
PasswordHasher<WebLogUser>().HashPassword (user, password)

/// Verify whether a password is valid
let verifyPassword user password (ctx : HttpContext) = backgroundTask {
match user with
| Some usr ->
let hasher = PasswordHasher<WebLogUser> ()
match hasher.VerifyHashedPassword (usr, usr.PasswordHash, password) with
| PasswordVerificationResult.Success -> return Ok ()
| PasswordVerificationResult.SuccessRehashNeeded ->
do! ctx.Data.WebLogUser.Update { usr with PasswordHash = hasher.HashPassword (usr, password) }
return Ok ()
| _ -> return Error "Log on attempt unsuccessful"
| None -> return Error "Log on attempt unsuccessful"
}
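The functions above replace the hand-rolled salted PBKDF2 scheme with ASP.NET Core Identity's PasswordHasher, which versions and salts hashes internally. A minimal sketch of that hasher outside myWebLog's own types (DemoUser is a stand-in):

open Microsoft.AspNetCore.Identity

type DemoUser = { Name : string }

let hasher = PasswordHasher<DemoUser> ()
let user   = { Name = "demo" }
let hash   = hasher.HashPassword (user, "s3cret")

match hasher.VerifyHashedPassword (user, hash, "s3cret") with
| PasswordVerificationResult.Success             -> printfn "verified"
| PasswordVerificationResult.SuccessRehashNeeded -> printfn "verified; store an updated hash"
| _                                              -> printfn "failed"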

open Giraffe
open MyWebLog
open MyWebLog.ViewModels

// GET /user/log-on
@@ -37,8 +50,10 @@ open Microsoft.AspNetCore.Authentication.Cookies
let doLogOn : HttpHandler = fun next ctx -> task {
let! model = ctx.BindFormAsync<LogOnModel> ()
let data = ctx.Data
match! data.WebLogUser.FindByEmail model.EmailAddress ctx.WebLog.Id with
| Some user when user.PasswordHash = hashedPassword model.Password user.Email user.Salt ->
let! tryUser = data.WebLogUser.FindByEmail model.EmailAddress ctx.WebLog.Id
match! verifyPassword tryUser model.Password ctx with
| Ok _ ->
let user = tryUser.Value
let claims = seq {
Claim (ClaimTypes.NameIdentifier, WebLogUserId.toString user.Id)
Claim (ClaimTypes.Name, $"{user.FirstName} {user.LastName}")
@@ -59,8 +74,8 @@ let doLogOn : HttpHandler = fun next ctx -> task {
match model.ReturnTo with
| Some url -> redirectTo false url next ctx
| None -> redirectToGet "admin/dashboard" next ctx
| _ ->
do! addMessage ctx { UserMessage.error with Message = "Log on attempt unsuccessful" }
| Error msg ->
do! addMessage ctx { UserMessage.error with Message = msg }
return! logOn model.ReturnTo next ctx
}

@@ -147,7 +162,9 @@ let private showMyInfo (model : EditMyInfoModel) (user : WebLogUser) : HttpHandl
|> addToHash ViewContext.Model model
|> addToHash "access_level" (AccessLevel.toString user.AccessLevel)
|> addToHash "created_on" (WebLog.localTime ctx.WebLog user.CreatedOn)
|> addToHash "last_seen_on" (WebLog.localTime ctx.WebLog (defaultArg user.LastSeenOn DateTime.UnixEpoch))
|> addToHash "last_seen_on" (WebLog.localTime ctx.WebLog
(defaultArg user.LastSeenOn (Instant.FromUnixTimeSeconds 0)))

|> adminView "my-info" next ctx


@@ -164,19 +181,13 @@ let saveMyInfo : HttpHandler = requireAccess Author >=> fun next ctx -> task {
let data = ctx.Data
match! data.WebLogUser.FindById ctx.UserId ctx.WebLog.Id with
| Some user when model.NewPassword = model.NewPasswordConfirm ->
let pw, salt =
if model.NewPassword = "" then
user.PasswordHash, user.Salt
else
let newSalt = Guid.NewGuid ()
hashedPassword model.NewPassword user.Email newSalt, newSalt
let pw = if model.NewPassword = "" then user.PasswordHash else createPasswordHash user model.NewPassword
let user =
{ user with
FirstName = model.FirstName
LastName = model.LastName
PreferredName = model.PreferredName
PasswordHash = pw
Salt = salt
}
do! data.WebLogUser.Update user
let pwMsg = if model.NewPassword = "" then "" else " and updated your password"
@@ -200,7 +211,7 @@ let save : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task {
{ WebLogUser.empty with
Id = WebLogUserId.create ()
WebLogId = ctx.WebLog.Id
CreatedOn = DateTime.UtcNow
CreatedOn = Noda.now ()
} |> someTask
else data.WebLogUser.FindById (WebLogUserId model.Id) ctx.WebLog.Id
match! tryUser with
@@ -211,9 +222,7 @@ let save : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task {
else
let toUpdate =
if model.Password = "" then updatedUser
else
let salt = Guid.NewGuid ()
{ updatedUser with PasswordHash = hashedPassword model.Password model.Email salt; Salt = salt }
else { updatedUser with PasswordHash = createPasswordHash updatedUser model.Password }
do! (if model.IsNew then data.WebLogUser.Add else data.WebLogUser.Update) toUpdate
do! addMessage ctx
{ UserMessage.success with
@@ -227,4 +236,3 @@ let save : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task {
next ctx
| None -> return! Error.notFound next ctx
}

@@ -4,6 +4,7 @@ open System
open System.IO
open Microsoft.Extensions.DependencyInjection
open MyWebLog.Data
open NodaTime

/// Create the web log information
let private doCreateWebLog (args : string[]) (sp : IServiceProvider) = task {
@@ -41,10 +42,8 @@ let private doCreateWebLog (args : string[]) (sp : IServiceProvider) = task {
}

// Create the admin user
let salt = Guid.NewGuid ()
let now = DateTime.UtcNow

do! data.WebLogUser.Add
let now = Noda.now ()
let user =
{ WebLogUser.empty with
Id = userId
WebLogId = webLogId
@@ -52,11 +51,10 @@ let private doCreateWebLog (args : string[]) (sp : IServiceProvider) = task {
FirstName = "Admin"
LastName = "User"
PreferredName = "Admin"
PasswordHash = Handlers.User.hashedPassword args[4] args[3] salt
Salt = salt
AccessLevel = accessLevel
CreatedOn = now
}
do! data.WebLogUser.Add { user with PasswordHash = Handlers.User.createPasswordHash user args[4] }

// Create the default home page
do! data.Page.Add
@@ -89,7 +87,7 @@ let private doCreateWebLog (args : string[]) (sp : IServiceProvider) = task {
let createWebLog args sp = task {
match args |> Array.length with
| 5 -> do! doCreateWebLog args sp
| _ -> eprintfn "Usage: MyWebLog init [url] [name] [admin-email] [admin-pw]"
| _ -> eprintfn "Usage: myWebLog init [url] [name] [admin-email] [admin-pw]"
}

/// Import prior permalinks from a text files with lines in the format "[old] [new]"
@@ -122,7 +120,7 @@ let private importPriorPermalinks urlBase file (sp : IServiceProvider) = task {
let importLinks args sp = task {
match args |> Array.length with
| 3 -> do! importPriorPermalinks args[1] args[2] sp
| _ -> eprintfn "Usage: MyWebLog import-links [url] [file-name]"
| _ -> eprintfn "Usage: myWebLog import-links [url] [file-name]"
}

// Loading a theme and restoring a backup are not statically compilable; this is OK
@@ -149,13 +147,12 @@ let loadTheme (args : string[]) (sp : IServiceProvider) = task {
log.LogInformation $"{theme.Name} v{theme.Version} ({ThemeId.toString theme.Id}) loaded"
| Error message -> eprintfn $"{message}"
else
eprintfn "Usage: MyWebLog load-theme [theme-zip-file-name]"
eprintfn "Usage: myWebLog load-theme [theme-zip-file-name]"
}

/// Back up a web log's data
module Backup =

open System.Threading.Tasks
open MyWebLog.Converters
open Newtonsoft.Json

@@ -165,7 +162,7 @@ module Backup =
Id : ThemeAssetId

/// The updated date for this asset
UpdatedOn : DateTime
UpdatedOn : Instant

/// The data for this asset, base-64 encoded
Data : string
@@ -197,7 +194,7 @@ module Backup =
Path : Permalink

/// The date/time this upload was last updated (file time)
UpdatedOn : DateTime
UpdatedOn : Instant

/// The data for the upload, base-64 encoded
Data : string
@@ -251,10 +248,9 @@ module Backup =
Uploads : EncodedUpload list
}

/// Create a JSON serializer (uses RethinkDB data implementation's JSON converters)
/// Create a JSON serializer
let private getSerializer prettyOutput =
let serializer = JsonSerializer.CreateDefault ()
Json.all () |> Seq.iter serializer.Converters.Add
let serializer = Json.configure (JsonSerializer.CreateDefault ())
if prettyOutput then serializer.Formatting <- Formatting.Indented
serializer

|
||||
printfn ""
|
||||
printfn "- Importing theme..."
|
||||
do! data.Theme.Save restore.Theme
|
||||
let! _ = restore.Assets |> List.map (EncodedAsset.toAsset >> data.ThemeAsset.Save) |> Task.WhenAll
|
||||
restore.Assets
|
||||
|> List.iter (EncodedAsset.toAsset >> data.ThemeAsset.Save >> Async.AwaitTask >> Async.RunSynchronously)
|

// Restore web log data

@@ -393,18 +390,19 @@ module Backup =
do! data.WebLogUser.Restore restore.Users

printfn "- Restoring categories and tag mappings..."
do! data.TagMap.Restore restore.TagMappings
do! data.Category.Restore restore.Categories
if not (List.isEmpty restore.TagMappings) then do! data.TagMap.Restore restore.TagMappings
if not (List.isEmpty restore.Categories) then do! data.Category.Restore restore.Categories

printfn "- Restoring pages..."
do! data.Page.Restore restore.Pages
if not (List.isEmpty restore.Pages) then do! data.Page.Restore restore.Pages

printfn "- Restoring posts..."
do! data.Post.Restore restore.Posts
if not (List.isEmpty restore.Posts) then do! data.Post.Restore restore.Posts

// TODO: comments not yet implemented

printfn "- Restoring uploads..."
if not (List.isEmpty restore.Uploads) then
do! data.Upload.Restore (restore.Uploads |> List.map EncodedUpload.toUpload)

displayStats "Restored for <>NAME<>:" restore.WebLog restore
@@ -451,7 +449,7 @@ module Backup =
do! createBackup webLog fileName prettyOutput data
| None -> eprintfn $"Error: no web log found for {args[1]}"
else
eprintfn """Usage: MyWebLog backup [url-base] [*backup-file-name] [**"pretty"]"""
eprintfn """Usage: myWebLog backup [url-base] [*backup-file-name] [**"pretty"]"""
eprintfn """ * optional - default is [web-log-slug].json"""
eprintfn """ ** optional - default is non-pretty JSON output"""
}
@@ -463,7 +461,7 @@ module Backup =
let newUrlBase = if args.Length = 3 then Some args[2] else None
do! restoreBackup args[1] newUrlBase (args[0] <> "do-restore") data
else
eprintfn "Usage: MyWebLog restore [backup-file-name] [*url-base]"
eprintfn "Usage: myWebLog restore [backup-file-name] [*url-base]"
eprintfn " * optional - will restore to original URL base if omitted"
eprintfn " (use do-restore to skip confirmation prompt)"
}
@@ -488,5 +486,24 @@ let private doUserUpgrade urlBase email (data : IData) = task {
let upgradeUser (args : string[]) (sp : IServiceProvider) = task {
match args.Length with
| 3 -> do! doUserUpgrade args[1] args[2] (sp.GetRequiredService<IData> ())
| _ -> eprintfn "Usage: MyWebLog upgrade-user [web-log-url-base] [email-address]"
| _ -> eprintfn "Usage: myWebLog upgrade-user [web-log-url-base] [email-address]"
}

/// Set a user's password
let doSetPassword urlBase email password (data : IData) = task {
match! data.WebLog.FindByHost urlBase with
| Some webLog ->
match! data.WebLogUser.FindByEmail email webLog.Id with
| Some user ->
do! data.WebLogUser.Update { user with PasswordHash = Handlers.User.createPasswordHash user password }
printfn $"Password for user {email} at {webLog.Name} set successfully"
| None -> eprintfn $"ERROR: no user {email} found at {urlBase}"
| None -> eprintfn $"ERROR: no web log found for {urlBase}"
}

/// Set a user's password if the command-line arguments are good
let setPassword (args : string[]) (sp : IServiceProvider) = task {
match args.Length with
| 4 -> do! doSetPassword args[1] args[2] args[3] (sp.GetRequiredService<IData> ())
| _ -> eprintfn "Usage: myWebLog set-password [web-log-url-base] [email-address] [password]"
}

@@ -23,14 +23,13 @@
</ItemGroup>

<ItemGroup>
<PackageReference Include="DotLiquid" Version="2.2.656" />
<PackageReference Include="DotLiquid" Version="2.2.682" />
<PackageReference Include="Giraffe" Version="6.0.0" />
<PackageReference Include="Giraffe.Htmx" Version="1.8.0" />
<PackageReference Include="Giraffe.ViewEngine.Htmx" Version="1.8.0" />
<PackageReference Include="Giraffe.Htmx" Version="1.8.5" />
<PackageReference Include="Giraffe.ViewEngine.Htmx" Version="1.8.5" />
<PackageReference Include="NeoSmart.Caching.Sqlite" Version="6.0.1" />
<PackageReference Include="RethinkDB.DistributedCache" Version="1.0.0-rc1" />
<PackageReference Update="FSharp.Core" Version="6.0.5" />
<PackageReference Include="System.ServiceModel.Syndication" Version="6.0.0" />
<PackageReference Include="System.ServiceModel.Syndication" Version="7.0.0" />
</ItemGroup>

<ItemGroup>

@@ -10,7 +10,7 @@ type WebLogMiddleware (next : RequestDelegate, log : ILogger<WebLogMiddleware>)
/// Is the debug level enabled on the logger?
let isDebug = log.IsEnabled LogLevel.Debug

member this.InvokeAsync (ctx : HttpContext) = task {
member _.InvokeAsync (ctx : HttpContext) = task {
/// Create the full path of the request
let path = $"{ctx.Request.Scheme}://{ctx.Request.Host.Value}{ctx.Request.Path.Value}"
match WebLogCache.tryGet path with
@@ -29,6 +29,8 @@ type WebLogMiddleware (next : RequestDelegate, log : ILogger<WebLogMiddleware>)
open System
open Microsoft.Extensions.DependencyInjection
open MyWebLog.Data
open Newtonsoft.Json
open Npgsql

/// Logic to obtain a data connection and implementation based on configured values
module DataImplementation =
@@ -37,29 +39,42 @@ module DataImplementation =
open RethinkDb.Driver.FSharp
open RethinkDb.Driver.Net

/// Create an NpgsqlDataSource from the connection string, configuring appropriately
let createNpgsqlDataSource (cfg : IConfiguration) =
let builder = NpgsqlDataSourceBuilder (cfg.GetConnectionString "PostgreSQL")
let _ = builder.UseNodaTime ()
// let _ = builder.UseLoggerFactory(LoggerFactory.Create(fun it -> it.AddConsole () |> ignore))
builder.Build ()

/// Get the configured data implementation
let get (sp : IServiceProvider) : IData =
let config = sp.GetRequiredService<IConfiguration> ()
let await it = (Async.AwaitTask >> Async.RunSynchronously) it
let connStr name = config.GetConnectionString name
let hasConnStr name = (connStr >> isNull >> not) name
let createSQLite connStr =
let createSQLite connStr : IData =
let log = sp.GetRequiredService<ILogger<SQLiteData>> ()
let conn = new SqliteConnection (connStr)
log.LogInformation $"Using SQLite database {conn.DataSource}"
await (SQLiteData.setUpConnection conn)
SQLiteData (conn, log)
SQLiteData (conn, log, Json.configure (JsonSerializer.CreateDefault ()))

if hasConnStr "SQLite" then
upcast createSQLite (connStr "SQLite")
createSQLite (connStr "SQLite")
elif hasConnStr "RethinkDB" then
let log = sp.GetRequiredService<ILogger<RethinkDbData>> ()
Json.all () |> Seq.iter Converter.Serializer.Converters.Add
let _ = Json.configure Converter.Serializer
let rethinkCfg = DataConfig.FromUri (connStr "RethinkDB")
let conn = await (rethinkCfg.CreateConnectionAsync log)
upcast RethinkDbData (conn, rethinkCfg, log)
RethinkDbData (conn, rethinkCfg, log)
elif hasConnStr "PostgreSQL" then
let source = createNpgsqlDataSource config
use conn = source.CreateConnection ()
let log = sp.GetRequiredService<ILogger<PostgresData>> ()
log.LogInformation $"Using PostgreSQL database {conn.Database}"
PostgresData (source, log, Json.configure (JsonSerializer.CreateDefault ()))
else
upcast createSQLite "Data Source=./myweblog.db;Cache=Shared"
createSQLite "Data Source=./myweblog.db;Cache=Shared"

open System.Threading.Tasks
@@ -76,6 +91,7 @@ let showHelp () =
printfn "init Initializes a new web log"
printfn "load-theme Load a theme"
printfn "restore Restore a JSON file backup (prompt before overwriting)"
printfn "set-password Set a password for a specific user"
printfn "upgrade-user Upgrade a WebLogAdmin user to a full Administrator"
printfn " "
printfn "For more information on a particular command, run it with no options."
@@ -88,6 +104,7 @@ open Giraffe.EndpointRouting
open Microsoft.AspNetCore.Authentication.Cookies
open Microsoft.AspNetCore.Builder
open Microsoft.AspNetCore.HttpOverrides
open Microsoft.Extensions.Caching.Distributed
open NeoSmart.Caching.Sqlite
open RethinkDB.DistributedCache

@@ -110,6 +127,7 @@ let rec main args =

let sp = builder.Services.BuildServiceProvider ()
let data = DataImplementation.get sp
let _ = builder.Services.AddSingleton<JsonSerializer> data.Serializer

task {
do! data.StartUp ()
@@ -121,23 +139,35 @@ let rec main args =
match data with
| :? RethinkDbData as rethink ->
// A RethinkDB connection is designed to work as a singleton
builder.Services.AddSingleton<IData> data |> ignore
let _ = builder.Services.AddSingleton<IData> data
let _ =
builder.Services.AddDistributedRethinkDBCache (fun opts ->
opts.TableName <- "Session"
opts.Connection <- rethink.Conn)
|> ignore
()
| :? SQLiteData as sql ->
// ADO.NET connections are designed to work as per-request instantiation
let cfg = sp.GetRequiredService<IConfiguration> ()
let _ =
builder.Services.AddScoped<SqliteConnection> (fun sp ->
let conn = new SqliteConnection (sql.Conn.ConnectionString)
SQLiteData.setUpConnection conn |> Async.AwaitTask |> Async.RunSynchronously
conn)
|> ignore
builder.Services.AddScoped<IData, SQLiteData> () |> ignore
let _ = builder.Services.AddScoped<IData, SQLiteData> () |> ignore
// Use SQLite for caching as well
let cachePath = defaultArg (Option.ofObj (cfg.GetConnectionString "SQLiteCachePath")) "./session.db"
builder.Services.AddSqliteCache (fun o -> o.CachePath <- cachePath) |> ignore
let _ = builder.Services.AddSqliteCache (fun o -> o.CachePath <- cachePath)
()
| :? PostgresData as postgres ->
// ADO.NET Data Sources are designed to work as singletons
let _ =
builder.Services.AddSingleton<NpgsqlDataSource> (fun sp ->
DataImplementation.createNpgsqlDataSource (sp.GetRequiredService<IConfiguration> ()))
let _ = builder.Services.AddSingleton<IData> postgres
let _ =
builder.Services.AddSingleton<IDistributedCache> (fun _ ->
Postgres.DistributedCache () :> IDistributedCache)
()
| _ -> ()

let _ = builder.Services.AddSession(fun opts ->
@@ -159,6 +189,7 @@ let rec main args =
| Some it when it = "restore" -> Maintenance.Backup.restoreFromBackup args app.Services
| Some it when it = "do-restore" -> Maintenance.Backup.restoreFromBackup args app.Services
| Some it when it = "upgrade-user" -> Maintenance.upgradeUser args app.Services
| Some it when it = "set-password" -> Maintenance.setPassword args app.Services
| Some it when it = "help" -> showHelp ()
| Some it ->
printfn $"""Unrecognized command "{it}" - valid commands are:"""

@@ -1,5 +1,5 @@
{
"Generator": "myWebLog 2.0-rc1",
"Generator": "myWebLog 2.0",
"Logging": {
"LogLevel": {
"MyWebLog.Handlers": "Information"

@@ -1,13 +1,19 @@
<h2 class="my-3">{{ page_title }}</h2>
<article>
<form action="{{ "admin/settings/rss/save" | relative_link }}" method="post">
<input type="hidden" name="{{ csrf.form_field_name }}" value="{{ csrf.request_token }}">
<input type="hidden" name="Id" value="{{ model.id }}">
<input
type="hidden"
name="{{ csrf.form_field_name }}"
value="{{ csrf.request_token }}">
<input
type="hidden"
name="Id"
value="{{ model.id }}">
{%- assign typ = model.source_type -%}
<div class="container">
<div class="row pb-3">
<div class="col">
<a href="{{ "admin/settings/rss" | relative_link }}">« Back to RSS Settings</a>
<a href="{{ "admin/settings#rss-settings" | relative_link }}">« Back to Settings</a>
</div>
</div>
<div class="row pb-3">
@@ -17,7 +23,12 @@
<div class="row">
<div class="col">
<div class="form-floating">
<input type="text" name="Path" id="path" class="form-control" placeholder="Relative Feed Path"
<input
type="text"
name="Path"
id="path"
class="form-control"
placeholder="Relative Feed Path"
value="{{ model.path }}">
<label for="path">Relative Feed Path</label>
<span class="form-text fst-italic">Appended to {{ web_log.url_base }}/</span>
@@ -27,8 +38,13 @@
<div class="row">
<div class="col py-3 d-flex align-self-center justify-content-center">
<div class="form-check form-switch">
<input type="checkbox" name="IsPodcast" id="isPodcast" class="form-check-input" value="true"
{%- if model.is_podcast %} checked="checked"{% endif %} onclick="Admin.checkPodcast()">
<input
type="checkbox"
name="IsPodcast"
id="isPodcast"
class="form-check-input"
value="true"
{%- if model.is_podcast %}checked="checked"{% endif %}onclick="Admin.checkPodcast()">
<label for="isPodcast" class="form-check-label">This Is a Podcast Feed</label>
</div>
</div>
@@ -41,21 +57,31 @@
<div class="row d-flex align-items-center">
<div class="col-1 d-flex justify-content-end pb-3">
<div class="form-check form-check-inline me-0">
<input type="radio" name="SourceType" id="sourceTypeCat" class="form-check-input" value="category"
{%- unless typ == "tag" %} checked="checked" {% endunless -%}
onclick="Admin.customFeedBy('category')">
<input
type="radio"
name="SourceType"
id="sourceTypeCat"
class="form-check-input"
value="category"
{%- unless typ == "tag" %}checked="checked"{% endunless -%}onclick="Admin.customFeedBy('category')">
<label for="sourceTypeCat" class="form-check-label d-none">Category</label>
</div>
</div>
<div class="col-11 pb-3">
<div class="form-floating">
<select name="SourceValue" id="sourceValueCat" class="form-control" required
{%- if typ == "tag" %} disabled="disabled"{% endif %}>
<select
name="SourceValue"
id="sourceValueCat"
class="form-control"
required
{%- if typ == "tag" %}disabled="disabled"{% endif %}>
<option value="">– Select Category –</option>
{% for cat in categories -%}
<option value="{{ cat.id }}"
{%- if typ != "tag" and model.source_value == cat.id %} selected="selected"{% endif -%}>
{% for it in cat.parent_names %}{{ it }} ⟩ {% endfor %}{{ cat.name }}
<option value="{{ cat.id }}"{%- if typ != "tag" and model.source_value == cat.id %}selected="selected"{% endif -%}>
{% for it in cat.parent_names %}
{{ it }} ⟩
{% endfor %}
{{ cat.name }}
</option>
{%- endfor %}
</select>
@@ -64,16 +90,25 @@
</div>
<div class="col-1 d-flex justify-content-end pb-3">
<div class="form-check form-check-inline me-0">
<input type="radio" name="SourceType" id="sourceTypeTag" class="form-check-input" value="tag"
{%- if typ == "tag" %} checked="checked"{% endif %} onclick="Admin.customFeedBy('tag')">
<input
type="radio"
name="SourceType"
id="sourceTypeTag"
class="form-check-input"
value="tag"
{%- if typ == "tag" %}checked="checked"{% endif %}onclick="Admin.customFeedBy('tag')">
<label for="sourceTypeTag" class="form-check-label d-none">Tag</label>
</div>
</div>
<div class="col-11 pb-3">
<div class="form-floating">
<input type="text" name="SourceValue" id="sourceValueTag" class="form-control" placeholder="Tag"
{%- unless typ == "tag" %} disabled="disabled"{% endunless %} required
{%- if typ == "tag" %} value="{{ model.source_value }}"{% endif %}>
<input
type="text"
name="SourceValue"
id="sourceValueTag"
class="form-control"
placeholder="Tag"
{%- unless typ == "tag" %}disabled="disabled"{% endunless %}required{%- if typ == "tag" %}value="{{ model.source_value }}"{% endif %}>
<label for="sourceValueTag">Tag</label>
</div>
</div>
@@ -83,27 +118,47 @@
</div>
<div class="row pb-3">
<div class="col">
<fieldset class="container" id="podcastFields"{% unless model.is_podcast %} disabled="disabled"{%endunless%}>
<fieldset
class="container"
id="podcastFields"
{% unless model.is_podcast %}disabled="disabled"{% endunless %}>
<legend>Podcast Settings</legend>
<div class="row">
<div class="col-12 col-md-5 col-lg-4 offset-lg-1 pb-3">
<div class="form-floating">
<input type="text" name="Title" id="title" class="form-control" placeholder="Title" required
<input
type="text"
name="Title"
id="title"
class="form-control"
placeholder="Title"
required
value="{{ model.title }}">
<label for="title">Title</label>
</div>
</div>
<div class="col-12 col-md-4 col-lg-4 pb-3">
<div class="form-floating">
<input type="text" name="Subtitle" id="subtitle" class="form-control" placeholder="Subtitle"
<input
type="text"
name="Subtitle"
id="subtitle"
class="form-control"
placeholder="Subtitle"
value="{{ model.subtitle }}">
<label for="subtitle">Podcast Subtitle</label>
</div>
</div>
<div class="col-12 col-md-3 col-lg-2 pb-3">
<div class="form-floating">
<input type="number" name="ItemsInFeed" id="itemsInFeed" class="form-control" placeholder="Items"
required value="{{ model.items_in_feed }}">
<input
type="number"
name="ItemsInFeed"
id="itemsInFeed"
class="form-control"
placeholder="Items"
required
value="{{ model.items_in_feed }}">
<label for="itemsInFeed"># Episodes</label>
</div>
</div>
@@ -111,11 +166,19 @@
<div class="row">
<div class="col-12 col-md-5 col-lg-4 offset-lg-1 pb-3">
<div class="form-floating">
<input type="text" name="AppleCategory" id="appleCategory" class="form-control"
placeholder="iTunes Category" required value="{{ model.apple_category }}">
<input
type="text"
name="AppleCategory"
id="appleCategory"
class="form-control"
placeholder="iTunes Category"
required
value="{{ model.apple_category }}">
<label for="appleCategory">iTunes Category</label>
<span class="form-text fst-italic">
<a href="https://www.thepodcasthost.com/planning/itunes-podcast-categories/" target="_blank"
<a
href="https://www.thepodcasthost.com/planning/itunes-podcast-categories/"
target="_blank"
rel="noopener">
iTunes Category / Subcategory List
</a>
@@ -124,17 +187,26 @@
</div>
<div class="col-12 col-md-4 pb-3">
<div class="form-floating">
<input type="text" name="AppleSubcategory" id="appleSubcategory" class="form-control"
placeholder="iTunes Subcategory" value="{{ model.apple_subcategory }}">
<input
type="text"
name="AppleSubcategory"
id="appleSubcategory"
class="form-control"
placeholder="iTunes Subcategory"
value="{{ model.apple_subcategory }}">
<label for="appleSubcategory">iTunes Subcategory</label>
</div>
</div>
<div class="col-12 col-md-3 col-lg-2 pb-3">
<div class="form-floating">
<select name="Explicit" id="explicit" class="form-control" required>
<option value="yes"{% if model.explicit == "yes" %} selected="selected"{% endif %}>Yes</option>
<option value="no"{% if model.explicit == "no" %} selected="selected"{% endif %}>No</option>
<option value="clean"{% if model.explicit == "clean" %} selected="selected"{% endif %}>
<select
name="Explicit"
id="explicit"
class="form-control"
required>
<option value="yes" {% if model.explicit == "yes" %}selected="selected"{% endif %}>Yes</option>
<option value="no" {% if model.explicit == "no" %}selected="selected"{% endif %}>No</option>
<option value="clean" {% if model.explicit == "clean" %}selected="selected"{% endif %}>
Clean
</option>
</select>
@@ -145,14 +217,26 @@
<div class="row">
<div class="col-12 col-md-6 col-lg-4 offset-xxl-1 pb-3">
<div class="form-floating">
<input type="text" name="DisplayedAuthor" id="displayedAuthor" class="form-control"
placeholder="Author" required value="{{ model.displayed_author }}">
<input
type="text"
name="DisplayedAuthor"
id="displayedAuthor"
class="form-control"
placeholder="Author"
required
value="{{ model.displayed_author }}">
<label for="displayedAuthor">Displayed Author</label>
</div>
</div>
<div class="col-12 col-md-6 col-lg-4 pb-3">
<div class="form-floating">
<input type="email" name="Email" id="email" class="form-control" placeholder="Email" required
<input
type="email"
name="Email"
id="email"
class="form-control"
placeholder="Email"
required
value="{{ model.email }}">
<label for="email">Author E-mail</label>
<span class="form-text fst-italic">For iTunes, must match registered e-mail</span>
@@ -160,15 +244,26 @@
</div>
<div class="col-12 col-sm-5 col-md-4 col-lg-4 col-xl-3 offset-xl-1 col-xxl-2 offset-xxl-0">
<div class="form-floating">
<input type="text" name="DefaultMediaType" id="defaultMediaType" class="form-control"
placeholder="Media Type" value="{{ model.default_media_type }}">
<input
type="text"
name="DefaultMediaType"
id="defaultMediaType"
class="form-control"
placeholder="Media Type"
value="{{ model.default_media_type }}">
<label for="defaultMediaType">Default Media Type</label>
<span class="form-text fst-italic">Optional; blank for no default</span>
</div>
</div>
<div class="col-12 col-sm-7 col-md-8 col-lg-10 offset-lg-1">
<div class="form-floating">
<input type="text" name="ImageUrl" id="imageUrl" class="form-control" placeholder="Image URL" required
<input
type="text"
name="ImageUrl"
id="imageUrl"
class="form-control"
placeholder="Image URL"
required
value="{{ model.image_url }}">
<label for="imageUrl">Image URL</label>
<span class="form-text fst-italic">Relative URL will be appended to {{ web_log.url_base }}/</span>
@@ -178,7 +273,13 @@
<div class="row pb-3">
<div class="col-12 col-lg-10 offset-lg-1">
<div class="form-floating">
<input type="text" name="Summary" id="summary" class="form-control" placeholder="Summary" required
<input
type="text"
name="Summary"
id="summary"
class="form-control"
placeholder="Summary"
required
value="{{ model.summary }}">
<label for="summary">Summary</label>
<span class="form-text fst-italic">Displayed in podcast directories</span>
@@ -188,8 +289,13 @@
<div class="row pb-3">
<div class="col-12 col-lg-10 offset-lg-1">
<div class="form-floating">
<input type="text" name="MediaBaseUrl" id="mediaBaseUrl" class="form-control"
placeholder="Media Base URL" value="{{ model.media_base_url }}">
<input
type="text"
name="MediaBaseUrl"
id="mediaBaseUrl"
class="form-control"
placeholder="Media Base URL"
value="{{ model.media_base_url }}">
<label for="mediaBaseUrl">Media Base URL</label>
<span class="form-text fst-italic">Optional; prepended to episode media file if present</span>
</div>
@@ -198,7 +304,12 @@
<div class="row">
<div class="col-12 col-lg-5 offset-lg-1 pb-3">
<div class="form-floating">
<input type="text" name="FundingUrl" id="fundingUrl" class="form-control" placeholder="Funding URL"
<input
type="text"
name="FundingUrl"
id="fundingUrl"
class="form-control"
placeholder="Funding URL"
value="{{ model.funding_url }}">
<label for="fundingUrl">Funding URL</label>
<span class="form-text fst-italic">
@@ -208,8 +319,14 @@
</div>
<div class="col-12 col-lg-5 pb-3">
<div class="form-floating">
<input type="text" name="FundingText" id="fundingText" class="form-control" maxlength="128"
placeholder="Funding Text" value="{{ model.funding_text }}">
<input
type="text"
name="FundingText"
id="fundingText"
class="form-control"
maxlength="128"
placeholder="Funding Text"
value="{{ model.funding_text }}">
<label for="fundingText">Funding Text</label>
<span class="form-text fst-italic">Optional; text for the funding link</span>
</div>
@@ -218,21 +335,28 @@
<div class="row pb-3">
<div class="col-8 col-lg-5 offset-lg-1 pb-3">
<div class="form-floating">
<input type="text" name="PodcastGuid" id="guid" class="form-control" placeholder="GUID"
<input
type="text"
name="PodcastGuid"
id="guid"
class="form-control"
placeholder="GUID"
value="{{ model.podcast_guid }}">
<label for="guid">Podcast GUID</label>
<span class="form-text fst-italic">
Optional; v5 UUID uniquely identifying this podcast; once entered, do not change this value
(<a href="https://github.com/Podcastindex-org/podcast-namespace/blob/main/docs/1.0.md#guid"
target="_blank">documentation</a>)
(<a href="https://github.com/Podcastindex-org/podcast-namespace/blob/main/docs/1.0.md#guid" target="_blank">documentation</a>)
</span>
</div>
</div>
<div class="col-4 col-lg-3 offset-lg-2 pb-3">
<div class="form-floating">
<select name="Medium" id="medium" class="form-control">
<select
name="Medium"
id="medium"
class="form-control">
{% for med in medium_values -%}
<option value="{{ med[0] }}"{% if model.medium == med[0] %} selected{% endif %}>
<option value="{{ med[0] }}"{% if model.medium == med[0] %}selected{% endif %}>
{{ med[1] }}
</option>
{%- endfor %}
@@ -240,8 +364,7 @@
<label for="medium">Medium</label>
<span class="form-text fst-italic">
Optional; medium of the podcast content
(<a href="https://github.com/Podcastindex-org/podcast-namespace/blob/main/docs/1.0.md#medium"
target="_blank">documentation</a>)
(<a href="https://github.com/Podcastindex-org/podcast-namespace/blob/main/docs/1.0.md#medium" target="_blank">documentation</a>)
</span>
</div>
</div>

@@ -6,7 +6,7 @@
{%- assign title_col = "col-12 col-md-5" -%}
{%- assign link_col = "col-12 col-md-5" -%}
{%- assign upd8_col = "col-12 col-md-2" -%}
<form method="post" class="container" hx-target="body">
<form method="post" class="container mb-3" hx-target="body">
<input type="hidden" name="{{ csrf.form_field_name }}" value="{{ csrf.request_token }}">
<div class="row mwl-table-heading">
<div class="{{ title_col }}">
@@ -49,7 +49,7 @@
</div>
{%- endfor %}
</form>
{% if page_nbr > 1 or page_count == 25 %}
{% if page_nbr > 1 or has_next %}
<div class="d-flex justify-content-evenly mb-3">
<div>
{% if page_nbr > 1 %}
@@ -61,7 +61,7 @@
{% endif %}
</div>
<div class="text-right">
{% if page_count == 25 %}
{% if has_next %}
<p>
<a class="btn btn-secondary" href="{{ "admin/pages" | append: next_page | relative_link }}">
Next »

@@ -3,7 +3,7 @@
<a href="{{ "admin/post/new/edit" | relative_link }}" class="btn btn-primary btn-sm mb-3">Write a New Post</a>
{%- assign post_count = model.posts | size -%}
{%- if post_count > 0 %}
<form method="post" class="container" hx-target="body">
<form method="post" class="container mb-3" hx-target="body">
<input type="hidden" name="{{ csrf.form_field_name }}" value="{{ csrf.request_token }}">
{%- assign date_col = "col-xs-12 col-md-3 col-lg-2" -%}
{%- assign title_col = "col-xs-12 col-md-7 col-lg-6 col-xl-5 col-xxl-4" -%}

@@ -1,2 +1,2 @@
myWebLog Admin
2.0.0-rc1
2.0.0
@@ -334,16 +334,12 @@ this.Admin = {
const theToast = new bootstrap.Toast(toast, options)
theToast.show()
})
}
}
},

htmx.on("htmx:afterOnLoad", function (evt) {
const hdrs = evt.detail.xhr.getAllResponseHeaders()
// Show messages if there were any in the response
if (hdrs.indexOf("x-message") >= 0) {
Admin.showMessage(evt.detail.xhr.getResponseHeader("x-message"))
}
// Initialize any toasts that were pre-rendered from the server
/**
* Initialize any toasts that were pre-rendered from the server
*/
showPreRenderedMessages() {
[...document.querySelectorAll(".toast")].forEach(el => {
if (el.getAttribute("data-mwl-shown") === "true" && el.className.indexOf("hide") >= 0) {
document.removeChild(el)
@@ -355,6 +351,17 @@ htmx.on("htmx:afterOnLoad", function (evt) {
el.setAttribute("data-mwl-shown", "true")
}
})
}
}

htmx.on("htmx:afterOnLoad", function (evt) {
const hdrs = evt.detail.xhr.getAllResponseHeaders()
// Initialize any toasts that were pre-rendered from the server
Admin.showPreRenderedMessages()
// Show messages if there were any in the response
if (hdrs.indexOf("x-message") >= 0) {
Admin.showMessage(evt.detail.xhr.getResponseHeader("x-message"))
}
})

htmx.on("htmx:responseError", function (evt) {
@@ -365,3 +372,5 @@ htmx.on("htmx:responseError", function (evt) {
Admin.showMessage(`danger|||${xhr.status}: ${xhr.statusText}`)
}
})

document.addEventListener("DOMContentLoaded", Admin.showPreRenderedMessages, { once: true})

@@ -1,2 +1,2 @@
myWebLog Default Theme
2.0.0-rc1
2.0.0