Compare commits

...

10 Commits

Author SHA1 Message Date
f4be57b665 v2.2
Reviewed-on: #51
2024-06-20 00:08:40 +00:00
7f94e0beef Remove .NET 7 from build target (#48) 2024-06-19 20:07:45 -04:00
f59566a3d3 Create theme dir if needed (#49)
- Use Path.Combine throughout
- Update theme versions
- Update generator version
2024-06-19 17:02:05 -04:00
f2f766fc05 Update htmx to v2.0.0 (#50)
- Also update all other deps
2024-06-19 16:17:45 -04:00
75c4d4f991 Tweaks to v2.2 data migration (#45) 2024-06-19 16:04:53 -04:00
b50d0d9884 Drop .NET 7 support (#48)
- Bump version to 2.2
2024-06-18 22:06:02 -04:00
7ae15b9e93 Force URLs and e-mail to be lowercase (#45)
- Added v2.2 migration to lower existing e-mails
2024-06-18 22:01:41 -04:00
823286255b Fix PostgreSQL v2.1 migration (#44)
fixes #43
2024-03-28 22:25:09 -04:00
f1a7e55f3e Version 2.1 (#41)
- Add full chapter support (#6)
- Add built-in redirect functionality (#39)
- Support building Docker containers for release (#38)
- Support canonical domain configuration (#37)
- Add unit tests for domain/models and integration tests for all three data stores
- Convert SQLite storage to use JSON documents, similar to PostgreSQL
- Convert admin templates to Giraffe View Engine (from Liquid)
- Add .NET 8 support
2024-03-26 20:13:28 -04:00
7b325dc19e v2 (#36)
* Use PostgreSQL JSON-based data implementation
* Fix back link on RSS settings page (#34)
* Show theme upload messages (#28)
* Fix admin page list paging (#35)
* Add db migrations for all stores
* Support both .NET 6 and 7
2023-02-26 13:01:21 -05:00
121 changed files with 15504 additions and 9633 deletions


@ -1,12 +0,0 @@
{
"version": 1,
"isRoot": true,
"tools": {
"fake-cli": {
"version": "5.22.0",
"commands": [
"fake"
]
}
}
}

99
.github/workflows/ci.yml vendored Normal file

@ -0,0 +1,99 @@
name: Continuous Integration
on:
push:
branches:
- main
pull_request:
branches:
- main
workflow_dispatch:
env:
MWL_TEST_RETHINK_URI: rethinkdb://localhost/mwl_test
jobs:
build_and_test:
name: Build / Test
runs-on: ubuntu-latest
strategy:
matrix:
dotnet-version:
- "6.0"
- "7.0"
- "8.0"
services:
postgres:
image: postgres:latest
env:
POSTGRES_PASSWORD: postgres
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
rethink:
image: rethinkdb:latest
ports:
- 28015:28015
steps:
- name: Check Out Code
uses: actions/checkout@v4
- name: Setup .NET Core SDK
uses: actions/setup-dotnet@v4.0.0
with:
dotnet-version: 8.x
- name: Restore dependencies
run: dotnet restore src/MyWebLog.sln
- name: Build (${{ matrix.dotnet-version }})
run: dotnet build src/MyWebLog.sln -f net${{ matrix.dotnet-version }}
- name: Test (${{ matrix.dotnet-version }})
run: cd src/MyWebLog.Tests; dotnet run -f net${{ matrix.dotnet-version }}
publish:
name: Publish Packages
runs-on: ubuntu-latest
needs: build_and_test
strategy:
matrix:
ver:
- "net6.0"
- "net7.0"
- "net8.0"
os:
- "linux-x64"
- "win-x64"
include:
- os: "linux-x64"
bz2: true
- os: "win-x64"
zip: true
steps:
- name: Check Out Code
uses: actions/checkout@v4
- name: Setup .NET Core SDK
uses: actions/setup-dotnet@v4.0.0
with:
dotnet-version: 8.x
- name: Publish (Release)
run: dotnet publish -c Release -f ${{ matrix.ver }} -r ${{ matrix.os }} src/MyWebLog/MyWebLog.fsproj
- name: Zip Admin Theme
run: cd src/admin-theme; zip -r ../MyWebLog/bin/Release/${{ matrix.ver }}/${{ matrix.os }}/publish/admin-theme.zip *; cd ../..
- name: Zip Default Theme
run: cd src/default-theme; zip -r ../MyWebLog/bin/Release/${{ matrix.ver }}/${{ matrix.os }}/publish/default-theme.zip *; cd ../..
- if: ${{ matrix.bz2 }}
name: Create .tar.bz2 Archive
run: tar cfj myWebLog-${{ matrix.ver }}-${{ matrix.os }}.tar.bz2 -C src/MyWebLog/bin/Release/${{ matrix.ver }}/${{ matrix.os }}/publish .
- if: ${{ matrix.zip }}
name: Create .zip Archive
run: cd src/MyWebLog/bin/Release/${{ matrix.ver }}/${{ matrix.os }}/publish; zip -r myWebLog-${{ matrix.ver }}-${{ matrix.os }}.zip *; cp myWeb*.zip ../../../../../../..; cd ../../../../../../..
- name: Upload Artifacts
uses: actions/upload-artifact@v4
with:
name: package-${{ matrix.ver }}-${{ matrix.os }}
path: |
*x64.zip
*.bz2

3
.gitignore vendored

@ -261,7 +261,8 @@ src/MyWebLog/wwwroot/img/daniel-j-summers
src/MyWebLog/wwwroot/img/bit-badger
.ionide
.vscode
src/MyWebLog/appsettings.Production.json
# SQLite database files
src/MyWebLog/*.db*
src/MyWebLog/data/*.db*

166
build.fs Normal file

@ -0,0 +1,166 @@
open System.IO
open Fake.Core
open Fake.DotNet
open Fake.IO
open Fake.IO.Globbing.Operators
let execContext = Context.FakeExecutionContext.Create false "build.fsx" []
Context.setExecutionContext (Context.RuntimeContext.Fake execContext)
/// The output directory for release ZIPs
let releasePath = "releases"
/// The path to the main project
let projectPath = "src/MyWebLog"
/// The path and name of the main project
let projName = $"{projectPath}/MyWebLog.fsproj"
/// The version being packaged (extracted from appsettings.json)
let version =
let settings = File.ReadAllText $"{projectPath}/appsettings.json"
let generator = settings.Substring (settings.IndexOf "\"Generator\":")
let appVersion = generator.Replace("\"Generator\": \"", "")
let appVersion = appVersion.Substring (0, appVersion.IndexOf "\"")
appVersion.Split ' ' |> Array.last
/// Zip a theme distributed with myWebLog
let zipTheme (name : string) (_ : TargetParameter) =
let path = $"src/{name}-theme"
!! $"{path}/**/*"
|> Zip.filesAsSpecs path
|> Seq.filter (fun (_, name) -> not (name.EndsWith ".zip"))
|> Zip.zipSpec $"{releasePath}/{name}-theme.zip"
/// Frameworks supported by this build
let frameworks = [ "net6.0"; "net8.0" ]
/// Publish the project for the given runtime ID
let publishFor rid (_ : TargetParameter) =
frameworks
|> List.iter (fun fwk ->
DotNet.publish
(fun opts ->
{ opts with Runtime = Some rid; SelfContained = Some false; NoLogo = true; Framework = Some fwk })
projName)
/// Package published output for the given runtime ID
let packageFor rid (_ : TargetParameter) =
frameworks
|> List.iter (fun fwk ->
let path = $"{projectPath}/bin/Release/{fwk}/%s{rid}/publish"
let prodSettings = $"{path}/appsettings.Production.json"
if File.exists prodSettings then File.delete prodSettings
[ !! $"{path}/**/*"
|> Zip.filesAsSpecs path
|> Seq.map (fun (orig, dest) ->
orig, if dest.StartsWith "MyWebLog" then dest.Replace ("MyWebLog", "myWebLog") else dest)
Seq.singleton ($"{releasePath}/admin-theme.zip", "admin-theme.zip")
Seq.singleton ($"{releasePath}/default-theme.zip", "default-theme.zip")
]
|> Seq.concat
|> Zip.zipSpec $"{releasePath}/myWebLog-{version}.{fwk}.{rid}.zip")
Target.create "Clean" (fun _ ->
!! "src/**/bin"
++ "src/**/obj"
|> Shell.cleanDirs
Shell.cleanDir releasePath
)
Target.create "Build" (fun _ ->
DotNet.build (fun opts -> { opts with NoLogo = true }) projName
)
Target.create "ZipAdminTheme" (zipTheme "admin")
Target.create "ZipDefaultTheme" (zipTheme "default")
Target.create "PublishWindows" (publishFor "win-x64")
Target.create "PackageWindows" (packageFor "win-x64")
Target.create "PublishLinux" (publishFor "linux-x64")
Target.create "PackageLinux" (packageFor "linux-x64")
Target.create "RepackageLinux" (fun _ ->
let workDir = $"{releasePath}/linux"
frameworks
|> List.iter (fun fwk ->
let zipArchive = $"{releasePath}/myWebLog-{version}.{fwk}.linux-x64.zip"
let sh command args =
CreateProcess.fromRawCommand command args
|> CreateProcess.redirectOutput
|> Proc.run
|> ignore
Shell.mkdir workDir
Zip.unzip workDir zipArchive
Shell.cd workDir
sh "chmod" [ "+x"; "./myWebLog" ]
sh "tar" [ "cfj"; $"../myWebLog-{version}.{fwk}.linux-x64.tar.bz2"; "." ]
Shell.cd "../.."
Shell.rm zipArchive)
Shell.rm_rf workDir
)
Target.create "All" ignore
Target.create "RemoveThemeArchives" (fun _ ->
Shell.rm $"{releasePath}/admin-theme.zip"
Shell.rm $"{releasePath}/default-theme.zip"
)
Target.create "CI" ignore
open Fake.Core.TargetOperators
let dependencies = [
"Clean"
==> "All"
"Clean"
?=> "Build"
==> "All"
"Clean"
?=> "ZipDefaultTheme"
==> "All"
"Clean"
?=> "ZipAdminTheme"
==> "All"
"Build"
==> "PublishWindows"
==> "All"
"Build"
==> "PublishLinux"
==> "All"
"PublishWindows"
==> "PackageWindows"
==> "All"
"PublishLinux"
==> "PackageLinux"
==> "All"
"PackageLinux"
==> "RepackageLinux"
==> "All"
"All"
==> "RemoveThemeArchives"
==> "CI"
]
[<EntryPoint>]
let main args =
try
match args with
| [| target |] -> Target.runOrDefault target
| _ -> Target.runOrDefault "All"
0
with e ->
printfn "%A" e
1
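
The version binding in build.fs above scrapes the generator string out of appsettings.json with plain string operations rather than a JSON parser. A minimal sketch of the same parsing applied to a literal settings fragment; the Generator value shown is a hypothetical example, not taken from the repository:

let version =
    let settings = """{ "Generator": "myWebLog 2.2.0" }"""
    let generator = settings.Substring (settings.IndexOf "\"Generator\":")
    let appVersion = generator.Replace("\"Generator\": \"", "")
    let appVersion = appVersion.Substring (0, appVersion.IndexOf "\"")
    // "myWebLog 2.2.0" -> "2.2.0"; the last space-delimited token is the packaged version
    appVersion.Split ' ' |> Array.last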

20
build.fsproj Normal file

@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<Compile Include="build.fs" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Fake.Core.Target" Version="6.0.0" />
<PackageReference Include="Fake.DotNet.Cli" Version="6.0.0" />
<PackageReference Include="Fake.IO.FileSystem" Version="6.0.0" />
<PackageReference Include="Fake.IO.Zip" Version="6.0.0" />
<PackageReference Include="MSBuild.StructuredLogger" Version="2.2.206" />
</ItemGroup>
</Project>

147
build.fsx

@ -1,147 +0,0 @@
#r "paket:
nuget Fake.DotNet.Cli
nuget Fake.IO.FileSystem
nuget Fake.IO.Zip
nuget Fake.Core.Target //"
#load ".fake/build.fsx/intellisense.fsx"
open System.IO
open Fake.Core
open Fake.DotNet
open Fake.IO
open Fake.IO.Globbing.Operators
open Fake.Core.TargetOperators
Target.initEnvironment ()
/// The output directory for release ZIPs
let releasePath = "releases"
/// The path to the main project
let projectPath = "src/MyWebLog"
/// The path and name of the main project
let projName = $"{projectPath}/MyWebLog.fsproj"
/// The version being packaged (extracted from appsettings.json)
let version =
let settings = File.ReadAllText $"{projectPath}/appsettings.json"
let generator = settings.Substring (settings.IndexOf "\"Generator\":")
let appVersion = generator.Replace("\"Generator\": \"", "")
let appVersion = appVersion.Substring (0, appVersion.IndexOf "\"")
appVersion.Split ' ' |> Array.last
/// Zip a theme distributed with myWebLog
let zipTheme (name : string) (_ : TargetParameter) =
let path = $"src/{name}-theme"
!! $"{path}/**/*"
|> Zip.filesAsSpecs path
|> Seq.filter (fun (_, name) -> not (name.EndsWith ".zip"))
|> Zip.zipSpec $"{releasePath}/{name}-theme.zip"
/// Publish the project for the given runtime ID
let publishFor rid (_ : TargetParameter) =
DotNet.publish (fun opts -> { opts with Runtime = Some rid; SelfContained = Some false; NoLogo = true }) projName
/// Package published output for the given runtime ID
let packageFor (rid : string) (_ : TargetParameter) =
let path = $"{projectPath}/bin/Release/net6.0/{rid}/publish"
let prodSettings = $"{path}/appsettings.Production.json"
if File.exists prodSettings then File.delete prodSettings
[ !! $"{path}/**/*"
|> Zip.filesAsSpecs path
|> Seq.map (fun (orig, dest) ->
orig, if dest.StartsWith "MyWebLog" then dest.Replace ("MyWebLog", "myWebLog") else dest)
Seq.singleton ($"{releasePath}/admin-theme.zip", "admin-theme.zip")
Seq.singleton ($"{releasePath}/default-theme.zip", "default-theme.zip")
]
|> Seq.concat
|> Zip.zipSpec $"{releasePath}/myWebLog-{version}.{rid}.zip"
Target.create "Clean" (fun _ ->
!! "src/**/bin"
++ "src/**/obj"
|> Shell.cleanDirs
Shell.cleanDir releasePath
)
Target.create "Build" (fun _ ->
DotNet.build (fun opts -> { opts with NoLogo = true }) projName
)
Target.create "ZipAdminTheme" (zipTheme "admin")
Target.create "ZipDefaultTheme" (zipTheme "default")
Target.create "PublishWindows" (publishFor "win-x64")
Target.create "PackageWindows" (packageFor "win-x64")
Target.create "PublishLinux" (publishFor "linux-x64")
Target.create "PackageLinux" (packageFor "linux-x64")
Target.create "RepackageLinux" (fun _ ->
let workDir = $"{releasePath}/linux"
let zipArchive = $"{releasePath}/myWebLog-{version}.linux-x64.zip"
let sh command args =
CreateProcess.fromRawCommand command args
|> CreateProcess.redirectOutput
|> Proc.run
|> ignore
Shell.mkdir workDir
Zip.unzip workDir zipArchive
Shell.cd workDir
sh "chmod" [ "+x"; "./myWebLog" ]
sh "tar" [ "cfj"; $"../myWebLog-{version}.linux-x64.tar.bz2"; "." ]
Shell.cd "../.."
Shell.rm zipArchive
Shell.rm_rf workDir
)
Target.create "All" ignore
Target.create "RemoveThemeArchives" (fun _ ->
Shell.rm $"{releasePath}/admin-theme.zip"
Shell.rm $"{releasePath}/default-theme.zip"
)
Target.create "CI" ignore
"Clean"
==> "All"
"Clean"
?=> "Build"
==> "All"
"Clean"
?=> "ZipDefaultTheme"
==> "All"
"Clean"
?=> "ZipAdminTheme"
==> "All"
"Build"
==> "PublishWindows"
==> "All"
"Build"
==> "PublishLinux"
==> "All"
"PublishWindows"
==> "PackageWindows"
==> "All"
"PublishLinux"
==> "PackageLinux"
==> "All"
"PackageLinux"
==> "RepackageLinux"
==> "All"
"All"
==> "RemoveThemeArchives"
==> "CI"
Target.runOrDefault "All"


@ -1,227 +0,0 @@
STORAGE: NONE
RESTRICTION: || (== net6.0) (== netstandard2.0)
NUGET
remote: https://api.nuget.org/v3/index.json
BlackFox.VsWhere (1.1)
FSharp.Core (>= 4.2.3)
Microsoft.Win32.Registry (>= 4.7)
Fake.Core.CommandLineParsing (5.22)
FParsec (>= 1.1.1)
FSharp.Core (>= 6.0)
Fake.Core.Context (5.22)
FSharp.Core (>= 6.0)
Fake.Core.Environment (5.22)
FSharp.Core (>= 6.0)
Fake.Core.FakeVar (5.22)
Fake.Core.Context (>= 5.22)
FSharp.Core (>= 6.0)
Fake.Core.Process (5.22)
Fake.Core.Environment (>= 5.22)
Fake.Core.FakeVar (>= 5.22)
Fake.Core.String (>= 5.22)
Fake.Core.Trace (>= 5.22)
Fake.IO.FileSystem (>= 5.22)
FSharp.Core (>= 6.0)
System.Collections.Immutable (>= 5.0)
Fake.Core.SemVer (5.22)
FSharp.Core (>= 6.0)
Fake.Core.String (5.22)
FSharp.Core (>= 6.0)
Fake.Core.Target (5.22)
Fake.Core.CommandLineParsing (>= 5.22)
Fake.Core.Context (>= 5.22)
Fake.Core.Environment (>= 5.22)
Fake.Core.FakeVar (>= 5.22)
Fake.Core.Process (>= 5.22)
Fake.Core.String (>= 5.22)
Fake.Core.Trace (>= 5.22)
FSharp.Control.Reactive (>= 5.0.2)
FSharp.Core (>= 6.0)
Fake.Core.Tasks (5.22)
Fake.Core.Trace (>= 5.22)
FSharp.Core (>= 6.0)
Fake.Core.Trace (5.22)
Fake.Core.Environment (>= 5.22)
Fake.Core.FakeVar (>= 5.22)
FSharp.Core (>= 6.0)
Fake.Core.Xml (5.22)
Fake.Core.String (>= 5.22)
FSharp.Core (>= 6.0)
Fake.DotNet.Cli (5.22)
Fake.Core.Environment (>= 5.22)
Fake.Core.Process (>= 5.22)
Fake.Core.String (>= 5.22)
Fake.Core.Trace (>= 5.22)
Fake.DotNet.MSBuild (>= 5.22)
Fake.DotNet.NuGet (>= 5.22)
Fake.IO.FileSystem (>= 5.22)
FSharp.Core (>= 6.0)
Mono.Posix.NETStandard (>= 1.0)
Newtonsoft.Json (>= 13.0.1)
Fake.DotNet.MSBuild (5.22)
BlackFox.VsWhere (>= 1.1)
Fake.Core.Environment (>= 5.22)
Fake.Core.Process (>= 5.22)
Fake.Core.String (>= 5.22)
Fake.Core.Trace (>= 5.22)
Fake.IO.FileSystem (>= 5.22)
FSharp.Core (>= 6.0)
MSBuild.StructuredLogger (>= 2.1.545)
Fake.DotNet.NuGet (5.22)
Fake.Core.Environment (>= 5.22)
Fake.Core.Process (>= 5.22)
Fake.Core.SemVer (>= 5.22)
Fake.Core.String (>= 5.22)
Fake.Core.Tasks (>= 5.22)
Fake.Core.Trace (>= 5.22)
Fake.Core.Xml (>= 5.22)
Fake.IO.FileSystem (>= 5.22)
Fake.Net.Http (>= 5.22)
FSharp.Core (>= 6.0)
Newtonsoft.Json (>= 13.0.1)
NuGet.Protocol (>= 5.11)
Fake.IO.FileSystem (5.22)
Fake.Core.String (>= 5.22)
FSharp.Core (>= 6.0)
Fake.IO.Zip (5.22)
Fake.Core.String (>= 5.22)
Fake.IO.FileSystem (>= 5.22)
FSharp.Core (>= 6.0)
Fake.Net.Http (5.22)
Fake.Core.Trace (>= 5.22)
FSharp.Core (>= 6.0)
FParsec (1.1.1)
FSharp.Core (>= 4.3.4)
FSharp.Control.Reactive (5.0.5)
FSharp.Core (>= 4.7.2)
System.Reactive (>= 5.0 < 6.0)
FSharp.Core (6.0.5)
Microsoft.Build (17.2)
Microsoft.Build.Framework (>= 17.2) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
Microsoft.NET.StringTools (>= 1.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
Microsoft.Win32.Registry (>= 4.3) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net6.0))
System.Collections.Immutable (>= 5.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
System.Configuration.ConfigurationManager (>= 4.7) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
System.Reflection.Metadata (>= 1.6) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net6.0))
System.Security.Principal.Windows (>= 4.7) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net6.0))
System.Text.Encoding.CodePages (>= 4.0.1) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net6.0))
System.Text.Json (>= 6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
System.Threading.Tasks.Dataflow (>= 6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
Microsoft.Build.Framework (17.2)
Microsoft.Win32.Registry (>= 4.3)
System.Security.Permissions (>= 4.7)
Microsoft.Build.Tasks.Core (17.2)
Microsoft.Build.Framework (>= 17.2)
Microsoft.Build.Utilities.Core (>= 17.2)
Microsoft.NET.StringTools (>= 1.0)
Microsoft.Win32.Registry (>= 4.3)
System.CodeDom (>= 4.4)
System.Collections.Immutable (>= 5.0)
System.Reflection.Metadata (>= 1.6)
System.Resources.Extensions (>= 4.6)
System.Security.Cryptography.Pkcs (>= 4.7)
System.Security.Cryptography.Xml (>= 4.7)
System.Security.Permissions (>= 4.7)
System.Threading.Tasks.Dataflow (>= 6.0)
Microsoft.Build.Utilities.Core (17.2)
Microsoft.Build.Framework (>= 17.2)
Microsoft.NET.StringTools (>= 1.0)
Microsoft.Win32.Registry (>= 4.3)
System.Collections.Immutable (>= 5.0)
System.Configuration.ConfigurationManager (>= 4.7)
System.Security.Permissions (>= 4.7) - restriction: == netstandard2.0
System.Text.Encoding.CodePages (>= 4.0.1) - restriction: == netstandard2.0
Microsoft.NET.StringTools (1.0)
System.Memory (>= 4.5.4)
System.Runtime.CompilerServices.Unsafe (>= 5.0)
Microsoft.NETCore.Platforms (6.0.4) - restriction: || (&& (== net6.0) (< netcoreapp3.1)) (&& (== net6.0) (< netstandard1.2)) (&& (== net6.0) (< netstandard1.3)) (&& (== net6.0) (< netstandard1.5)) (== netstandard2.0)
Microsoft.NETCore.Targets (5.0) - restriction: || (&& (== net6.0) (< netcoreapp3.1)) (&& (== net6.0) (< netstandard1.2)) (&& (== net6.0) (< netstandard1.3)) (&& (== net6.0) (< netstandard1.5)) (== netstandard2.0)
Microsoft.Win32.Registry (5.0)
System.Buffers (>= 4.5.1) - restriction: || (&& (== net6.0) (>= monoandroid) (< netstandard1.3)) (&& (== net6.0) (>= monotouch)) (&& (== net6.0) (< netcoreapp2.0)) (&& (== net6.0) (>= xamarinios)) (&& (== net6.0) (>= xamarinmac)) (&& (== net6.0) (>= xamarintvos)) (&& (== net6.0) (>= xamarinwatchos)) (== netstandard2.0)
System.Memory (>= 4.5.4) - restriction: || (&& (== net6.0) (< netcoreapp2.0)) (&& (== net6.0) (< netcoreapp2.1)) (&& (== net6.0) (>= uap10.1)) (== netstandard2.0)
System.Security.AccessControl (>= 5.0)
System.Security.Principal.Windows (>= 5.0)
Microsoft.Win32.SystemEvents (6.0.1) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.1))
Mono.Posix.NETStandard (1.0)
MSBuild.StructuredLogger (2.1.669)
Microsoft.Build (>= 16.10)
Microsoft.Build.Framework (>= 16.10)
Microsoft.Build.Tasks.Core (>= 16.10)
Microsoft.Build.Utilities.Core (>= 16.10)
Newtonsoft.Json (13.0.1)
NuGet.Common (6.2.1)
NuGet.Frameworks (>= 6.2.1)
NuGet.Configuration (6.2.1)
NuGet.Common (>= 6.2.1)
System.Security.Cryptography.ProtectedData (>= 4.4)
NuGet.Frameworks (6.2.1)
NuGet.Packaging (6.2.1)
Newtonsoft.Json (>= 13.0.1)
NuGet.Configuration (>= 6.2.1)
NuGet.Versioning (>= 6.2.1)
System.Security.Cryptography.Cng (>= 5.0)
System.Security.Cryptography.Pkcs (>= 5.0)
NuGet.Protocol (6.2.1)
NuGet.Packaging (>= 6.2.1)
NuGet.Versioning (6.2.1)
System.Buffers (4.5.1) - restriction: || (&& (== net6.0) (>= monoandroid) (< netstandard1.3)) (&& (== net6.0) (>= monotouch)) (&& (== net6.0) (< netcoreapp2.0)) (&& (== net6.0) (>= xamarinios)) (&& (== net6.0) (>= xamarinmac)) (&& (== net6.0) (>= xamarintvos)) (&& (== net6.0) (>= xamarinwatchos)) (== netstandard2.0)
System.CodeDom (6.0)
System.Collections.Immutable (6.0)
System.Memory (>= 4.5.4) - restriction: || (&& (== net6.0) (>= net461)) (== netstandard2.0)
System.Runtime.CompilerServices.Unsafe (>= 6.0)
System.Configuration.ConfigurationManager (6.0)
System.Security.Cryptography.ProtectedData (>= 6.0)
System.Security.Permissions (>= 6.0)
System.Drawing.Common (6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.1))
Microsoft.Win32.SystemEvents (>= 6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.1))
System.Formats.Asn1 (6.0)
System.Buffers (>= 4.5.1) - restriction: || (&& (== net6.0) (>= net461)) (== netstandard2.0)
System.Memory (>= 4.5.4) - restriction: || (&& (== net6.0) (>= net461)) (== netstandard2.0)
System.Memory (4.5.5)
System.Buffers (>= 4.5.1) - restriction: || (&& (== net6.0) (>= monotouch)) (&& (== net6.0) (>= net461)) (&& (== net6.0) (< netcoreapp2.0)) (&& (== net6.0) (< netstandard1.1)) (&& (== net6.0) (< netstandard2.0)) (&& (== net6.0) (>= xamarinios)) (&& (== net6.0) (>= xamarinmac)) (&& (== net6.0) (>= xamarintvos)) (&& (== net6.0) (>= xamarinwatchos)) (== netstandard2.0)
System.Numerics.Vectors (>= 4.4) - restriction: || (&& (== net6.0) (< netcoreapp2.0)) (== netstandard2.0)
System.Runtime.CompilerServices.Unsafe (>= 4.5.3) - restriction: || (&& (== net6.0) (>= monotouch)) (&& (== net6.0) (>= net461)) (&& (== net6.0) (< netcoreapp2.0)) (&& (== net6.0) (< netcoreapp2.1)) (&& (== net6.0) (< netstandard1.1)) (&& (== net6.0) (< netstandard2.0)) (&& (== net6.0) (>= uap10.1)) (&& (== net6.0) (>= xamarinios)) (&& (== net6.0) (>= xamarinmac)) (&& (== net6.0) (>= xamarintvos)) (&& (== net6.0) (>= xamarinwatchos)) (== netstandard2.0)
System.Numerics.Vectors (4.5) - restriction: || (&& (== net6.0) (>= net461)) (== netstandard2.0)
System.Reactive (5.0)
System.Runtime.InteropServices.WindowsRuntime (>= 4.3) - restriction: || (&& (== net6.0) (< netcoreapp3.1)) (== netstandard2.0)
System.Threading.Tasks.Extensions (>= 4.5.4) - restriction: || (&& (== net6.0) (>= net472)) (&& (== net6.0) (< netcoreapp3.1)) (&& (== net6.0) (>= uap10.1)) (== netstandard2.0)
System.Reflection.Metadata (6.0.1)
System.Collections.Immutable (>= 6.0)
System.Resources.Extensions (6.0)
System.Memory (>= 4.5.4) - restriction: || (&& (== net6.0) (>= net461)) (== netstandard2.0)
System.Runtime (4.3.1) - restriction: || (&& (== net6.0) (< netcoreapp3.1)) (== netstandard2.0)
Microsoft.NETCore.Platforms (>= 1.1.1)
Microsoft.NETCore.Targets (>= 1.1.3)
System.Runtime.CompilerServices.Unsafe (6.0)
System.Runtime.InteropServices.WindowsRuntime (4.3) - restriction: || (&& (== net6.0) (< netcoreapp3.1)) (== netstandard2.0)
System.Runtime (>= 4.3)
System.Security.AccessControl (6.0)
System.Security.Principal.Windows (>= 5.0) - restriction: || (&& (== net6.0) (>= net461)) (== netstandard2.0)
System.Security.Cryptography.Cng (5.0)
System.Formats.Asn1 (>= 5.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.0))
System.Security.Cryptography.Pkcs (6.0.1)
System.Buffers (>= 4.5.1) - restriction: || (&& (== net6.0) (< netstandard2.1)) (== netstandard2.0)
System.Formats.Asn1 (>= 6.0)
System.Memory (>= 4.5.4) - restriction: || (&& (== net6.0) (< netstandard2.1)) (== netstandard2.0)
System.Security.Cryptography.Cng (>= 5.0) - restriction: || (&& (== net6.0) (< netcoreapp3.1)) (&& (== net6.0) (< netstandard2.1)) (== netstandard2.0)
System.Security.Cryptography.ProtectedData (6.0)
System.Security.Cryptography.Xml (6.0)
System.Memory (>= 4.5.4) - restriction: == netstandard2.0
System.Security.AccessControl (>= 6.0)
System.Security.Cryptography.Pkcs (>= 6.0)
System.Security.Permissions (6.0)
System.Security.AccessControl (>= 6.0)
System.Windows.Extensions (>= 6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.1))
System.Security.Principal.Windows (5.0)
System.Text.Encoding.CodePages (6.0)
System.Runtime.CompilerServices.Unsafe (>= 6.0)
System.Text.Encodings.Web (6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
System.Runtime.CompilerServices.Unsafe (>= 6.0)
System.Text.Json (6.0.5) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= net472)) (&& (== netstandard2.0) (>= net6.0))
System.Runtime.CompilerServices.Unsafe (>= 6.0)
System.Text.Encodings.Web (>= 6.0)
System.Threading.Tasks.Dataflow (6.0)
System.Threading.Tasks.Extensions (4.5.4) - restriction: || (&& (== net6.0) (>= net472)) (&& (== net6.0) (< netcoreapp3.1)) (&& (== net6.0) (>= uap10.1)) (== netstandard2.0)
System.Runtime.CompilerServices.Unsafe (>= 4.5.3) - restriction: || (&& (== net6.0) (>= net461)) (&& (== net6.0) (< netcoreapp2.1)) (&& (== net6.0) (< netstandard1.0)) (&& (== net6.0) (< netstandard2.0)) (&& (== net6.0) (>= wp8)) (== netstandard2.0)
System.Windows.Extensions (6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.1))
System.Drawing.Common (>= 6.0) - restriction: || (== net6.0) (&& (== netstandard2.0) (>= netcoreapp3.1))


@ -1,2 +0,0 @@
dotnet tool restore
dotnet fake %*


@ -1,7 +0,0 @@
#!/usr/bin/env bash
set -eu
set -o pipefail
dotnet tool restore
dotnet fake "$@"

4
src/.dockerignore Normal file

@ -0,0 +1,4 @@
**/bin
**/obj
**/*.db
**/appsettings.*.json


@ -1,10 +1,9 @@
<Project>
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<TargetFrameworks>net6.0;net8.0</TargetFrameworks>
<DebugType>embedded</DebugType>
<AssemblyVersion>2.0.0.0</AssemblyVersion>
<FileVersion>2.0.0.0</FileVersion>
<Version>2.0.0</Version>
<VersionSuffix>rc2</VersionSuffix>
<AssemblyVersion>2.2.0.0</AssemblyVersion>
<FileVersion>2.2.0.0</FileVersion>
<Version>2.2.0</Version>
</PropertyGroup>
</Project>

33
src/Dockerfile Normal file

@ -0,0 +1,33 @@
FROM mcr.microsoft.com/dotnet/sdk:8.0-alpine AS build
WORKDIR /mwl
COPY ./MyWebLog.sln ./
COPY ./Directory.Build.props ./
COPY ./MyWebLog/MyWebLog.fsproj ./MyWebLog/
COPY ./MyWebLog.Data/MyWebLog.Data.fsproj ./MyWebLog.Data/
COPY ./MyWebLog.Domain/MyWebLog.Domain.fsproj ./MyWebLog.Domain/
COPY ./MyWebLog.Tests/MyWebLog.Tests.fsproj ./MyWebLog.Tests/
RUN dotnet restore
COPY . ./
WORKDIR /mwl/MyWebLog
RUN dotnet publish -f net8.0 -c Release -r linux-x64 --no-self-contained -p:PublishSingleFile=false
FROM alpine AS theme
RUN apk add --no-cache zip
WORKDIR /themes/default-theme
COPY ./default-theme ./
RUN zip -r ../default-theme.zip ./*
WORKDIR /themes/admin-theme
COPY ./admin-theme ./
RUN zip -r ../admin-theme.zip ./*
FROM mcr.microsoft.com/dotnet/aspnet:8.0-alpine as final
WORKDIR /app
RUN apk add --no-cache icu-libs
ENV DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=false
COPY --from=build /mwl/MyWebLog/bin/Release/net8.0/linux-x64/publish/ ./
COPY --from=theme /themes/*.zip /app/
RUN mkdir themes
EXPOSE 80
CMD [ "dotnet", "/app/MyWebLog.dll" ]


@ -9,116 +9,123 @@ module Json =
open Newtonsoft.Json
type CategoryIdConverter () =
inherit JsonConverter<CategoryId> ()
override _.WriteJson (writer : JsonWriter, value : CategoryId, _ : JsonSerializer) =
writer.WriteValue (CategoryId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : CategoryId, _ : bool, _ : JsonSerializer) =
type CategoryIdConverter() =
inherit JsonConverter<CategoryId>()
override _.WriteJson(writer: JsonWriter, value: CategoryId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: CategoryId, _: bool, _: JsonSerializer) =
(string >> CategoryId) reader.Value
type CommentIdConverter () =
inherit JsonConverter<CommentId> ()
override _.WriteJson (writer : JsonWriter, value : CommentId, _ : JsonSerializer) =
writer.WriteValue (CommentId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : CommentId, _ : bool, _ : JsonSerializer) =
type CommentIdConverter() =
inherit JsonConverter<CommentId>()
override _.WriteJson(writer: JsonWriter, value: CommentId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: CommentId, _: bool, _: JsonSerializer) =
(string >> CommentId) reader.Value
type CustomFeedIdConverter () =
inherit JsonConverter<CustomFeedId> ()
override _.WriteJson (writer : JsonWriter, value : CustomFeedId, _ : JsonSerializer) =
writer.WriteValue (CustomFeedId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : CustomFeedId, _ : bool, _ : JsonSerializer) =
type CommentStatusConverter() =
inherit JsonConverter<CommentStatus>()
override _.WriteJson(writer: JsonWriter, value: CommentStatus, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: CommentStatus, _: bool, _: JsonSerializer) =
(string >> CommentStatus.Parse) reader.Value
type CustomFeedIdConverter() =
inherit JsonConverter<CustomFeedId>()
override _.WriteJson(writer: JsonWriter, value: CustomFeedId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: CustomFeedId, _: bool, _: JsonSerializer) =
(string >> CustomFeedId) reader.Value
type CustomFeedSourceConverter () =
inherit JsonConverter<CustomFeedSource> ()
override _.WriteJson (writer : JsonWriter, value : CustomFeedSource, _ : JsonSerializer) =
writer.WriteValue (CustomFeedSource.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : CustomFeedSource, _ : bool, _ : JsonSerializer) =
(string >> CustomFeedSource.parse) reader.Value
type CustomFeedSourceConverter() =
inherit JsonConverter<CustomFeedSource>()
override _.WriteJson(writer: JsonWriter, value: CustomFeedSource, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: CustomFeedSource, _: bool, _: JsonSerializer) =
(string >> CustomFeedSource.Parse) reader.Value
type ExplicitRatingConverter () =
inherit JsonConverter<ExplicitRating> ()
override _.WriteJson (writer : JsonWriter, value : ExplicitRating, _ : JsonSerializer) =
writer.WriteValue (ExplicitRating.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : ExplicitRating, _ : bool, _ : JsonSerializer) =
(string >> ExplicitRating.parse) reader.Value
type ExplicitRatingConverter() =
inherit JsonConverter<ExplicitRating>()
override _.WriteJson(writer: JsonWriter, value: ExplicitRating, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: ExplicitRating, _: bool, _: JsonSerializer) =
(string >> ExplicitRating.Parse) reader.Value
type MarkupTextConverter () =
inherit JsonConverter<MarkupText> ()
override _.WriteJson (writer : JsonWriter, value : MarkupText, _ : JsonSerializer) =
writer.WriteValue (MarkupText.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : MarkupText, _ : bool, _ : JsonSerializer) =
(string >> MarkupText.parse) reader.Value
type MarkupTextConverter() =
inherit JsonConverter<MarkupText>()
override _.WriteJson(writer: JsonWriter, value: MarkupText, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: MarkupText, _: bool, _: JsonSerializer) =
(string >> MarkupText.Parse) reader.Value
type PermalinkConverter () =
inherit JsonConverter<Permalink> ()
override _.WriteJson (writer : JsonWriter, value : Permalink, _ : JsonSerializer) =
writer.WriteValue (Permalink.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : Permalink, _ : bool, _ : JsonSerializer) =
type PermalinkConverter() =
inherit JsonConverter<Permalink>()
override _.WriteJson(writer: JsonWriter, value: Permalink, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: Permalink, _: bool, _: JsonSerializer) =
(string >> Permalink) reader.Value
type PageIdConverter () =
inherit JsonConverter<PageId> ()
override _.WriteJson (writer : JsonWriter, value : PageId, _ : JsonSerializer) =
writer.WriteValue (PageId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : PageId, _ : bool, _ : JsonSerializer) =
type PageIdConverter() =
inherit JsonConverter<PageId>()
override _.WriteJson(writer: JsonWriter, value: PageId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: PageId, _: bool, _: JsonSerializer) =
(string >> PageId) reader.Value
type PodcastMediumConverter () =
inherit JsonConverter<PodcastMedium> ()
override _.WriteJson (writer : JsonWriter, value : PodcastMedium, _ : JsonSerializer) =
writer.WriteValue (PodcastMedium.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : PodcastMedium, _ : bool, _ : JsonSerializer) =
(string >> PodcastMedium.parse) reader.Value
type PodcastMediumConverter() =
inherit JsonConverter<PodcastMedium>()
override _.WriteJson(writer: JsonWriter, value: PodcastMedium, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: PodcastMedium, _: bool, _: JsonSerializer) =
(string >> PodcastMedium.Parse) reader.Value
type PostIdConverter () =
inherit JsonConverter<PostId> ()
override _.WriteJson (writer : JsonWriter, value : PostId, _ : JsonSerializer) =
writer.WriteValue (PostId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : PostId, _ : bool, _ : JsonSerializer) =
type PostIdConverter() =
inherit JsonConverter<PostId>()
override _.WriteJson(writer: JsonWriter, value: PostId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: PostId, _: bool, _: JsonSerializer) =
(string >> PostId) reader.Value
type TagMapIdConverter () =
inherit JsonConverter<TagMapId> ()
override _.WriteJson (writer : JsonWriter, value : TagMapId, _ : JsonSerializer) =
writer.WriteValue (TagMapId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : TagMapId, _ : bool, _ : JsonSerializer) =
type TagMapIdConverter() =
inherit JsonConverter<TagMapId>()
override _.WriteJson(writer: JsonWriter, value: TagMapId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: TagMapId, _: bool, _: JsonSerializer) =
(string >> TagMapId) reader.Value
type ThemeAssetIdConverter () =
inherit JsonConverter<ThemeAssetId> ()
override _.WriteJson (writer : JsonWriter, value : ThemeAssetId, _ : JsonSerializer) =
writer.WriteValue (ThemeAssetId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : ThemeAssetId, _ : bool, _ : JsonSerializer) =
(string >> ThemeAssetId.ofString) reader.Value
type ThemeAssetIdConverter() =
inherit JsonConverter<ThemeAssetId>()
override _.WriteJson(writer: JsonWriter, value: ThemeAssetId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: ThemeAssetId, _: bool, _: JsonSerializer) =
(string >> ThemeAssetId.Parse) reader.Value
type ThemeIdConverter () =
inherit JsonConverter<ThemeId> ()
override _.WriteJson (writer : JsonWriter, value : ThemeId, _ : JsonSerializer) =
writer.WriteValue (ThemeId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : ThemeId, _ : bool, _ : JsonSerializer) =
type ThemeIdConverter() =
inherit JsonConverter<ThemeId>()
override _.WriteJson(writer: JsonWriter, value: ThemeId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: ThemeId, _: bool, _: JsonSerializer) =
(string >> ThemeId) reader.Value
type UploadIdConverter () =
inherit JsonConverter<UploadId> ()
override _.WriteJson (writer : JsonWriter, value : UploadId, _ : JsonSerializer) =
writer.WriteValue (UploadId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : UploadId, _ : bool, _ : JsonSerializer) =
type UploadIdConverter() =
inherit JsonConverter<UploadId>()
override _.WriteJson(writer: JsonWriter, value: UploadId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: UploadId, _: bool, _: JsonSerializer) =
(string >> UploadId) reader.Value
type WebLogIdConverter () =
inherit JsonConverter<WebLogId> ()
override _.WriteJson (writer : JsonWriter, value : WebLogId, _ : JsonSerializer) =
writer.WriteValue (WebLogId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : WebLogId, _ : bool, _ : JsonSerializer) =
type WebLogIdConverter() =
inherit JsonConverter<WebLogId>()
override _.WriteJson(writer: JsonWriter, value: WebLogId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: WebLogId, _: bool, _: JsonSerializer) =
(string >> WebLogId) reader.Value
type WebLogUserIdConverter () =
type WebLogUserIdConverter() =
inherit JsonConverter<WebLogUserId> ()
override _.WriteJson (writer : JsonWriter, value : WebLogUserId, _ : JsonSerializer) =
writer.WriteValue (WebLogUserId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : WebLogUserId, _ : bool, _ : JsonSerializer) =
override _.WriteJson(writer: JsonWriter, value: WebLogUserId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: WebLogUserId, _: bool, _: JsonSerializer) =
(string >> WebLogUserId) reader.Value
open Microsoft.FSharpLu.Json
@ -128,27 +135,28 @@ module Json =
/// Configure a serializer to use these converters
let configure (ser : JsonSerializer) =
// Our converters
[ CategoryIdConverter () :> JsonConverter
CommentIdConverter ()
CustomFeedIdConverter ()
CustomFeedSourceConverter ()
ExplicitRatingConverter ()
MarkupTextConverter ()
PermalinkConverter ()
PageIdConverter ()
PodcastMediumConverter ()
PostIdConverter ()
TagMapIdConverter ()
ThemeAssetIdConverter ()
ThemeIdConverter ()
UploadIdConverter ()
WebLogIdConverter ()
WebLogUserIdConverter ()
] |> List.iter ser.Converters.Add
[ CategoryIdConverter() :> JsonConverter
CommentIdConverter()
CommentStatusConverter()
CustomFeedIdConverter()
CustomFeedSourceConverter()
ExplicitRatingConverter()
MarkupTextConverter()
PermalinkConverter()
PageIdConverter()
PodcastMediumConverter()
PostIdConverter()
TagMapIdConverter()
ThemeAssetIdConverter()
ThemeIdConverter()
UploadIdConverter()
WebLogIdConverter()
WebLogUserIdConverter() ]
|> List.iter ser.Converters.Add
// NodaTime
let _ = ser.ConfigureForNodaTime DateTimeZoneProviders.Tzdb
// Handles DUs with no associated data, as well as option fields
ser.Converters.Add (CompactUnionJsonConverter ())
ser.Converters.Add(CompactUnionJsonConverter())
ser.NullValueHandling <- NullValueHandling.Ignore
ser.MissingMemberHandling <- MissingMemberHandling.Ignore
ser
@ -165,6 +173,7 @@ module Json =
Converters = ser.Converters,
DefaultValueHandling = ser.DefaultValueHandling,
DateFormatHandling = ser.DateFormatHandling,
DateParseHandling = ser.DateParseHandling,
MetadataPropertyHandling = ser.MetadataPropertyHandling,
MissingMemberHandling = ser.MissingMemberHandling,
NullValueHandling = ser.NullValueHandling,
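
A brief sketch of how the converters above are consumed, assuming only what this diff shows: Json.configure registers the converters on a Newtonsoft JsonSerializer and returns it, so the single-case ID types serialize as bare JSON strings. The namespace opens and the sample ID value are illustrative assumptions:

open System.IO
open MyWebLog
open MyWebLog.Data
open Newtonsoft.Json

let ser = Json.configure (JsonSerializer.Create())
let writer = new StringWriter()
// A CategoryId round-trips as a plain JSON string rather than a wrapped object
ser.Serialize(writer, CategoryId "my-category")
// writer.ToString() yields "\"my-category\""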


@ -7,6 +7,7 @@ open Newtonsoft.Json
open NodaTime
/// The result of a category deletion attempt
[<Struct>]
type CategoryDeleteResult =
/// The category was deleted successfully
| CategoryDeleted
@ -32,7 +33,7 @@ type ICategoryData =
abstract member Delete : CategoryId -> WebLogId -> Task<CategoryDeleteResult>
/// Find all categories for a web log, sorted alphabetically and grouped by hierarchy
abstract member FindAllForView : WebLogId -> Task<DisplayCategory[]>
abstract member FindAllForView : WebLogId -> Task<DisplayCategory array>
/// Find a category by its ID
abstract member FindById : CategoryId -> WebLogId -> Task<Category option>
@ -53,7 +54,7 @@ type IPageData =
/// Add a page
abstract member Add : Page -> Task<unit>
/// Get all pages for the web log (excluding meta items, text, revisions, and prior permalinks)
/// Get all pages for the web log (excluding text, metadata, revisions, and prior permalinks)
abstract member All : WebLogId -> Task<Page list>
/// Count all pages for the given web log
@ -84,7 +85,7 @@ type IPageData =
abstract member FindListed : WebLogId -> Task<Page list>
/// Find a page of pages (displayed in admin section) (excluding meta items, revisions and prior permalinks)
abstract member FindPageOfPages : WebLogId -> pageNbr : int -> Task<Page list>
abstract member FindPageOfPages : WebLogId -> pageNbr: int -> Task<Page list>
/// Restore pages from a backup
abstract member Restore : Page list -> Task<unit>
@ -125,20 +126,20 @@ type IPostData =
/// Find posts to be displayed on a category list page (excluding revisions and prior permalinks)
abstract member FindPageOfCategorizedPosts :
WebLogId -> CategoryId list -> pageNbr : int -> postsPerPage : int -> Task<Post list>
WebLogId -> CategoryId list -> pageNbr: int -> postsPerPage: int -> Task<Post list>
/// Find posts to be displayed on an admin page (excluding revisions and prior permalinks)
abstract member FindPageOfPosts : WebLogId -> pageNbr : int -> postsPerPage : int -> Task<Post list>
/// Find posts to be displayed on an admin page (excluding text, revisions, and prior permalinks)
abstract member FindPageOfPosts : WebLogId -> pageNbr: int -> postsPerPage: int -> Task<Post list>
/// Find posts to be displayed on a page (excluding revisions and prior permalinks)
abstract member FindPageOfPublishedPosts : WebLogId -> pageNbr : int -> postsPerPage : int -> Task<Post list>
abstract member FindPageOfPublishedPosts : WebLogId -> pageNbr: int -> postsPerPage: int -> Task<Post list>
/// Find posts to be displayed on a tag list page (excluding revisions and prior permalinks)
abstract member FindPageOfTaggedPosts :
WebLogId -> tag : string -> pageNbr : int -> postsPerPage : int -> Task<Post list>
WebLogId -> tag : string -> pageNbr: int -> postsPerPage: int -> Task<Post list>
/// Find the next older and newer post for the given published date/time (excluding revisions and prior permalinks)
abstract member FindSurroundingPosts : WebLogId -> publishedOn : Instant -> Task<Post option * Post option>
abstract member FindSurroundingPosts : WebLogId -> publishedOn: Instant -> Task<Post option * Post option>
/// Restore posts from a backup
abstract member Restore : Post list -> Task<unit>
@ -259,6 +260,9 @@ type IWebLogData =
/// Find a web log by its ID
abstract member FindById : WebLogId -> Task<WebLog option>
/// Update redirect rules for a web log
abstract member UpdateRedirectRules : WebLog -> Task<unit>
/// Update RSS options for a web log
abstract member UpdateRssOptions : WebLog -> Task<unit>
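
A brief sketch of a consumer of the revised IPostData member, assuming the MyWebLog domain and MyWebLog.Data namespaces (an assumption) and an IPostData instance already in hand; it pulls the first admin page of posts, which per the updated comment now excludes text as well as revisions and prior permalinks:

open MyWebLog
open MyWebLog.Data

// Hypothetical helper: count the posts on the first admin page (25 per page is an arbitrary choice)
let countFirstAdminPage (posts: IPostData) (webLogId: WebLogId) = task {
    let! page = posts.FindPageOfPosts webLogId 1 25
    return List.length page
}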


@ -5,27 +5,25 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Data.Sqlite" Version="6.0.8" />
<PackageReference Include="Microsoft.Extensions.Caching.Abstractions" Version="6.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="6.0.0" />
<PackageReference Include="BitBadger.Documents.Postgres" Version="3.1.0" />
<PackageReference Include="BitBadger.Documents.Sqlite" Version="3.1.0" />
<PackageReference Include="Microsoft.Data.Sqlite" Version="8.0.6" />
<PackageReference Include="Microsoft.Extensions.Caching.Abstractions" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="8.0.0" />
<PackageReference Include="Microsoft.FSharpLu.Json" Version="0.11.7" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.1" />
<PackageReference Include="NodaTime" Version="3.1.2" />
<PackageReference Include="NodaTime.Serialization.JsonNet" Version="3.0.0" />
<PackageReference Include="Npgsql" Version="6.0.6" />
<PackageReference Include="Npgsql.FSharp" Version="5.3.0" />
<PackageReference Include="Npgsql.NodaTime" Version="6.0.6" />
<PackageReference Include="NodaTime.Serialization.JsonNet" Version="3.1.0" />
<PackageReference Include="Npgsql.NodaTime" Version="8.0.3" />
<PackageReference Include="RethinkDb.Driver" Version="2.3.150" />
<PackageReference Include="RethinkDb.Driver.FSharp" Version="0.9.0-beta-07" />
<PackageReference Update="FSharp.Core" Version="6.0.5" />
<PackageReference Update="FSharp.Core" Version="8.0.300" />
</ItemGroup>
<ItemGroup>
<Compile Include="Converters.fs" />
<Compile Include="Interfaces.fs" />
<Compile Include="Utils.fs" />
<Compile Include="RethinkDbData.fs" />
<Compile Include="SQLite\Helpers.fs" />
<Compile Include="SQLite\SQLiteHelpers.fs" />
<Compile Include="SQLite\SQLiteCategoryData.fs" />
<Compile Include="SQLite\SQLitePageData.fs" />
<Compile Include="SQLite\SQLitePostData.fs" />
@ -45,7 +43,13 @@
<Compile Include="Postgres\PostgresUploadData.fs" />
<Compile Include="Postgres\PostgresWebLogData.fs" />
<Compile Include="Postgres\PostgresWebLogUserData.fs" />
<Compile Include="PostgresData.fs" />
<Compile Include="PostgresData.fs" />
</ItemGroup>
<ItemGroup>
<AssemblyAttribute Include="System.Runtime.CompilerServices.InternalsVisibleToAttribute">
<_Parameter1>MyWebLog.Tests</_Parameter1>
</AssemblyAttribute>
</ItemGroup>
</Project>


@ -2,37 +2,37 @@ namespace MyWebLog.Data.Postgres
open System.Threading
open System.Threading.Tasks
open BitBadger.Documents.Postgres
open Microsoft.Extensions.Caching.Distributed
open NodaTime
open Npgsql.FSharp
/// Helper types and functions for the cache
[<AutoOpen>]
module private Helpers =
/// The cache entry
type Entry =
{ /// The ID of the cache entry
Id : string
/// The value to be cached
Payload : byte[]
/// When this entry will expire
ExpireAt : Instant
/// The duration by which the expiration should be pushed out when being refreshed
SlidingExpiration : Duration option
/// The must-expire-by date/time for the cache entry
AbsoluteExpiration : Instant option
}
type Entry = {
/// The ID of the cache entry
Id: string
/// The value to be cached
Payload: byte array
/// When this entry will expire
ExpireAt: Instant
/// The duration by which the expiration should be pushed out when being refreshed
SlidingExpiration: Duration option
/// The must-expire-by date/time for the cache entry
AbsoluteExpiration: Instant option
}
/// Run a task synchronously
let sync<'T> (it : Task<'T>) = it |> (Async.AwaitTask >> Async.RunSynchronously)
let sync<'T> (it: Task<'T>) = it |> (Async.AwaitTask >> Async.RunSynchronously)
/// Get the current instant
let getNow () = SystemClock.Instance.GetCurrentInstant ()
let getNow () = SystemClock.Instance.GetCurrentInstant()
/// Create a parameter for the expire-at time
let expireParam =
@ -40,32 +40,28 @@ module private Helpers =
/// A distributed cache implementation in PostgreSQL used to handle sessions for myWebLog
type DistributedCache (connStr : string) =
type DistributedCache () =
// ~~~ INITIALIZATION ~~~
do
task {
let! exists =
Sql.connect connStr
|> Sql.query $"
SELECT EXISTS
(SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = 'session')
AS {existsName}"
|> Sql.executeRowAsync Map.toExists
Custom.scalar
"SELECT EXISTS
(SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = 'session')
AS it"
[]
toExists
if not exists then
let! _ =
Sql.connect connStr
|> Sql.query
do! Custom.nonQuery
"CREATE TABLE session (
id TEXT NOT NULL PRIMARY KEY,
payload BYTEA NOT NULL,
expire_at TIMESTAMPTZ NOT NULL,
sliding_expiration INTERVAL,
absolute_expiration TIMESTAMPTZ);
CREATE INDEX idx_session_expiration ON session (expire_at)"
|> Sql.executeNonQueryAsync
()
CREATE INDEX idx_session_expiration ON session (expire_at)" []
} |> sync
// ~~~ SUPPORT FUNCTIONS ~~~
@ -74,16 +70,15 @@ type DistributedCache (connStr : string) =
let getEntry key = backgroundTask {
let idParam = "@id", Sql.string key
let! tryEntry =
Sql.connect connStr
|> Sql.query "SELECT * FROM session WHERE id = @id"
|> Sql.parameters [ idParam ]
|> Sql.executeAsync (fun row ->
{ Id = row.string "id"
Payload = row.bytea "payload"
ExpireAt = row.fieldValue<Instant> "expire_at"
SlidingExpiration = row.fieldValueOrNone<Duration> "sliding_expiration"
AbsoluteExpiration = row.fieldValueOrNone<Instant> "absolute_expiration" })
|> tryHead
Custom.single
"SELECT * FROM session WHERE id = @id"
[ idParam ]
(fun row ->
{ Id = row.string "id"
Payload = row.bytea "payload"
ExpireAt = row.fieldValue<Instant> "expire_at"
SlidingExpiration = row.fieldValueOrNone<Duration> "sliding_expiration"
AbsoluteExpiration = row.fieldValueOrNone<Instant> "absolute_expiration" })
match tryEntry with
| Some entry ->
let now = getNow ()
@ -96,11 +91,9 @@ type DistributedCache (connStr : string) =
true, { entry with ExpireAt = absExp }
else true, { entry with ExpireAt = now.Plus slideExp }
if needsRefresh then
let! _ =
Sql.connect connStr
|> Sql.query "UPDATE session SET expire_at = @expireAt WHERE id = @id"
|> Sql.parameters [ expireParam item.ExpireAt; idParam ]
|> Sql.executeNonQueryAsync
do! Custom.nonQuery
"UPDATE session SET expire_at = @expireAt WHERE id = @id"
[ expireParam item.ExpireAt; idParam ]
()
return if item.ExpireAt > now then Some entry else None
| None -> return None
@ -112,27 +105,17 @@ type DistributedCache (connStr : string) =
/// Purge expired entries every 30 minutes
let purge () = backgroundTask {
let now = getNow ()
if lastPurge.Plus (Duration.FromMinutes 30L) < now then
let! _ =
Sql.connect connStr
|> Sql.query "DELETE FROM session WHERE expire_at < @expireAt"
|> Sql.parameters [ expireParam now ]
|> Sql.executeNonQueryAsync
if lastPurge.Plus(Duration.FromMinutes 30L) < now then
do! Custom.nonQuery "DELETE FROM session WHERE expire_at < @expireAt" [ expireParam now ]
lastPurge <- now
}
/// Remove a cache entry
let removeEntry key = backgroundTask {
let! _ =
Sql.connect connStr
|> Sql.query "DELETE FROM session WHERE id = @id"
|> Sql.parameters [ "@id", Sql.string key ]
|> Sql.executeNonQueryAsync
()
}
let removeEntry key =
Custom.nonQuery "DELETE FROM session WHERE id = @id" [ "@id", Sql.string key ]
/// Save an entry
let saveEntry (opts : DistributedCacheEntryOptions) key payload = backgroundTask {
let saveEntry (opts: DistributedCacheEntryOptions) key payload =
let now = getNow ()
let expireAt, slideExp, absExp =
if opts.SlidingExpiration.HasValue then
@ -142,38 +125,32 @@ type DistributedCache (connStr : string) =
let exp = Instant.FromDateTimeOffset opts.AbsoluteExpiration.Value
exp, None, Some exp
elif opts.AbsoluteExpirationRelativeToNow.HasValue then
let exp = now.Plus (Duration.FromTimeSpan opts.AbsoluteExpirationRelativeToNow.Value)
let exp = now.Plus(Duration.FromTimeSpan opts.AbsoluteExpirationRelativeToNow.Value)
exp, None, Some exp
else
// Default to 1 hour sliding expiration
let slide = Duration.FromHours 1
now.Plus slide, Some slide, None
let! _ =
Sql.connect connStr
|> Sql.query
"INSERT INTO session (
id, payload, expire_at, sliding_expiration, absolute_expiration
) VALUES (
@id, @payload, @expireAt, @slideExp, @absExp
) ON CONFLICT (id) DO UPDATE
SET payload = EXCLUDED.payload,
expire_at = EXCLUDED.expire_at,
sliding_expiration = EXCLUDED.sliding_expiration,
absolute_expiration = EXCLUDED.absolute_expiration"
|> Sql.parameters
[ "@id", Sql.string key
"@payload", Sql.bytea payload
expireParam expireAt
optParam "slideExp" slideExp
optParam "absExp" absExp ]
|> Sql.executeNonQueryAsync
()
}
Custom.nonQuery
"INSERT INTO session (
id, payload, expire_at, sliding_expiration, absolute_expiration
) VALUES (
@id, @payload, @expireAt, @slideExp, @absExp
) ON CONFLICT (id) DO UPDATE
SET payload = EXCLUDED.payload,
expire_at = EXCLUDED.expire_at,
sliding_expiration = EXCLUDED.sliding_expiration,
absolute_expiration = EXCLUDED.absolute_expiration"
[ "@id", Sql.string key
"@payload", Sql.bytea payload
expireParam expireAt
optParam "slideExp" slideExp
optParam "absExp" absExp ]
// ~~~ IMPLEMENTATION FUNCTIONS ~~~
/// Retrieve the data for a cache entry
let get key (_ : CancellationToken) = backgroundTask {
let get key (_: CancellationToken) = backgroundTask {
match! getEntry key with
| Some entry ->
do! purge ()
@ -182,29 +159,29 @@ type DistributedCache (connStr : string) =
}
/// Refresh an entry
let refresh key (cancelToken : CancellationToken) = backgroundTask {
let refresh key (cancelToken: CancellationToken) = backgroundTask {
let! _ = get key cancelToken
()
}
/// Remove an entry
let remove key (_ : CancellationToken) = backgroundTask {
let remove key (_: CancellationToken) = backgroundTask {
do! removeEntry key
do! purge ()
}
/// Set an entry
let set key value options (_ : CancellationToken) = backgroundTask {
let set key value options (_: CancellationToken) = backgroundTask {
do! saveEntry options key value
do! purge ()
}
interface IDistributedCache with
member this.Get key = get key CancellationToken.None |> sync
member this.GetAsync (key, token) = get key token
member this.Refresh key = refresh key CancellationToken.None |> sync
member this.RefreshAsync (key, token) = refresh key token
member this.Remove key = remove key CancellationToken.None |> sync
member this.RemoveAsync (key, token) = remove key token
member this.Set (key, value, options) = set key value options CancellationToken.None |> sync
member this.SetAsync (key, value, options, token) = set key value options token
member _.Get key = get key CancellationToken.None |> sync
member _.GetAsync(key, token) = get key token
member _.Refresh key = refresh key CancellationToken.None |> sync
member _.RefreshAsync(key, token) = refresh key token
member _.Remove key = remove key CancellationToken.None |> sync
member _.RemoveAsync(key, token) = remove key token
member _.Set(key, value, options) = set key value options CancellationToken.None |> sync
member _.SetAsync(key, value, options, token) = set key value options token
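
DistributedCache now takes no constructor arguments and appears to lean on the library's shared data source, so registering it for session state is presumably a single service registration. A sketch under that assumption; the actual host wiring is not part of this diff:

open Microsoft.Extensions.Caching.Distributed
open Microsoft.Extensions.DependencyInjection
open MyWebLog.Data.Postgres

// Register the PostgreSQL-backed cache so ASP.NET Core session state uses it
let addPostgresCache (services: IServiceCollection) =
    services.AddSingleton<IDistributedCache>(DistributedCache() :> IDistributedCache)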


@ -1,34 +1,37 @@
namespace MyWebLog.Data.Postgres
open BitBadger.Documents
open BitBadger.Documents.Postgres
open Microsoft.Extensions.Logging
open MyWebLog
open MyWebLog.Data
open Npgsql
open Npgsql.FSharp
/// PostgreSQL myWebLog category data implementation
type PostgresCategoryData (conn : NpgsqlConnection) =
type PostgresCategoryData(log: ILogger) =
/// Count all categories for the given web log
let countAll webLogId =
Sql.existingConnection conn
|> Sql.query $"SELECT COUNT(id) AS {countName} FROM category WHERE web_log_id = @webLogId"
|> Sql.parameters [ webLogIdParam webLogId ]
|> Sql.executeRowAsync Map.toCount
log.LogTrace "Category.countAll"
Count.byContains Table.Category (webLogDoc webLogId)
/// Count all top-level categories for the given web log
let countTopLevel webLogId =
Sql.existingConnection conn
|> Sql.query $"SELECT COUNT(id) AS {countName} FROM category WHERE web_log_id = @webLogId AND parent_id IS NULL"
|> Sql.parameters [ webLogIdParam webLogId ]
|> Sql.executeRowAsync Map.toCount
log.LogTrace "Category.countTopLevel"
Custom.scalar
$"""{Query.Count.byContains Table.Category}
AND {Query.whereByField (Field.NEX (nameof Category.Empty.ParentId)) ""}"""
[ webLogContains webLogId ]
toCount
/// Retrieve all categories for the given web log in a DotLiquid-friendly format
let findAllForView webLogId = backgroundTask {
log.LogTrace "Category.findAllForView"
let! cats =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM category WHERE web_log_id = @webLogId ORDER BY LOWER(name)"
|> Sql.parameters [ webLogIdParam webLogId ]
|> Sql.executeAsync Map.toCategory
Custom.list
$"{selectWithCriteria Table.Category} ORDER BY LOWER(data ->> '{nameof Category.Empty.Name}')"
[ webLogContains webLogId ]
fromData<Category>
let ordered = Utils.orderByHierarchy cats None None []
let counts =
ordered
@ -37,21 +40,18 @@ type PostgresCategoryData (conn : NpgsqlConnection) =
let catIdSql, catIdParams =
ordered
|> Seq.filter (fun cat -> cat.ParentNames |> Array.contains it.Name)
|> Seq.map (fun cat -> cat.Id)
|> Seq.map _.Id
|> Seq.append (Seq.singleton it.Id)
|> List.ofSeq
|> inClause "AND pc.category_id" "id" id
|> arrayContains (nameof Post.Empty.CategoryIds) id
let postCount =
Sql.existingConnection conn
|> Sql.query $"
SELECT COUNT(DISTINCT p.id) AS {countName}
FROM post p
INNER JOIN post_category pc ON pc.post_id = p.id
WHERE p.web_log_id = @webLogId
AND p.status = 'Published'
{catIdSql}"
|> Sql.parameters (webLogIdParam webLogId :: catIdParams)
|> Sql.executeRowAsync Map.toCount
Custom.scalar
$"""SELECT COUNT(DISTINCT data ->> '{nameof Post.Empty.Id}') AS it
FROM {Table.Post}
WHERE {Query.whereDataContains "@criteria"}
AND {catIdSql}"""
[ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |}; catIdParams ]
toCount
|> Async.AwaitTask
|> Async.RunSynchronously
it.Id, postCount)
@ -63,99 +63,82 @@ type PostgresCategoryData (conn : NpgsqlConnection) =
PostCount = counts
|> List.tryFind (fun c -> fst c = cat.Id)
|> Option.map snd
|> Option.defaultValue 0
})
|> Option.defaultValue 0 })
|> Array.ofSeq
}
/// Find a category by its ID for the given web log
let findById catId webLogId =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM category WHERE id = @id AND web_log_id = @webLogId"
|> Sql.parameters [ "@id", Sql.string (CategoryId.toString catId); webLogIdParam webLogId ]
|> Sql.executeAsync Map.toCategory
|> tryHead
log.LogTrace "Category.findById"
Document.findByIdAndWebLog<CategoryId, Category> Table.Category catId webLogId
/// Find all categories for the given web log
let findByWebLog webLogId =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM category WHERE web_log_id = @webLogId"
|> Sql.parameters [ webLogIdParam webLogId ]
|> Sql.executeAsync Map.toCategory
log.LogTrace "Category.findByWebLog"
Document.findByWebLog<Category> Table.Category webLogId
/// Delete a category
let delete catId webLogId = backgroundTask {
log.LogTrace "Category.delete"
match! findById catId webLogId with
| Some cat ->
// Reassign any children to the category's parent category
let parentParam = "@parentId", Sql.string (CategoryId.toString catId)
let! hasChildren =
Sql.existingConnection conn
|> Sql.query $"SELECT EXISTS (SELECT 1 FROM category WHERE parent_id = @parentId) AS {existsName}"
|> Sql.parameters [ parentParam ]
|> Sql.executeRowAsync Map.toExists
let! children = Find.byContains<Category> Table.Category {| ParentId = catId |}
let hasChildren = not (List.isEmpty children)
if hasChildren then
let childQuery, childParams =
if cat.ParentId.IsSome then
Query.Patch.byId Table.Category,
children
|> List.map (fun child -> [ idParam child.Id; jsonParam "@data" {| ParentId = cat.ParentId |} ])
else
Query.RemoveFields.byId Table.Category,
children
|> List.map (fun child ->
[ idParam child.Id; fieldNameParam [ nameof Category.Empty.ParentId ] ])
let! _ =
Sql.existingConnection conn
|> Sql.query "UPDATE category SET parent_id = @newParentId WHERE parent_id = @parentId"
|> Sql.parameters
[ parentParam
"@newParentId", Sql.stringOrNone (cat.ParentId |> Option.map CategoryId.toString) ]
|> Sql.executeNonQueryAsync
Configuration.dataSource ()
|> Sql.fromDataSource
|> Sql.executeTransactionAsync [ childQuery, childParams ]
()
// Remove the category from all posts where it is assigned, and delete the category itself
let! _ =
Sql.existingConnection conn
|> Sql.query
"DELETE FROM post_category
WHERE category_id = @id
AND post_id IN (SELECT id FROM post WHERE web_log_id = @webLogId);
DELETE FROM category WHERE id = @id"
|> Sql.parameters [ "@id", Sql.string (CategoryId.toString catId); webLogIdParam webLogId ]
|> Sql.executeNonQueryAsync
// Remove the category from all posts where it is assigned
let! posts =
Custom.list
$"SELECT data FROM {Table.Post} WHERE data -> '{nameof Post.Empty.CategoryIds}' @> @id"
[ jsonParam "@id" [| string catId |] ]
fromData<Post>
if not (List.isEmpty posts) then
let! _ =
Configuration.dataSource ()
|> Sql.fromDataSource
|> Sql.executeTransactionAsync
[ Query.Patch.byId Table.Post,
posts
|> List.map (fun post ->
[ idParam post.Id
jsonParam
"@data"
{| CategoryIds = post.CategoryIds |> List.filter (fun cat -> cat <> catId) |} ]) ]
()
// Delete the category itself
do! Delete.byId Table.Category catId
return if hasChildren then ReassignedChildCategories else CategoryDeleted
| None -> return CategoryNotFound
}
/// The INSERT statement for a category
let catInsert =
"INSERT INTO category (
id, web_log_id, name, slug, description, parent_id
) VALUES (
@id, @webLogId, @name, @slug, @description, @parentId
)"
/// Create parameters for a category insert / update
let catParameters (cat : Category) = [
webLogIdParam cat.WebLogId
"@id", Sql.string (CategoryId.toString cat.Id)
"@name", Sql.string cat.Name
"@slug", Sql.string cat.Slug
"@description", Sql.stringOrNone cat.Description
"@parentId", Sql.stringOrNone (cat.ParentId |> Option.map CategoryId.toString)
]
/// Save a category
let save cat = backgroundTask {
let! _ =
Sql.existingConnection conn
|> Sql.query $"
{catInsert} ON CONFLICT (id) DO UPDATE
SET name = EXCLUDED.name,
slug = EXCLUDED.slug,
description = EXCLUDED.description,
parent_id = EXCLUDED.parent_id"
|> Sql.parameters (catParameters cat)
|> Sql.executeNonQueryAsync
()
let save (cat: Category) = backgroundTask {
log.LogTrace "Category.save"
do! save Table.Category cat
}
/// Restore categories from a backup
let restore cats = backgroundTask {
log.LogTrace "Category.restore"
let! _ =
Sql.existingConnection conn
Configuration.dataSource ()
|> Sql.fromDataSource
|> Sql.executeTransactionAsync [
catInsert, cats |> List.map catParameters
Query.insert Table.Category, cats |> List.map (fun c -> [ jsonParam "@data" c ])
]
()
}
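A minimal sketch of the criteria idiom used throughout this file (the web log ID is a placeholder, and the serialized JSON shape is an assumption drawn from the containment queries above):
let publishedCriteria = {| webLogDoc (WebLogId "my-log") with Status = Published |}
// Serializes to roughly {"WebLogId": "my-log", "Status": "Published"}; byContains-style queries
// match it against each document with PostgreSQL's JSONB containment operator (data @> @criteria).
let countPublishedPosts () = Count.byContains Table.Post publishedCriteria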

@ -2,27 +2,91 @@
[<AutoOpen>]
module MyWebLog.Data.Postgres.PostgresHelpers
/// The table names used in the PostgreSQL implementation
[<RequireQualifiedAccess>]
module Table =
/// Categories
[<Literal>]
let Category = "category"
/// Database Version
[<Literal>]
let DbVersion = "db_version"
/// Pages
[<Literal>]
let Page = "page"
/// Page Revisions
[<Literal>]
let PageRevision = "page_revision"
/// Posts
[<Literal>]
let Post = "post"
/// Post Comments
[<Literal>]
let PostComment = "post_comment"
/// Post Revisions
[<Literal>]
let PostRevision = "post_revision"
/// Tag/URL Mappings
[<Literal>]
let TagMap = "tag_map"
/// Themes
[<Literal>]
let Theme = "theme"
/// Theme Assets
[<Literal>]
let ThemeAsset = "theme_asset"
/// Uploads
[<Literal>]
let Upload = "upload"
/// Web Logs
[<Literal>]
let WebLog = "web_log"
/// Users
[<Literal>]
let WebLogUser = "web_log_user"
open System
open System.Threading.Tasks
open BitBadger.Documents
open BitBadger.Documents.Postgres
open MyWebLog
open MyWebLog.Data
open Newtonsoft.Json
open NodaTime
open Npgsql
open Npgsql.FSharp
/// Create a SQL parameter for the web log ID
let webLogIdParam webLogId =
"@webLogId", Sql.string (WebLogId.toString webLogId)
let webLogIdParam (webLogId: WebLogId) =
"@webLogId", Sql.string (string webLogId)
/// The name of the field to select to be able to use Map.toCount
let countName = "the_count"
/// Create an anonymous record with the given web log ID
let webLogDoc (webLogId: WebLogId) =
{| WebLogId = webLogId |}
/// The name of the field to select to be able to use Map.toExists
let existsName = "does_exist"
/// Create a parameter for a web log document-contains query
let webLogContains webLogId =
jsonParam "@criteria" (webLogDoc webLogId)
/// A SQL string to select data from a table with the given JSON document contains criteria
let selectWithCriteria tableName =
$"""{Query.selectFromTable tableName} WHERE {Query.whereDataContains "@criteria"}"""
/// Create the SQL and parameters for an IN clause
let inClause<'T> colNameAndPrefix paramName (valueFunc: 'T -> string) (items : 'T list) =
let inClause<'T> colNameAndPrefix paramName (items: 'T list) =
if List.isEmpty items then "", []
else
let mutable idx = 0
@ -30,211 +94,130 @@ let inClause<'T> colNameAndPrefix paramName (valueFunc: 'T -> string) (items : '
|> List.skip 1
|> List.fold (fun (itemS, itemP) it ->
idx <- idx + 1
$"{itemS}, @%s{paramName}{idx}", ($"@%s{paramName}{idx}", Sql.string (valueFunc it)) :: itemP)
$"{itemS}, @%s{paramName}{idx}", ($"@%s{paramName}{idx}", Sql.string (string it)) :: itemP)
(Seq.ofList items
|> Seq.map (fun it ->
$"%s{colNameAndPrefix} IN (@%s{paramName}0", [ $"@%s{paramName}0", Sql.string (valueFunc it) ])
$"%s{colNameAndPrefix} IN (@%s{paramName}0", [ $"@%s{paramName}0", Sql.string (string it) ])
|> Seq.head)
|> function sql, ps -> $"{sql})", ps
/// Create the SQL and parameters for the array equivalent of an IN clause
let arrayInClause<'T> name (valueFunc : 'T -> string) (items : 'T list) =
if List.isEmpty items then "TRUE = FALSE", []
else
let mutable idx = 0
items
|> List.skip 1
|> List.fold (fun (itemS, itemP) it ->
idx <- idx + 1
$"{itemS} OR %s{name} && ARRAY[@{name}{idx}]",
($"@{name}{idx}", Sql.string (valueFunc it)) :: itemP)
(Seq.ofList items
|> Seq.map (fun it ->
$"{name} && ARRAY[@{name}0]", [ $"@{name}0", Sql.string (valueFunc it) ])
|> Seq.head)
/// Create the SQL and parameters for a match-any array query
let arrayContains<'T> name (valueFunc: 'T -> string) (items: 'T list) =
$"data['{name}'] ?| @{name}Values",
($"@{name}Values", Sql.stringArray (items |> List.map valueFunc |> Array.ofList))
/// Get the first result of the given query
let tryHead<'T> (query : Task<'T list>) = backgroundTask {
let tryHead<'T> (query: Task<'T list>) = backgroundTask {
let! results = query
return List.tryHead results
}
/// Create a parameter for a non-standard type
let typedParam<'T> name (it : 'T) =
$"@%s{name}", Sql.parameter (NpgsqlParameter ($"@{name}", it))
let typedParam<'T> name (it: 'T) =
$"@%s{name}", Sql.parameter (NpgsqlParameter($"@{name}", it))
/// Create a parameter for a possibly-missing non-standard type
let optParam<'T> name (it : 'T option) =
let p = NpgsqlParameter ($"@%s{name}", if Option.isSome it then box it.Value else DBNull.Value)
let optParam<'T> name (it: 'T option) =
let p = NpgsqlParameter($"@%s{name}", if Option.isSome it then box it.Value else DBNull.Value)
p.ParameterName, Sql.parameter p
/// Mapping functions for SQL queries
module Map =
/// Map an id field to a category ID
let toCategoryId (row : RowReader) =
CategoryId (row.string "id")
/// Create a category from the current row
let toCategory (row : RowReader) : Category =
{ Id = toCategoryId row
WebLogId = row.string "web_log_id" |> WebLogId
Name = row.string "name"
Slug = row.string "slug"
Description = row.stringOrNone "description"
ParentId = row.stringOrNone "parent_id" |> Option.map CategoryId
}
/// Get a count from a row
let toCount (row : RowReader) =
row.int countName
/// Create a custom feed from the current row
let toCustomFeed (ser : JsonSerializer) (row : RowReader) : CustomFeed =
{ Id = row.string "id" |> CustomFeedId
Source = row.string "source" |> CustomFeedSource.parse
Path = row.string "path" |> Permalink
Podcast = row.stringOrNone "podcast" |> Option.map (Utils.deserialize ser)
}
/// Get a true/false value as to whether an item exists
let toExists (row : RowReader) =
row.bool existsName
/// Create a meta item from the current row
let toMetaItem (row : RowReader) : MetaItem =
{ Name = row.string "name"
Value = row.string "value"
}
/// Create a permalink from the current row
let toPermalink (row : RowReader) =
let toPermalink (row: RowReader) =
Permalink (row.string "permalink")
/// Create a page from the current row
let toPage (ser : JsonSerializer) (row : RowReader) : Page =
{ Page.empty with
Id = row.string "id" |> PageId
WebLogId = row.string "web_log_id" |> WebLogId
AuthorId = row.string "author_id" |> WebLogUserId
Title = row.string "title"
Permalink = toPermalink row
PriorPermalinks = row.stringArray "prior_permalinks" |> Array.map Permalink |> List.ofArray
PublishedOn = row.fieldValue<Instant> "published_on"
UpdatedOn = row.fieldValue<Instant> "updated_on"
IsInPageList = row.bool "is_in_page_list"
Template = row.stringOrNone "template"
Text = row.string "page_text"
Metadata = row.stringOrNone "meta_items"
|> Option.map (Utils.deserialize ser)
|> Option.defaultValue []
}
/// Create a post from the current row
let toPost (ser : JsonSerializer) (row : RowReader) : Post =
{ Post.empty with
Id = row.string "id" |> PostId
WebLogId = row.string "web_log_id" |> WebLogId
AuthorId = row.string "author_id" |> WebLogUserId
Status = row.string "status" |> PostStatus.parse
Title = row.string "title"
Permalink = toPermalink row
PriorPermalinks = row.stringArray "prior_permalinks" |> Array.map Permalink |> List.ofArray
PublishedOn = row.fieldValueOrNone<Instant> "published_on"
UpdatedOn = row.fieldValue<Instant> "updated_on"
Template = row.stringOrNone "template"
Text = row.string "post_text"
Episode = row.stringOrNone "episode" |> Option.map (Utils.deserialize ser)
CategoryIds = row.stringArrayOrNone "category_ids"
|> Option.map (Array.map CategoryId >> List.ofArray)
|> Option.defaultValue []
Tags = row.stringArrayOrNone "tags"
|> Option.map List.ofArray
|> Option.defaultValue []
Metadata = row.stringOrNone "meta_items"
|> Option.map (Utils.deserialize ser)
|> Option.defaultValue []
}
/// Create a revision from the current row
let toRevision (row : RowReader) : Revision =
{ AsOf = row.fieldValue<Instant> "as_of"
Text = row.string "revision_text" |> MarkupText.parse
}
/// Create a tag mapping from the current row
let toTagMap (row : RowReader) : TagMap =
{ Id = row.string "id" |> TagMapId
WebLogId = row.string "web_log_id" |> WebLogId
Tag = row.string "tag"
UrlValue = row.string "url_value"
}
/// Create a theme from the current row (excludes templates)
let toTheme (row : RowReader) : Theme =
{ Theme.empty with
Id = row.string "id" |> ThemeId
Name = row.string "name"
Version = row.string "version"
}
let toRevision (row: RowReader) : Revision =
{ AsOf = row.fieldValue<Instant> "as_of"
Text = row.string "revision_text" |> MarkupText.Parse }
/// Create a theme asset from the current row
let toThemeAsset includeData (row : RowReader) : ThemeAsset =
{ Id = ThemeAssetId (ThemeId (row.string "theme_id"), row.string "path")
UpdatedOn = row.fieldValue<Instant> "updated_on"
Data = if includeData then row.bytea "data" else [||]
}
let toThemeAsset includeData (row: RowReader) : ThemeAsset =
{ Id = ThemeAssetId (ThemeId (row.string "theme_id"), row.string "path")
UpdatedOn = row.fieldValue<Instant> "updated_on"
Data = if includeData then row.bytea "data" else [||] }
/// Create a theme template from the current row
let toThemeTemplate includeText (row : RowReader) : ThemeTemplate =
{ Name = row.string "name"
Text = if includeText then row.string "template" else ""
}
/// Create an uploaded file from the current row
let toUpload includeData (row : RowReader) : Upload =
{ Id = row.string "id" |> UploadId
WebLogId = row.string "web_log_id" |> WebLogId
Path = row.string "path" |> Permalink
UpdatedOn = row.fieldValue<Instant> "updated_on"
Data = if includeData then row.bytea "data" else [||]
}
let toUpload includeData (row: RowReader) : Upload =
{ Id = row.string "id" |> UploadId
WebLogId = row.string "web_log_id" |> WebLogId
Path = row.string "path" |> Permalink
UpdatedOn = row.fieldValue<Instant> "updated_on"
Data = if includeData then row.bytea "data" else [||] }
/// Document manipulation functions
module Document =
/// Create a web log from the current row
let toWebLog (row : RowReader) : WebLog =
{ Id = row.string "id" |> WebLogId
Name = row.string "name"
Slug = row.string "slug"
Subtitle = row.stringOrNone "subtitle"
DefaultPage = row.string "default_page"
PostsPerPage = row.int "posts_per_page"
ThemeId = row.string "theme_id" |> ThemeId
UrlBase = row.string "url_base"
TimeZone = row.string "time_zone"
AutoHtmx = row.bool "auto_htmx"
Uploads = row.string "uploads" |> UploadDestination.parse
Rss = {
IsFeedEnabled = row.bool "is_feed_enabled"
FeedName = row.string "feed_name"
ItemsInFeed = row.intOrNone "items_in_feed"
IsCategoryEnabled = row.bool "is_category_enabled"
IsTagEnabled = row.bool "is_tag_enabled"
Copyright = row.stringOrNone "copyright"
CustomFeeds = []
}
}
/// Determine whether a document exists with the given key for the given web log
let existsByWebLog<'TKey> table (key: 'TKey) webLogId =
Custom.scalar
$"""SELECT EXISTS (
SELECT 1 FROM %s{table} WHERE {Query.whereById "@id"} AND {Query.whereDataContains "@criteria"}
) AS it"""
[ "@id", Sql.string (string key); webLogContains webLogId ]
toExists
/// Create a web log user from the current row
let toWebLogUser (row : RowReader) : WebLogUser =
{ Id = row.string "id" |> WebLogUserId
WebLogId = row.string "web_log_id" |> WebLogId
Email = row.string "email"
FirstName = row.string "first_name"
LastName = row.string "last_name"
PreferredName = row.string "preferred_name"
PasswordHash = row.string "password_hash"
Url = row.stringOrNone "url"
AccessLevel = row.string "access_level" |> AccessLevel.parse
CreatedOn = row.fieldValue<Instant> "created_on"
LastSeenOn = row.fieldValueOrNone<Instant> "last_seen_on"
}
/// Find a document by its ID for the given web log
let findByIdAndWebLog<'TKey, 'TDoc> table (key: 'TKey) webLogId =
Custom.single
$"""{Query.selectFromTable table} WHERE {Query.whereById "@id"} AND {Query.whereDataContains "@criteria"}"""
[ "@id", Sql.string (string key); webLogContains webLogId ]
fromData<'TDoc>
/// Find documents for the given web log
let findByWebLog<'TDoc> table webLogId : Task<'TDoc list> =
Find.byContains table (webLogDoc webLogId)
/// Functions to support revisions
module Revisions =
/// Find all revisions for the given entity
let findByEntityId<'TKey> revTable entityTable (key: 'TKey) =
Custom.list
$"SELECT as_of, revision_text FROM %s{revTable} WHERE %s{entityTable}_id = @id ORDER BY as_of DESC"
[ "@id", Sql.string (string key) ]
Map.toRevision
/// Find all revisions for all entities of the given type for the given web log
let findByWebLog<'TKey> revTable entityTable (keyFunc: string -> 'TKey) webLogId =
Custom.list
$"""SELECT pr.*
FROM %s{revTable} pr
INNER JOIN %s{entityTable} p ON p.data ->> '{nameof Post.Empty.Id}' = pr.{entityTable}_id
WHERE p.{Query.whereDataContains "@criteria"}
ORDER BY as_of DESC"""
[ webLogContains webLogId ]
(fun row -> keyFunc (row.string $"{entityTable}_id"), Map.toRevision row)
/// Parameters for a revision INSERT statement
let revParams<'TKey> (key: 'TKey) rev = [
typedParam "asOf" rev.AsOf
"@id", Sql.string (string key)
"@text", Sql.string (string rev.Text)
]
/// The SQL statement to insert a revision
let insertSql table =
$"INSERT INTO %s{table} VALUES (@id, @asOf, @text)"
/// Update an entity's revisions
let update<'TKey> revTable entityTable (key: 'TKey) oldRevs newRevs = backgroundTask {
let toDelete, toAdd = Utils.diffRevisions oldRevs newRevs
if not (List.isEmpty toDelete) || not (List.isEmpty toAdd) then
let! _ =
Configuration.dataSource ()
|> Sql.fromDataSource
|> Sql.executeTransactionAsync
[ if not (List.isEmpty toDelete) then
$"DELETE FROM %s{revTable} WHERE %s{entityTable}_id = @id AND as_of = @asOf",
toDelete
|> List.map (fun it ->
[ "@id", Sql.string (string key)
typedParam "asOf" it.AsOf ])
if not (List.isEmpty toAdd) then
insertSql revTable, toAdd |> List.map (revParams key) ]
()
}
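A small, hedged illustration of what the two clause builders above return (values are made up; the SQL text is traced from the builder code, and it assumes Permalink's string conversion yields the permalink value, as the calls above rely on):
let tagSql, tagParams = inClause "AND tag" "tag" [ "f#"; "postgres" ]
// tagSql    => "AND tag IN (@tag0, @tag1)"
// tagParams => [ "@tag1", Sql.string "postgres"; "@tag0", Sql.string "f#" ]  (parameter order is not significant)
let linkSql, linkParam = arrayContains (nameof Post.Empty.PriorPermalinks) string [ Permalink "old/page.html" ]
// linkSql   => "data['PriorPermalinks'] ?| @PriorPermalinksValues"
// linkParam => "@PriorPermalinksValues", Sql.stringArray [| "old/page.html" |]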

@ -1,167 +1,128 @@
namespace MyWebLog.Data.Postgres
open BitBadger.Documents
open BitBadger.Documents.Postgres
open Microsoft.Extensions.Logging
open MyWebLog
open MyWebLog.Data
open Newtonsoft.Json
open Npgsql
open Npgsql.FSharp
/// PostgreSQL myWebLog page data implementation
type PostgresPageData (conn : NpgsqlConnection, ser : JsonSerializer) =
/// PostgreSQL myWebLog page data implementation
type PostgresPageData(log: ILogger) =
// SUPPORT FUNCTIONS
/// Append revisions and permalinks to a page
let appendPageRevisions (page : Page) = backgroundTask {
let! revisions =
Sql.existingConnection conn
|> Sql.query "SELECT as_of, revision_text FROM page_revision WHERE page_id = @pageId ORDER BY as_of DESC"
|> Sql.parameters [ "@pageId", Sql.string (PageId.toString page.Id) ]
|> Sql.executeAsync Map.toRevision
/// Append revisions to a page
let appendPageRevisions (page: Page) = backgroundTask {
log.LogTrace "Page.appendPageRevisions"
let! revisions = Revisions.findByEntityId Table.PageRevision Table.Page page.Id
return { page with Revisions = revisions }
}
/// Shorthand to map to a page
let toPage = Map.toPage ser
/// Return a page with no text or revisions
let pageWithoutText row =
{ toPage row with Text = "" }
/// The INSERT statement for a page revision
let revInsert = "INSERT INTO page_revision VALUES (@pageId, @asOf, @text)"
/// Parameters for a revision INSERT statement
let revParams pageId rev = [
typedParam "asOf" rev.AsOf
"@pageId", Sql.string (PageId.toString pageId)
"@text", Sql.string (MarkupText.toString rev.Text)
]
let pageWithoutText (row: RowReader) =
{ fromData<Page> row with Text = "" }
/// Update a page's revisions
let updatePageRevisions pageId oldRevs newRevs = backgroundTask {
let toDelete, toAdd = Utils.diffRevisions oldRevs newRevs
if not (List.isEmpty toDelete) || not (List.isEmpty toAdd) then
let! _ =
Sql.existingConnection conn
|> Sql.executeTransactionAsync [
if not (List.isEmpty toDelete) then
"DELETE FROM page_revision WHERE page_id = @pageId AND as_of = @asOf",
toDelete
|> List.map (fun it -> [
"@pageId", Sql.string (PageId.toString pageId)
typedParam "asOf" it.AsOf
])
if not (List.isEmpty toAdd) then
revInsert, toAdd |> List.map (revParams pageId)
]
()
}
let updatePageRevisions (pageId: PageId) oldRevs newRevs =
log.LogTrace "Page.updatePageRevisions"
Revisions.update Table.PageRevision Table.Page pageId oldRevs newRevs
/// Does the given page exist?
let pageExists pageId webLogId =
Sql.existingConnection conn
|> Sql.query $"SELECT EXISTS (SELECT 1 FROM page WHERE id = @id AND web_log_id = @webLogId) AS {existsName}"
|> Sql.parameters [ "@id", Sql.string (PageId.toString pageId); webLogIdParam webLogId ]
|> Sql.executeRowAsync Map.toExists
let pageExists (pageId: PageId) webLogId =
log.LogTrace "Page.pageExists"
Document.existsByWebLog Table.Page pageId webLogId
// IMPLEMENTATION FUNCTIONS
/// Get all pages for a web log (without text, revisions, prior permalinks, or metadata)
/// Add a page
let add (page: Page) = backgroundTask {
log.LogTrace "Page.add"
do! insert Table.Page { page with Revisions = [] }
do! updatePageRevisions page.Id [] page.Revisions
()
}
/// Get all pages for a web log (without text, metadata, revisions, or prior permalinks)
let all webLogId =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM page WHERE web_log_id = @webLogId ORDER BY LOWER(title)"
|> Sql.parameters [ webLogIdParam webLogId ]
|> Sql.executeAsync pageWithoutText
log.LogTrace "Page.all"
Custom.list
$"{selectWithCriteria Table.Page} ORDER BY LOWER(data ->> '{nameof Page.Empty.Title}')"
[ webLogContains webLogId ]
(fun row -> { fromData<Page> row with Text = ""; Metadata = []; PriorPermalinks = [] })
/// Count all pages for the given web log
let countAll webLogId =
Sql.existingConnection conn
|> Sql.query $"SELECT COUNT(id) AS {countName} FROM page WHERE web_log_id = @webLogId"
|> Sql.parameters [ webLogIdParam webLogId ]
|> Sql.executeRowAsync Map.toCount
log.LogTrace "Page.countAll"
Count.byContains Table.Page (webLogDoc webLogId)
/// Count all pages shown in the page list for the given web log
let countListed webLogId =
Sql.existingConnection conn
|> Sql.query $"
SELECT COUNT(id) AS {countName}
FROM page
WHERE web_log_id = @webLogId
AND is_in_page_list = TRUE"
|> Sql.parameters [ webLogIdParam webLogId ]
|> Sql.executeRowAsync Map.toCount
log.LogTrace "Page.countListed"
Count.byContains Table.Page {| webLogDoc webLogId with IsInPageList = true |}
/// Find a page by its ID (without revisions)
let findById pageId webLogId =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM page WHERE id = @id AND web_log_id = @webLogId"
|> Sql.parameters [ "@id", Sql.string (PageId.toString pageId); webLogIdParam webLogId ]
|> Sql.executeAsync toPage
|> tryHead
/// Find a page by its ID (without revisions or prior permalinks)
let findById pageId webLogId = backgroundTask {
log.LogTrace "Page.findById"
match! Document.findByIdAndWebLog<PageId, Page> Table.Page pageId webLogId with
| Some page -> return Some { page with PriorPermalinks = [] }
| None -> return None
}
/// Find a complete page by its ID
let findFullById pageId webLogId = backgroundTask {
match! findById pageId webLogId with
log.LogTrace "Page.findFullById"
match! Document.findByIdAndWebLog<PageId, Page> Table.Page pageId webLogId with
| Some page ->
let! withMore = appendPageRevisions page
return Some withMore
| None -> return None
}
// TODO: need to handle when the page being deleted is the home page
/// Delete a page by its ID
let delete pageId webLogId = backgroundTask {
log.LogTrace "Page.delete"
match! pageExists pageId webLogId with
| true ->
let! _ =
Sql.existingConnection conn
|> Sql.query
"DELETE FROM page_revision WHERE page_id = @id;
DELETE FROM page WHERE id = @id"
|> Sql.parameters [ "@id", Sql.string (PageId.toString pageId) ]
|> Sql.executeNonQueryAsync
do! Custom.nonQuery
$"""DELETE FROM {Table.PageRevision} WHERE page_id = @id;
DELETE FROM {Table.Page} WHERE {Query.whereById "@id"}"""
[ idParam pageId ]
return true
| false -> return false
}
/// Find a page by its permalink for the given web log
let findByPermalink permalink webLogId =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM page WHERE web_log_id = @webLogId AND permalink = @link"
|> Sql.parameters [ webLogIdParam webLogId; "@link", Sql.string (Permalink.toString permalink) ]
|> Sql.executeAsync toPage
|> tryHead
let findByPermalink (permalink: Permalink) webLogId = backgroundTask {
log.LogTrace "Page.findByPermalink"
let! page =
Find.byContains<Page> Table.Page {| webLogDoc webLogId with Permalink = permalink |}
|> tryHead
return page |> Option.map (fun pg -> { pg with PriorPermalinks = [] })
}
/// Find the current permalink within a set of potential prior permalinks for the given web log
let findCurrentPermalink permalinks webLogId = backgroundTask {
let findCurrentPermalink (permalinks: Permalink list) webLogId = backgroundTask {
log.LogTrace "Page.findCurrentPermalink"
if List.isEmpty permalinks then return None
else
let linkSql, linkParams = arrayInClause "prior_permalinks" Permalink.toString permalinks
let linkSql, linkParam = arrayContains (nameof Page.Empty.PriorPermalinks) string permalinks
return!
Sql.existingConnection conn
|> Sql.query $"SELECT permalink FROM page WHERE web_log_id = @webLogId AND ({linkSql})"
|> Sql.parameters (webLogIdParam webLogId :: linkParams)
|> Sql.executeAsync Map.toPermalink
|> tryHead
Custom.single
$"""SELECT data ->> '{nameof Page.Empty.Permalink}' AS permalink
FROM page
WHERE {Query.whereDataContains "@criteria"}
AND {linkSql}"""
[ webLogContains webLogId; linkParam ]
Map.toPermalink
}
/// Get all complete pages for the given web log
let findFullByWebLog webLogId = backgroundTask {
let! pages =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM page WHERE web_log_id = @webLogId"
|> Sql.parameters [ webLogIdParam webLogId ]
|> Sql.executeAsync toPage
let! revisions =
Sql.existingConnection conn
|> Sql.query
"SELECT *
FROM page_revision pr
INNER JOIN page p ON p.id = pr.page_id
WHERE p.web_log_id = @webLogId
ORDER BY pr.as_of DESC"
|> Sql.parameters [ webLogIdParam webLogId ]
|> Sql.executeAsync (fun row -> PageId (row.string "page_id"), Map.toRevision row)
log.LogTrace "Page.findFullByWebLog"
let! pages = Document.findByWebLog<Page> Table.Page webLogId
let! revisions = Revisions.findByWebLog Table.PageRevision Table.Page PageId webLogId
return
pages
|> List.map (fun it ->
@ -170,101 +131,60 @@ type PostgresPageData (conn : NpgsqlConnection, ser : JsonSerializer) =
/// Get all listed pages for the given web log (without revisions or text)
let findListed webLogId =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM page WHERE web_log_id = @webLogId AND is_in_page_list = TRUE ORDER BY LOWER(title)"
|> Sql.parameters [ webLogIdParam webLogId ]
|> Sql.executeAsync pageWithoutText
log.LogTrace "Page.findListed"
Custom.list
$"{selectWithCriteria Table.Page} ORDER BY LOWER(data ->> '{nameof Page.Empty.Title}')"
[ jsonParam "@criteria" {| webLogDoc webLogId with IsInPageList = true |} ]
pageWithoutText
/// Get a page of pages for the given web log (without revisions)
let findPageOfPages webLogId pageNbr =
Sql.existingConnection conn
|> Sql.query
"SELECT *
FROM page
WHERE web_log_id = @webLogId
ORDER BY LOWER(title)
LIMIT @pageSize OFFSET @toSkip"
|> Sql.parameters [ webLogIdParam webLogId; "@pageSize", Sql.int 26; "@toSkip", Sql.int ((pageNbr - 1) * 25) ]
|> Sql.executeAsync toPage
log.LogTrace "Page.findPageOfPages"
Custom.list
$"{selectWithCriteria Table.Page}
ORDER BY LOWER(data->>'{nameof Page.Empty.Title}')
LIMIT @pageSize OFFSET @toSkip"
[ webLogContains webLogId; "@pageSize", Sql.int 26; "@toSkip", Sql.int ((pageNbr - 1) * 25) ]
(fun row -> { fromData<Page> row with Metadata = []; PriorPermalinks = [] })
/// The INSERT statement for a page
let pageInsert =
"INSERT INTO page (
id, web_log_id, author_id, title, permalink, prior_permalinks, published_on, updated_on, is_in_page_list,
template, page_text, meta_items
) VALUES (
@id, @webLogId, @authorId, @title, @permalink, @priorPermalinks, @publishedOn, @updatedOn, @isInPageList,
@template, @text, @metaItems
)"
/// The parameters for saving a page
let pageParams (page : Page) = [
webLogIdParam page.WebLogId
"@id", Sql.string (PageId.toString page.Id)
"@authorId", Sql.string (WebLogUserId.toString page.AuthorId)
"@title", Sql.string page.Title
"@permalink", Sql.string (Permalink.toString page.Permalink)
"@isInPageList", Sql.bool page.IsInPageList
"@template", Sql.stringOrNone page.Template
"@text", Sql.string page.Text
"@metaItems", Sql.jsonb (Utils.serialize ser page.Metadata)
"@priorPermalinks", Sql.stringArray (page.PriorPermalinks |> List.map Permalink.toString |> Array.ofList)
typedParam "publishedOn" page.PublishedOn
typedParam "updatedOn" page.UpdatedOn
]
/// Restore pages from a backup
let restore (pages : Page list) = backgroundTask {
let restore (pages: Page list) = backgroundTask {
log.LogTrace "Page.restore"
let revisions = pages |> List.collect (fun p -> p.Revisions |> List.map (fun r -> p.Id, r))
let! _ =
Sql.existingConnection conn
|> Sql.executeTransactionAsync [
pageInsert, pages |> List.map pageParams
revInsert, revisions |> List.map (fun (pageId, rev) -> revParams pageId rev)
]
Configuration.dataSource ()
|> Sql.fromDataSource
|> Sql.executeTransactionAsync
[ Query.insert Table.Page,
pages |> List.map (fun page -> [ jsonParam "@data" { page with Revisions = [] } ])
Revisions.insertSql Table.PageRevision,
revisions |> List.map (fun (pageId, rev) -> Revisions.revParams pageId rev) ]
()
}
/// Save a page
let save (page : Page) = backgroundTask {
let! oldPage = findFullById page.Id page.WebLogId
let! _ =
Sql.existingConnection conn
|> Sql.query $"
{pageInsert} ON CONFLICT (id) DO UPDATE
SET author_id = EXCLUDED.author_id,
title = EXCLUDED.title,
permalink = EXCLUDED.permalink,
prior_permalinks = EXCLUDED.prior_permalinks,
published_on = EXCLUDED.published_on,
updated_on = EXCLUDED.updated_on,
is_in_page_list = EXCLUDED.is_in_page_list,
template = EXCLUDED.template,
page_text = EXCLUDED.page_text,
meta_items = EXCLUDED.meta_items"
|> Sql.parameters (pageParams page)
|> Sql.executeNonQueryAsync
do! updatePageRevisions page.Id (match oldPage with Some p -> p.Revisions | None -> []) page.Revisions
/// Update a page
let update (page: Page) = backgroundTask {
log.LogTrace "Page.update"
match! findFullById page.Id page.WebLogId with
| Some oldPage ->
do! Update.byId Table.Page page.Id { page with Revisions = [] }
do! updatePageRevisions page.Id oldPage.Revisions page.Revisions
| None -> ()
()
}
/// Update a page's prior permalinks
let updatePriorPermalinks pageId webLogId permalinks = backgroundTask {
let updatePriorPermalinks pageId webLogId (permalinks: Permalink list) = backgroundTask {
log.LogTrace "Page.updatePriorPermalinks"
match! pageExists pageId webLogId with
| true ->
let! _ =
Sql.existingConnection conn
|> Sql.query "UPDATE page SET prior_permalinks = @prior WHERE id = @id"
|> Sql.parameters
[ "@id", Sql.string (PageId.toString pageId)
"@prior", Sql.stringArray (permalinks |> List.map Permalink.toString |> Array.ofList) ]
|> Sql.executeNonQueryAsync
do! Patch.byId Table.Page pageId {| PriorPermalinks = permalinks |}
return true
| false -> return false
}
interface IPageData with
member _.Add page = save page
member _.Add page = add page
member _.All webLogId = all webLogId
member _.CountAll webLogId = countAll webLogId
member _.CountListed webLogId = countListed webLogId
@ -277,5 +197,5 @@ type PostgresPageData (conn : NpgsqlConnection, ser : JsonSerializer) =
member _.FindListed webLogId = findListed webLogId
member _.FindPageOfPages webLogId pageNbr = findPageOfPages webLogId pageNbr
member _.Restore pages = restore pages
member _.Update page = save page
member _.Update page = update page
member _.UpdatePriorPermalinks pageId webLogId permalinks = updatePriorPermalinks pageId webLogId permalinks
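A consumption sketch, not repository code — the logger, IDs, and titles are placeholders, and only members shown in the interface implementation above are called:
let listPages (log: ILogger) = task {
    let pages = PostgresPageData(log) :> IPageData
    // Add stores the page document without its revisions, then writes the revision rows separately
    do! pages.Add { Page.Empty with Id = PageId "about"; WebLogId = WebLogId "my-log"; Title = "About" }
    let! listed = pages.FindListed (WebLogId "my-log")
    return List.length listed
}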

@ -1,129 +1,77 @@
namespace MyWebLog.Data.Postgres
open BitBadger.Documents
open BitBadger.Documents.Postgres
open Microsoft.Extensions.Logging
open MyWebLog
open MyWebLog.Data
open Newtonsoft.Json
open NodaTime
open Npgsql
open Npgsql.FSharp
/// PostgreSQL myWebLog post data implementation
type PostgresPostData (conn : NpgsqlConnection, ser : JsonSerializer) =
/// PostgreSQL myWebLog post data implementation
type PostgresPostData(log: ILogger) =
// SUPPORT FUNCTIONS
/// Append revisions to a post
let appendPostRevisions (post : Post) = backgroundTask {
let! revisions =
Sql.existingConnection conn
|> Sql.query "SELECT as_of, revision_text FROM post_revision WHERE post_id = @id ORDER BY as_of DESC"
|> Sql.parameters [ "@id", Sql.string (PostId.toString post.Id) ]
|> Sql.executeAsync Map.toRevision
let appendPostRevisions (post: Post) = backgroundTask {
log.LogTrace "Post.appendPostRevisions"
let! revisions = Revisions.findByEntityId Table.PostRevision Table.Post post.Id
return { post with Revisions = revisions }
}
/// The SELECT statement for a post that will include category IDs
let selectPost =
"SELECT *, ARRAY(SELECT cat.category_id FROM post_category cat WHERE cat.post_id = p.id) AS category_ids
FROM post p"
/// Shorthand for mapping to a post
let toPost = Map.toPost ser
/// Return a post with no revisions or prior permalinks
let postWithoutLinks row =
{ fromData<Post> row with PriorPermalinks = [] }
/// Return a post with no revisions, prior permalinks, or text
let postWithoutText row =
{ toPost row with Text = "" }
/// The INSERT statement for a post/category cross-reference
let catInsert = "INSERT INTO post_category VALUES (@postId, @categoryId)"
/// Parameters for adding or updating a post/category cross-reference
let catParams postId cat = [
"@postId", Sql.string (PostId.toString postId)
"categoryId", Sql.string (CategoryId.toString cat)
]
/// Update a post's assigned categories
let updatePostCategories postId oldCats newCats = backgroundTask {
let toDelete, toAdd = Utils.diffLists oldCats newCats CategoryId.toString
if not (List.isEmpty toDelete) || not (List.isEmpty toAdd) then
let! _ =
Sql.existingConnection conn
|> Sql.executeTransactionAsync [
if not (List.isEmpty toDelete) then
"DELETE FROM post_category WHERE post_id = @postId AND category_id = @categoryId",
toDelete |> List.map (catParams postId)
if not (List.isEmpty toAdd) then
catInsert, toAdd |> List.map (catParams postId)
]
()
}
/// The INSERT statement for a post revision
let revInsert = "INSERT INTO post_revision VALUES (@postId, @asOf, @text)"
/// The parameters for adding a post revision
let revParams postId rev = [
typedParam "asOf" rev.AsOf
"@postId", Sql.string (PostId.toString postId)
"@text", Sql.string (MarkupText.toString rev.Text)
]
{ postWithoutLinks row with Text = "" }
/// Update a post's revisions
let updatePostRevisions postId oldRevs newRevs = backgroundTask {
let toDelete, toAdd = Utils.diffRevisions oldRevs newRevs
if not (List.isEmpty toDelete) || not (List.isEmpty toAdd) then
let! _ =
Sql.existingConnection conn
|> Sql.executeTransactionAsync [
if not (List.isEmpty toDelete) then
"DELETE FROM post_revision WHERE post_id = @postId AND as_of = @asOf",
toDelete
|> List.map (fun it -> [
"@postId", Sql.string (PostId.toString postId)
typedParam "asOf" it.AsOf
])
if not (List.isEmpty toAdd) then
revInsert, toAdd |> List.map (revParams postId)
]
()
}
let updatePostRevisions (postId: PostId) oldRevs newRevs =
log.LogTrace "Post.updatePostRevisions"
Revisions.update Table.PostRevision Table.Post postId oldRevs newRevs
/// Does the given post exist?
let postExists postId webLogId =
Sql.existingConnection conn
|> Sql.query $"SELECT EXISTS (SELECT 1 FROM post WHERE id = @id AND web_log_id = @webLogId) AS {existsName}"
|> Sql.parameters [ "@id", Sql.string (PostId.toString postId); webLogIdParam webLogId ]
|> Sql.executeRowAsync Map.toExists
let postExists (postId: PostId) webLogId =
log.LogTrace "Post.postExists"
Document.existsByWebLog Table.Post postId webLogId
// IMPLEMENTATION FUNCTIONS
/// Add a post
let add (post : Post) = backgroundTask {
log.LogTrace "Post.add"
do! insert Table.Post { post with Revisions = [] }
do! updatePostRevisions post.Id [] post.Revisions
}
/// Count posts in a status for the given web log
let countByStatus status webLogId =
Sql.existingConnection conn
|> Sql.query $"SELECT COUNT(id) AS {countName} FROM post WHERE web_log_id = @webLogId AND status = @status"
|> Sql.parameters [ webLogIdParam webLogId; "@status", Sql.string (PostStatus.toString status) ]
|> Sql.executeRowAsync Map.toCount
let countByStatus (status: PostStatus) webLogId =
log.LogTrace "Post.countByStatus"
Count.byContains Table.Post {| webLogDoc webLogId with Status = status |}
/// Find a post by its ID for the given web log (excluding revisions)
let findById postId webLogId =
Sql.existingConnection conn
|> Sql.query $"{selectPost} WHERE id = @id AND web_log_id = @webLogId"
|> Sql.parameters [ "@id", Sql.string (PostId.toString postId); webLogIdParam webLogId ]
|> Sql.executeAsync toPost
|> tryHead
let findById postId webLogId = backgroundTask {
log.LogTrace "Post.findById"
match! Document.findByIdAndWebLog<PostId, Post> Table.Post postId webLogId with
| Some post -> return Some { post with PriorPermalinks = [] }
| None -> return None
}
/// Find a post by its permalink for the given web log (excluding revisions and prior permalinks)
let findByPermalink permalink webLogId =
Sql.existingConnection conn
|> Sql.query $"{selectPost} WHERE web_log_id = @webLogId AND permalink = @link"
|> Sql.parameters [ webLogIdParam webLogId; "@link", Sql.string (Permalink.toString permalink) ]
|> Sql.executeAsync toPost
|> tryHead
/// Find a post by its permalink for the given web log (excluding revisions)
let findByPermalink (permalink: Permalink) webLogId =
log.LogTrace "Post.findByPermalink"
Custom.single
(selectWithCriteria Table.Post)
[ jsonParam "@criteria" {| webLogDoc webLogId with Permalink = permalink |} ]
postWithoutLinks
/// Find a complete post by its ID for the given web log
let findFullById postId webLogId = backgroundTask {
match! findById postId webLogId with
log.LogTrace "Post.findFullById"
match! Document.findByIdAndWebLog<PostId, Post> Table.Post postId webLogId with
| Some post ->
let! withRevisions = appendPostRevisions post
return Some withRevisions
@ -132,50 +80,39 @@ type PostgresPostData (conn : NpgsqlConnection, ser : JsonSerializer) =
/// Delete a post by its ID for the given web log
let delete postId webLogId = backgroundTask {
log.LogTrace "Post.delete"
match! postExists postId webLogId with
| true ->
let! _ =
Sql.existingConnection conn
|> Sql.query
"DELETE FROM post_revision WHERE post_id = @id;
DELETE FROM post_category WHERE post_id = @id;
DELETE FROM post WHERE id = @id"
|> Sql.parameters [ "@id", Sql.string (PostId.toString postId) ]
|> Sql.executeNonQueryAsync
do! Custom.nonQuery
$"""DELETE FROM {Table.PostComment} WHERE {Query.whereDataContains "@criteria"};
DELETE FROM {Table.PostRevision} WHERE post_id = @id;
DELETE FROM {Table.Post} WHERE {Query.whereById "@id"}"""
[ idParam postId; jsonParam "@criteria" {| PostId = postId |} ]
return true
| false -> return false
}
/// Find the current permalink from a list of potential prior permalinks for the given web log
let findCurrentPermalink permalinks webLogId = backgroundTask {
let findCurrentPermalink (permalinks: Permalink list) webLogId = backgroundTask {
log.LogTrace "Post.findCurrentPermalink"
if List.isEmpty permalinks then return None
else
let linkSql, linkParams = arrayInClause "prior_permalinks" Permalink.toString permalinks
let linkSql, linkParam = arrayContains (nameof Post.Empty.PriorPermalinks) string permalinks
return!
Sql.existingConnection conn
|> Sql.query $"SELECT permalink FROM post WHERE web_log_id = @webLogId AND ({linkSql})"
|> Sql.parameters (webLogIdParam webLogId :: linkParams)
|> Sql.executeAsync Map.toPermalink
|> tryHead
Custom.single
$"""SELECT data ->> '{nameof Post.Empty.Permalink}' AS permalink
FROM {Table.Post}
WHERE {Query.whereDataContains "@criteria"}
AND {linkSql}"""
[ webLogContains webLogId; linkParam ]
Map.toPermalink
}
/// Get all complete posts for the given web log
let findFullByWebLog webLogId = backgroundTask {
let! posts =
Sql.existingConnection conn
|> Sql.query $"{selectPost} WHERE web_log_id = @webLogId"
|> Sql.parameters [ webLogIdParam webLogId ]
|> Sql.executeAsync toPost
let! revisions =
Sql.existingConnection conn
|> Sql.query
"SELECT *
FROM post_revision pr
INNER JOIN post p ON p.id = pr.post_id
WHERE p.web_log_id = @webLogId
ORDER BY as_of DESC"
|> Sql.parameters [ webLogIdParam webLogId ]
|> Sql.executeAsync (fun row -> PostId (row.string "post_id"), Map.toRevision row)
log.LogTrace "Post.findFullByWebLog"
let! posts = Document.findByWebLog<Post> Table.Post webLogId
let! revisions = Revisions.findByWebLog Table.PostRevision Table.Post PostId webLogId
return
posts
|> List.map (fun it ->
@ -183,181 +120,102 @@ type PostgresPostData (conn : NpgsqlConnection, ser : JsonSerializer) =
}
/// Get a page of categorized posts for the given web log (excludes revisions)
let findPageOfCategorizedPosts webLogId categoryIds pageNbr postsPerPage =
let catSql, catParams = inClause "AND pc.category_id" "catId" CategoryId.toString categoryIds
Sql.existingConnection conn
|> Sql.query $"
{selectPost}
INNER JOIN post_category pc ON pc.post_id = p.id
WHERE p.web_log_id = @webLogId
AND p.status = @status
{catSql}
ORDER BY published_on DESC
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|> Sql.parameters
[ webLogIdParam webLogId
"@status", Sql.string (PostStatus.toString Published)
yield! catParams ]
|> Sql.executeAsync toPost
let findPageOfCategorizedPosts webLogId (categoryIds: CategoryId list) pageNbr postsPerPage =
log.LogTrace "Post.findPageOfCategorizedPosts"
let catSql, catParam = arrayContains (nameof Post.Empty.CategoryIds) string categoryIds
Custom.list
$"{selectWithCriteria Table.Post}
AND {catSql}
ORDER BY data ->> '{nameof Post.Empty.PublishedOn}' DESC
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
[ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |}; catParam ]
postWithoutLinks
/// Get a page of posts for the given web log (excludes text and revisions)
let findPageOfPosts webLogId pageNbr postsPerPage =
Sql.existingConnection conn
|> Sql.query $"
{selectPost}
WHERE web_log_id = @webLogId
ORDER BY published_on DESC NULLS FIRST, updated_on
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|> Sql.parameters [ webLogIdParam webLogId ]
|> Sql.executeAsync postWithoutText
log.LogTrace "Post.findPageOfPosts"
Custom.list
$"{selectWithCriteria Table.Post}
ORDER BY data ->> '{nameof Post.Empty.PublishedOn}' DESC NULLS FIRST,
data ->> '{nameof Post.Empty.UpdatedOn}'
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
[ webLogContains webLogId ]
postWithoutText
/// Get a page of published posts for the given web log (excludes revisions)
let findPageOfPublishedPosts webLogId pageNbr postsPerPage =
Sql.existingConnection conn
|> Sql.query $"
{selectPost}
WHERE web_log_id = @webLogId
AND status = @status
ORDER BY published_on DESC
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|> Sql.parameters [ webLogIdParam webLogId; "@status", Sql.string (PostStatus.toString Published) ]
|> Sql.executeAsync toPost
log.LogTrace "Post.findPageOfPublishedPosts"
Custom.list
$"{selectWithCriteria Table.Post}
ORDER BY data ->> '{nameof Post.Empty.PublishedOn}' DESC
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
[ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |} ]
postWithoutLinks
/// Get a page of tagged posts for the given web log (excludes revisions and prior permalinks)
let findPageOfTaggedPosts webLogId (tag : string) pageNbr postsPerPage =
Sql.existingConnection conn
|> Sql.query $"
{selectPost}
WHERE web_log_id = @webLogId
AND status = @status
AND tags && ARRAY[@tag]
ORDER BY published_on DESC
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
|> Sql.parameters
[ webLogIdParam webLogId
"@status", Sql.string (PostStatus.toString Published)
"@tag", Sql.string tag
]
|> Sql.executeAsync toPost
let findPageOfTaggedPosts webLogId (tag: string) pageNbr postsPerPage =
log.LogTrace "Post.findPageOfTaggedPosts"
Custom.list
$"{selectWithCriteria Table.Post}
AND data['{nameof Post.Empty.Tags}'] @> @tag
ORDER BY data ->> '{nameof Post.Empty.PublishedOn}' DESC
LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}"
[ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |}; jsonParam "@tag" [| tag |] ]
postWithoutLinks
/// Find the next newest and oldest post from a publish date for the given web log
let findSurroundingPosts webLogId (publishedOn : Instant) = backgroundTask {
let queryParams () = Sql.parameters [
webLogIdParam webLogId
typedParam "publishedOn" publishedOn
"@status", Sql.string (PostStatus.toString Published)
]
let! older =
Sql.existingConnection conn
|> Sql.query $"
{selectPost}
WHERE web_log_id = @webLogId
AND status = @status
AND published_on < @publishedOn
ORDER BY published_on DESC
LIMIT 1"
|> queryParams ()
|> Sql.executeAsync toPost
let! newer =
Sql.existingConnection conn
|> Sql.query $"
{selectPost}
WHERE web_log_id = @webLogId
AND status = @status
AND published_on > @publishedOn
ORDER BY published_on
LIMIT 1"
|> queryParams ()
|> Sql.executeAsync toPost
let findSurroundingPosts webLogId (publishedOn: Instant) = backgroundTask {
log.LogTrace "Post.findSurroundingPosts"
let queryParams () =
[ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |}
"@publishedOn", Sql.timestamptz (publishedOn.ToDateTimeOffset()) ]
let query op direction =
$"{selectWithCriteria Table.Post}
AND (data ->> '{nameof Post.Empty.PublishedOn}')::timestamp with time zone %s{op} @publishedOn
ORDER BY data ->> '{nameof Post.Empty.PublishedOn}' %s{direction}
LIMIT 1"
let! older = Custom.list (query "<" "DESC") (queryParams ()) postWithoutLinks
let! newer = Custom.list (query ">" "") (queryParams ()) postWithoutLinks
return List.tryHead older, List.tryHead newer
}
/// The INSERT statement for a post
let postInsert =
"INSERT INTO post (
id, web_log_id, author_id, status, title, permalink, prior_permalinks, published_on, updated_on,
template, post_text, tags, meta_items, episode
) VALUES (
@id, @webLogId, @authorId, @status, @title, @permalink, @priorPermalinks, @publishedOn, @updatedOn,
@template, @text, @tags, @metaItems, @episode
)"
/// The parameters for saving a post
let postParams (post : Post) = [
webLogIdParam post.WebLogId
"@id", Sql.string (PostId.toString post.Id)
"@authorId", Sql.string (WebLogUserId.toString post.AuthorId)
"@status", Sql.string (PostStatus.toString post.Status)
"@title", Sql.string post.Title
"@permalink", Sql.string (Permalink.toString post.Permalink)
"@template", Sql.stringOrNone post.Template
"@text", Sql.string post.Text
"@priorPermalinks", Sql.stringArray (post.PriorPermalinks |> List.map Permalink.toString |> Array.ofList)
"@episode", Sql.jsonbOrNone (post.Episode |> Option.map (Utils.serialize ser))
"@tags", Sql.stringArrayOrNone (if List.isEmpty post.Tags then None else Some (Array.ofList post.Tags))
"@metaItems",
if List.isEmpty post.Metadata then None else Some (Utils.serialize ser post.Metadata)
|> Sql.jsonbOrNone
optParam "publishedOn" post.PublishedOn
typedParam "updatedOn" post.UpdatedOn
]
/// Save a post
let save (post : Post) = backgroundTask {
let! oldPost = findFullById post.Id post.WebLogId
let! _ =
Sql.existingConnection conn
|> Sql.query $"
{postInsert} ON CONFLICT (id) DO UPDATE
SET author_id = EXCLUDED.author_id,
status = EXCLUDED.status,
title = EXCLUDED.title,
permalink = EXCLUDED.permalink,
prior_permalinks = EXCLUDED.prior_permalinks,
published_on = EXCLUDED.published_on,
updated_on = EXCLUDED.updated_on,
template = EXCLUDED.template,
post_text = EXCLUDED.post_text,
tags = EXCLUDED.tags,
meta_items = EXCLUDED.meta_items,
episode = EXCLUDED.episode"
|> Sql.parameters (postParams post)
|> Sql.executeNonQueryAsync
do! updatePostCategories post.Id (match oldPost with Some p -> p.CategoryIds | None -> []) post.CategoryIds
do! updatePostRevisions post.Id (match oldPost with Some p -> p.Revisions | None -> []) post.Revisions
/// Update a post
let update (post : Post) = backgroundTask {
log.LogTrace "Post.save"
match! findFullById post.Id post.WebLogId with
| Some oldPost ->
do! Update.byId Table.Post post.Id { post with Revisions = [] }
do! updatePostRevisions post.Id oldPost.Revisions post.Revisions
| None -> ()
}
/// Restore posts from a backup
let restore posts = backgroundTask {
let cats = posts |> List.collect (fun p -> p.CategoryIds |> List.map (fun c -> p.Id, c))
let revisions = posts |> List.collect (fun p -> p.Revisions |> List.map (fun r -> p.Id, r))
log.LogTrace "Post.restore"
let revisions = posts |> List.collect (fun p -> p.Revisions |> List.map (fun r -> p.Id, r))
let! _ =
Sql.existingConnection conn
|> Sql.executeTransactionAsync [
postInsert, posts |> List.map postParams
catInsert, cats |> List.map (fun (postId, catId) -> catParams postId catId)
revInsert, revisions |> List.map (fun (postId, rev) -> revParams postId rev)
]
Configuration.dataSource ()
|> Sql.fromDataSource
|> Sql.executeTransactionAsync
[ Query.insert Table.Post,
posts |> List.map (fun post -> [ jsonParam "@data" { post with Revisions = [] } ])
Revisions.insertSql Table.PostRevision,
revisions |> List.map (fun (postId, rev) -> Revisions.revParams postId rev) ]
()
}
/// Update prior permalinks for a post
let updatePriorPermalinks postId webLogId permalinks = backgroundTask {
let updatePriorPermalinks postId webLogId (permalinks: Permalink list) = backgroundTask {
log.LogTrace "Post.updatePriorPermalinks"
match! postExists postId webLogId with
| true ->
let! _ =
Sql.existingConnection conn
|> Sql.query "UPDATE post SET prior_permalinks = @prior WHERE id = @id"
|> Sql.parameters
[ "@id", Sql.string (PostId.toString postId)
"@prior", Sql.stringArray (permalinks |> List.map Permalink.toString |> Array.ofList) ]
|> Sql.executeNonQueryAsync
do! Patch.byId Table.Post postId {| PriorPermalinks = permalinks |}
return true
| false -> return false
}
interface IPostData with
member _.Add post = save post
member _.Add post = add post
member _.CountByStatus status webLogId = countByStatus status webLogId
member _.Delete postId webLogId = delete postId webLogId
member _.FindById postId webLogId = findById postId webLogId
@ -374,5 +232,5 @@ type PostgresPostData (conn : NpgsqlConnection, ser : JsonSerializer) =
findPageOfTaggedPosts webLogId tag pageNbr postsPerPage
member _.FindSurroundingPosts webLogId publishedOn = findSurroundingPosts webLogId publishedOn
member _.Restore posts = restore posts
member _.Update post = save post
member _.Update post = update post
member _.UpdatePriorPermalinks postId webLogId permalinks = updatePriorPermalinks postId webLogId permalinks
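One design note the queries above share: each page-of-posts query fetches postsPerPage + 1 rows, letting the caller detect a further page without a separate count query. A tiny sketch of that convention (helper names are illustrative, not from the repository):
let hasNextPage (posts: Post list) postsPerPage = List.length posts > postsPerPage
let currentPage (posts: Post list) postsPerPage = List.truncate postsPerPage posts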

@ -1,101 +1,65 @@
namespace MyWebLog.Data.Postgres
open BitBadger.Documents
open BitBadger.Documents.Postgres
open Microsoft.Extensions.Logging
open MyWebLog
open MyWebLog.Data
open Npgsql
open Npgsql.FSharp
/// PostgreSQL myWebLog tag mapping data implementation
type PostgresTagMapData (conn : NpgsqlConnection) =
/// PostgreSQL myWebLog tag mapping data implementation
type PostgresTagMapData(log: ILogger) =
/// Find a tag mapping by its ID for the given web log
let findById tagMapId webLogId =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM tag_map WHERE id = @id AND web_log_id = @webLogId"
|> Sql.parameters [ "@id", Sql.string (TagMapId.toString tagMapId); webLogIdParam webLogId ]
|> Sql.executeAsync Map.toTagMap
|> tryHead
log.LogTrace "TagMap.findById"
Document.findByIdAndWebLog<TagMapId, TagMap> Table.TagMap tagMapId webLogId
/// Delete a tag mapping for the given web log
let delete tagMapId webLogId = backgroundTask {
let idParams = [ "@id", Sql.string (TagMapId.toString tagMapId) ]
let! exists =
Sql.existingConnection conn
|> Sql.query $"
SELECT EXISTS
(SELECT 1 FROM tag_map WHERE id = @id AND web_log_id = @webLogId)
AS {existsName}"
|> Sql.parameters (webLogIdParam webLogId :: idParams)
|> Sql.executeRowAsync Map.toExists
let delete (tagMapId: TagMapId) webLogId = backgroundTask {
log.LogTrace "TagMap.delete"
let! exists = Document.existsByWebLog Table.TagMap tagMapId webLogId
if exists then
let! _ =
Sql.existingConnection conn
|> Sql.query "DELETE FROM tag_map WHERE id = @id"
|> Sql.parameters idParams
|> Sql.executeNonQueryAsync
do! Delete.byId Table.TagMap tagMapId
return true
else return false
}
/// Find a tag mapping by its URL value for the given web log
let findByUrlValue urlValue webLogId =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM tag_map WHERE web_log_id = @webLogId AND url_value = @urlValue"
|> Sql.parameters [ webLogIdParam webLogId; "@urlValue", Sql.string urlValue ]
|> Sql.executeAsync Map.toTagMap
|> tryHead
let findByUrlValue (urlValue: string) webLogId =
log.LogTrace "TagMap.findByUrlValue"
Find.firstByContains<TagMap> Table.TagMap {| webLogDoc webLogId with UrlValue = urlValue |}
/// Get all tag mappings for the given web log
let findByWebLog webLogId =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM tag_map WHERE web_log_id = @webLogId ORDER BY tag"
|> Sql.parameters [ webLogIdParam webLogId ]
|> Sql.executeAsync Map.toTagMap
log.LogTrace "TagMap.findByWebLog"
Custom.list
$"{selectWithCriteria Table.TagMap} ORDER BY data ->> 'tag'"
[ webLogContains webLogId ]
fromData<TagMap>
/// Find any tag mappings in a list of tags for the given web log
let findMappingForTags tags webLogId =
let tagSql, tagParams = inClause "AND tag" "tag" id tags
Sql.existingConnection conn
|> Sql.query $"SELECT * FROM tag_map WHERE web_log_id = @webLogId {tagSql}"
|> Sql.parameters (webLogIdParam webLogId :: tagParams)
|> Sql.executeAsync Map.toTagMap
/// The INSERT statement for a tag mapping
let tagMapInsert =
"INSERT INTO tag_map (
id, web_log_id, tag, url_value
) VALUES (
@id, @webLogId, @tag, @urlValue
)"
/// The parameters for saving a tag mapping
let tagMapParams (tagMap : TagMap) = [
webLogIdParam tagMap.WebLogId
"@id", Sql.string (TagMapId.toString tagMap.Id)
"@tag", Sql.string tagMap.Tag
"@urlValue", Sql.string tagMap.UrlValue
]
log.LogTrace "TagMap.findMappingForTags"
let tagSql, tagParam = arrayContains (nameof TagMap.Empty.Tag) id tags
Custom.list
$"{selectWithCriteria Table.TagMap} AND {tagSql}"
[ webLogContains webLogId; tagParam ]
fromData<TagMap>
/// Save a tag mapping
let save tagMap = backgroundTask {
let! _ =
Sql.existingConnection conn
|> Sql.query $"
{tagMapInsert} ON CONFLICT (id) DO UPDATE
SET tag = EXCLUDED.tag,
url_value = EXCLUDED.url_value"
|> Sql.parameters (tagMapParams tagMap)
|> Sql.executeNonQueryAsync
()
}
let save (tagMap: TagMap) =
log.LogTrace "TagMap.save"
save Table.TagMap tagMap
/// Restore tag mappings from a backup
let restore tagMaps = backgroundTask {
let restore (tagMaps: TagMap list) = backgroundTask {
let! _ =
Sql.existingConnection conn
|> Sql.executeTransactionAsync [
tagMapInsert, tagMaps |> List.map tagMapParams
]
Configuration.dataSource ()
|> Sql.fromDataSource
|> Sql.executeTransactionAsync
[ Query.insert Table.TagMap,
tagMaps |> List.map (fun tagMap -> [ jsonParam "@data" tagMap ]) ]
()
}

@ -1,132 +1,64 @@
namespace MyWebLog.Data.Postgres
open BitBadger.Documents
open BitBadger.Documents.Postgres
open Microsoft.Extensions.Logging
open MyWebLog
open MyWebLog.Data
open Npgsql
open Npgsql.FSharp
/// PostgreSQL myWebLog theme data implementation
type PostgresThemeData (conn : NpgsqlConnection) =
/// PostgreSQL myWebLog theme data implementation
type PostgresThemeData(log: ILogger) =
/// Clear out the template text from a theme
let withoutTemplateText row =
let theme = fromData<Theme> row
{ theme with Templates = theme.Templates |> List.map (fun template -> { template with Text = "" }) }
/// Retrieve all themes (except 'admin'; excludes template text)
let all () = backgroundTask {
let! themes =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM theme WHERE id <> 'admin' ORDER BY id"
|> Sql.executeAsync Map.toTheme
let! templates =
Sql.existingConnection conn
|> Sql.query "SELECT name, theme_id FROM theme_template WHERE theme_id <> 'admin' ORDER BY name"
|> Sql.executeAsync (fun row -> ThemeId (row.string "theme_id"), Map.toThemeTemplate false row)
return
themes
|> List.map (fun t ->
{ t with Templates = templates |> List.filter (fun tt -> fst tt = t.Id) |> List.map snd })
}
let all () =
log.LogTrace "Theme.all"
Custom.list
$"{Query.selectFromTable Table.Theme}
WHERE data ->> '{nameof Theme.Empty.Id}' <> 'admin'
ORDER BY data ->> '{nameof Theme.Empty.Id}'"
[]
withoutTemplateText
/// Does a given theme exist?
let exists themeId =
Sql.existingConnection conn
|> Sql.query "SELECT EXISTS (SELECT 1 FROM theme WHERE id = @id) AS does_exist"
|> Sql.parameters [ "@id", Sql.string (ThemeId.toString themeId) ]
|> Sql.executeRowAsync Map.toExists
let exists (themeId: ThemeId) =
log.LogTrace "Theme.exists"
Exists.byId Table.Theme themeId
/// Find a theme by its ID
let findById themeId = backgroundTask {
let themeIdParam = [ "@id", Sql.string (ThemeId.toString themeId) ]
let! theme =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM theme WHERE id = @id"
|> Sql.parameters themeIdParam
|> Sql.executeAsync Map.toTheme
|> tryHead
if Option.isSome theme then
let! templates =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM theme_template WHERE theme_id = @id"
|> Sql.parameters themeIdParam
|> Sql.executeAsync (Map.toThemeTemplate true)
return Some { theme.Value with Templates = templates }
else return None
}
let findById (themeId: ThemeId) =
log.LogTrace "Theme.findById"
Find.byId<ThemeId, Theme> Table.Theme themeId
/// Find a theme by its ID (excludes the text of templates)
let findByIdWithoutText themeId = backgroundTask {
match! findById themeId with
| Some theme ->
return Some {
theme with Templates = theme.Templates |> List.map (fun t -> { t with Text = "" })
}
| None -> return None
}
let findByIdWithoutText (themeId: ThemeId) =
log.LogTrace "Theme.findByIdWithoutText"
Custom.single (Query.Find.byId Table.Theme) [ idParam themeId ] withoutTemplateText
/// Delete a theme by its ID
let delete themeId = backgroundTask {
let idParams = [ "@id", Sql.string (ThemeId.toString themeId) ]
let! exists =
Sql.existingConnection conn
|> Sql.query $"SELECT EXISTS (SELECT 1 FROM theme WHERE id = @id) AS {existsName}"
|> Sql.parameters idParams
|> Sql.executeRowAsync Map.toExists
if exists then
let! _ =
Sql.existingConnection conn
|> Sql.query
"DELETE FROM theme_asset WHERE theme_id = @id;
DELETE FROM theme_template WHERE theme_id = @id;
DELETE FROM theme WHERE id = @id"
|> Sql.parameters idParams
|> Sql.executeNonQueryAsync
log.LogTrace "Theme.delete"
match! exists themeId with
| true ->
do! Custom.nonQuery
$"""DELETE FROM {Table.ThemeAsset} WHERE theme_id = @id;
DELETE FROM {Table.Theme} WHERE {Query.whereById "@id"}"""
[ idParam themeId ]
return true
else return false
| false -> return false
}
/// Save a theme
let save (theme : Theme) = backgroundTask {
let! oldTheme = findById theme.Id
let themeIdParam = Sql.string (ThemeId.toString theme.Id)
let! _ =
Sql.existingConnection conn
|> Sql.query
"INSERT INTO theme VALUES (@id, @name, @version)
ON CONFLICT (id) DO UPDATE
SET name = EXCLUDED.name,
version = EXCLUDED.version"
|> Sql.parameters
[ "@id", themeIdParam
"@name", Sql.string theme.Name
"@version", Sql.string theme.Version ]
|> Sql.executeNonQueryAsync
let toDelete, _ =
Utils.diffLists (oldTheme |> Option.map (fun t -> t.Templates) |> Option.defaultValue [])
theme.Templates (fun t -> t.Name)
let toAddOrUpdate =
theme.Templates
|> List.filter (fun t -> not (toDelete |> List.exists (fun d -> d.Name = t.Name)))
if not (List.isEmpty toDelete) || not (List.isEmpty toAddOrUpdate) then
let! _ =
Sql.existingConnection conn
|> Sql.executeTransactionAsync [
if not (List.isEmpty toDelete) then
"DELETE FROM theme_template WHERE theme_id = @themeId AND name = @name",
toDelete |> List.map (fun tmpl -> [ "@themeId", themeIdParam; "@name", Sql.string tmpl.Name ])
if not (List.isEmpty toAddOrUpdate) then
"INSERT INTO theme_template VALUES (@themeId, @name, @template)
ON CONFLICT (theme_id, name) DO UPDATE
SET template = EXCLUDED.template",
toAddOrUpdate |> List.map (fun tmpl -> [
"@themeId", themeIdParam
"@name", Sql.string tmpl.Name
"@template", Sql.string tmpl.Text
])
]
()
}
let save (theme: Theme) =
log.LogTrace "Theme.save"
save Table.Theme theme
interface IThemeData with
member _.All () = all ()
member _.All() = all ()
member _.Delete themeId = delete themeId
member _.Exists themeId = exists themeId
member _.FindById themeId = findById themeId
@ -134,72 +66,60 @@ type PostgresThemeData (conn : NpgsqlConnection) =
member _.Save theme = save theme
/// PostgreSQL myWebLog theme asset data implementation
type PostgresThemeAssetData (conn : NpgsqlConnection) =
/// PostgreSQL myWebLog theme asset data implementation
type PostgresThemeAssetData(log: ILogger) =
/// Get all theme assets (excludes data)
let all () =
Sql.existingConnection conn
|> Sql.query "SELECT theme_id, path, updated_on FROM theme_asset"
|> Sql.executeAsync (Map.toThemeAsset false)
log.LogTrace "ThemeAsset.all"
Custom.list $"SELECT theme_id, path, updated_on FROM {Table.ThemeAsset}" [] (Map.toThemeAsset false)
/// Delete all assets for the given theme
let deleteByTheme themeId = backgroundTask {
let! _ =
Sql.existingConnection conn
|> Sql.query "DELETE FROM theme_asset WHERE theme_id = @themeId"
|> Sql.parameters [ "@themeId", Sql.string (ThemeId.toString themeId) ]
|> Sql.executeNonQueryAsync
()
}
let deleteByTheme (themeId: ThemeId) =
log.LogTrace "ThemeAsset.deleteByTheme"
Custom.nonQuery $"DELETE FROM {Table.ThemeAsset} WHERE theme_id = @id" [ idParam themeId ]
/// Find a theme asset by its ID
let findById assetId =
log.LogTrace "ThemeAsset.findById"
let (ThemeAssetId (ThemeId themeId, path)) = assetId
Sql.existingConnection conn
|> Sql.query "SELECT * FROM theme_asset WHERE theme_id = @themeId AND path = @path"
|> Sql.parameters [ "@themeId", Sql.string themeId; "@path", Sql.string path ]
|> Sql.executeAsync (Map.toThemeAsset true)
|> tryHead
Custom.single
$"SELECT * FROM {Table.ThemeAsset} WHERE theme_id = @id AND path = @path"
[ idParam themeId; "@path", Sql.string path ]
(Map.toThemeAsset true)
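// Usage sketch (theme and path values are examples only): an asset ID pairs the owning
// theme's ID with the asset's relative path.
let findDefaultStylesheet () =
    findById (ThemeAssetId (ThemeId "default", "style.css"))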
/// Get theme assets for the given theme (excludes data)
let findByTheme themeId =
Sql.existingConnection conn
|> Sql.query "SELECT theme_id, path, updated_on FROM theme_asset WHERE theme_id = @themeId"
|> Sql.parameters [ "@themeId", Sql.string (ThemeId.toString themeId) ]
|> Sql.executeAsync (Map.toThemeAsset false)
let findByTheme (themeId: ThemeId) =
log.LogTrace "ThemeAsset.findByTheme"
Custom.list
$"SELECT theme_id, path, updated_on FROM {Table.ThemeAsset} WHERE theme_id = @id"
[ idParam themeId ]
(Map.toThemeAsset false)
/// Get theme assets for the given theme
let findByThemeWithData themeId =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM theme_asset WHERE theme_id = @themeId"
|> Sql.parameters [ "@themeId", Sql.string (ThemeId.toString themeId) ]
|> Sql.executeAsync (Map.toThemeAsset true)
let findByThemeWithData (themeId: ThemeId) =
log.LogTrace "ThemeAsset.findByThemeWithData"
Custom.list $"SELECT * FROM {Table.ThemeAsset} WHERE theme_id = @id" [ idParam themeId ] (Map.toThemeAsset true)
/// Save a theme asset
let save (asset : ThemeAsset) = backgroundTask {
let save (asset: ThemeAsset) =
log.LogTrace "ThemeAsset.save"
let (ThemeAssetId (ThemeId themeId, path)) = asset.Id
let! _ =
Sql.existingConnection conn
|> Sql.query
"INSERT INTO theme_asset (
theme_id, path, updated_on, data
) VALUES (
@themeId, @path, @updatedOn, @data
) ON CONFLICT (theme_id, path) DO UPDATE
SET updated_on = EXCLUDED.updated_on,
data = EXCLUDED.data"
|> Sql.parameters
[ "@themeId", Sql.string themeId
"@path", Sql.string path
"@data", Sql.bytea asset.Data
typedParam "updatedOn" asset.UpdatedOn ]
|> Sql.executeNonQueryAsync
()
}
Custom.nonQuery
$"INSERT INTO {Table.ThemeAsset} (
theme_id, path, updated_on, data
) VALUES (
@themeId, @path, @updatedOn, @data
) ON CONFLICT (theme_id, path) DO UPDATE
SET updated_on = EXCLUDED.updated_on,
data = EXCLUDED.data"
[ "@themeId", Sql.string themeId
"@path", Sql.string path
"@data", Sql.bytea asset.Data
typedParam "updatedOn" asset.UpdatedOn ]
interface IThemeAssetData with
member _.All () = all ()
member _.All() = all ()
member _.DeleteByTheme themeId = deleteByTheme themeId
member _.FindById assetId = findById assetId
member _.FindByTheme themeId = findByTheme themeId

View File

@ -1,89 +1,82 @@
namespace MyWebLog.Data.Postgres
open BitBadger.Documents.Postgres
open Microsoft.Extensions.Logging
open MyWebLog
open MyWebLog.Data
open Npgsql
open Npgsql.FSharp
/// PostgreSQL myWebLog uploaded file data implementation
type PostgresUploadData (conn : NpgsqlConnection) =
/// PostgreSQL myWebLog uploaded file data implementation
type PostgresUploadData(log: ILogger) =
/// The INSERT statement for an uploaded file
let upInsert =
"INSERT INTO upload (
let upInsert = $"
INSERT INTO {Table.Upload} (
id, web_log_id, path, updated_on, data
) VALUES (
@id, @webLogId, @path, @updatedOn, @data
)"
/// Parameters for adding an uploaded file
let upParams (upload : Upload) = [
webLogIdParam upload.WebLogId
typedParam "updatedOn" upload.UpdatedOn
"@id", Sql.string (UploadId.toString upload.Id)
"@path", Sql.string (Permalink.toString upload.Path)
"@data", Sql.bytea upload.Data
]
let upParams (upload: Upload) =
[ webLogIdParam upload.WebLogId
typedParam "updatedOn" upload.UpdatedOn
idParam upload.Id
"@path", Sql.string (string upload.Path)
"@data", Sql.bytea upload.Data ]
/// Save an uploaded file
let add upload = backgroundTask {
let! _ =
Sql.existingConnection conn
|> Sql.query upInsert
|> Sql.parameters (upParams upload)
|> Sql.executeNonQueryAsync
()
}
let add upload =
log.LogTrace "Upload.add"
Custom.nonQuery upInsert (upParams upload)
/// Delete an uploaded file by its ID
let delete uploadId webLogId = backgroundTask {
let theParams = [ "@id", Sql.string (UploadId.toString uploadId); webLogIdParam webLogId ]
log.LogTrace "Upload.delete"
let idParam = [ idParam uploadId ]
let! path =
Sql.existingConnection conn
|> Sql.query "SELECT path FROM upload WHERE id = @id AND web_log_id = @webLogId"
|> Sql.parameters theParams
|> Sql.executeAsync (fun row -> row.string "path")
|> tryHead
Custom.single
$"SELECT path FROM {Table.Upload} WHERE id = @id AND web_log_id = @webLogId"
(webLogIdParam webLogId :: idParam)
(fun row -> row.string "path")
if Option.isSome path then
let! _ =
Sql.existingConnection conn
|> Sql.query "DELETE FROM upload WHERE id = @id AND web_log_id = @webLogId"
|> Sql.parameters theParams
|> Sql.executeNonQueryAsync
do! Custom.nonQuery $"DELETE FROM {Table.Upload} WHERE id = @id" idParam
return Ok path.Value
else return Error $"""Upload ID {UploadId.toString uploadId} not found"""
else return Error $"Upload ID {uploadId} not found"
}
/// Find an uploaded file by its path for the given web log
let findByPath path webLogId =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM upload WHERE web_log_id = @webLogId AND path = @path"
|> Sql.parameters [ webLogIdParam webLogId; "@path", Sql.string path ]
|> Sql.executeAsync (Map.toUpload true)
|> tryHead
log.LogTrace "Upload.findByPath"
Custom.single
$"SELECT * FROM {Table.Upload} WHERE web_log_id = @webLogId AND path = @path"
[ webLogIdParam webLogId; "@path", Sql.string path ]
(Map.toUpload true)
/// Find all uploaded files for the given web log (excludes data)
let findByWebLog webLogId =
Sql.existingConnection conn
|> Sql.query "SELECT id, web_log_id, path, updated_on FROM upload WHERE web_log_id = @webLogId"
|> Sql.parameters [ webLogIdParam webLogId ]
|> Sql.executeAsync (Map.toUpload false)
log.LogTrace "Upload.findByWebLog"
Custom.list
$"SELECT id, web_log_id, path, updated_on FROM {Table.Upload} WHERE web_log_id = @webLogId"
[ webLogIdParam webLogId ]
(Map.toUpload false)
/// Find all uploaded files for the given web log
let findByWebLogWithData webLogId =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM upload WHERE web_log_id = @webLogId"
|> Sql.parameters [ webLogIdParam webLogId ]
|> Sql.executeAsync (Map.toUpload true)
log.LogTrace "Upload.findByWebLogWithData"
Custom.list
$"SELECT * FROM {Table.Upload} WHERE web_log_id = @webLogId"
[ webLogIdParam webLogId ]
(Map.toUpload true)
/// Restore uploads from a backup
let restore uploads = backgroundTask {
log.LogTrace "Upload.restore"
for batch in uploads |> List.chunkBySize 5 do
let! _ =
Sql.existingConnection conn
|> Sql.executeTransactionAsync [
upInsert, batch |> List.map upParams
]
Configuration.dataSource ()
|> Sql.fromDataSource
|> Sql.executeTransactionAsync [ upInsert, batch |> List.map upParams ]
()
}

View File

@ -1,238 +1,83 @@
namespace MyWebLog.Data.Postgres
open BitBadger.Documents
open BitBadger.Documents.Postgres
open Microsoft.Extensions.Logging
open MyWebLog
open MyWebLog.Data
open Newtonsoft.Json
open Npgsql
open Npgsql.FSharp
/// PostgreSQL myWebLog web log data implementation
type PostgresWebLogData (conn : NpgsqlConnection, ser : JsonSerializer) =
// SUPPORT FUNCTIONS
/// The parameters for web log INSERT or web log/RSS options UPDATE statements
let rssParams (webLog : WebLog) = [
"@isFeedEnabled", Sql.bool webLog.Rss.IsFeedEnabled
"@feedName", Sql.string webLog.Rss.FeedName
"@itemsInFeed", Sql.intOrNone webLog.Rss.ItemsInFeed
"@isCategoryEnabled", Sql.bool webLog.Rss.IsCategoryEnabled
"@isTagEnabled", Sql.bool webLog.Rss.IsTagEnabled
"@copyright", Sql.stringOrNone webLog.Rss.Copyright
]
/// The parameters for web log INSERT or UPDATE statements
let webLogParams (webLog : WebLog) = [
"@id", Sql.string (WebLogId.toString webLog.Id)
"@name", Sql.string webLog.Name
"@slug", Sql.string webLog.Slug
"@subtitle", Sql.stringOrNone webLog.Subtitle
"@defaultPage", Sql.string webLog.DefaultPage
"@postsPerPage", Sql.int webLog.PostsPerPage
"@themeId", Sql.string (ThemeId.toString webLog.ThemeId)
"@urlBase", Sql.string webLog.UrlBase
"@timeZone", Sql.string webLog.TimeZone
"@autoHtmx", Sql.bool webLog.AutoHtmx
"@uploads", Sql.string (UploadDestination.toString webLog.Uploads)
yield! rssParams webLog
]
/// Shorthand to map a result to a custom feed
let toCustomFeed =
Map.toCustomFeed ser
/// Get the current custom feeds for a web log
let getCustomFeeds (webLog : WebLog) =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM web_log_feed WHERE web_log_id = @webLogId"
|> Sql.parameters [ webLogIdParam webLog.Id ]
|> Sql.executeAsync toCustomFeed
/// Append custom feeds to a web log
let appendCustomFeeds (webLog : WebLog) = backgroundTask {
let! feeds = getCustomFeeds webLog
return { webLog with Rss = { webLog.Rss with CustomFeeds = feeds } }
}
/// The parameters to save a custom feed
let feedParams webLogId (feed : CustomFeed) = [
webLogIdParam webLogId
"@id", Sql.string (CustomFeedId.toString feed.Id)
"@source", Sql.string (CustomFeedSource.toString feed.Source)
"@path", Sql.string (Permalink.toString feed.Path)
"@podcast", Sql.jsonbOrNone (feed.Podcast |> Option.map (Utils.serialize ser))
]
/// Update the custom feeds for a web log
let updateCustomFeeds (webLog : WebLog) = backgroundTask {
let! feeds = getCustomFeeds webLog
let toDelete, _ = Utils.diffLists feeds webLog.Rss.CustomFeeds (fun it -> $"{CustomFeedId.toString it.Id}")
let toId (feed : CustomFeed) = feed.Id
let toAddOrUpdate =
webLog.Rss.CustomFeeds |> List.filter (fun f -> not (toDelete |> List.map toId |> List.contains f.Id))
if not (List.isEmpty toDelete) || not (List.isEmpty toAddOrUpdate) then
let! _ =
Sql.existingConnection conn
|> Sql.executeTransactionAsync [
if not (List.isEmpty toDelete) then
"DELETE FROM web_log_feed WHERE id = @id",
toDelete |> List.map (fun it -> [ "@id", Sql.string (CustomFeedId.toString it.Id) ])
if not (List.isEmpty toAddOrUpdate) then
"INSERT INTO web_log_feed (
id, web_log_id, source, path, podcast
) VALUES (
@id, @webLogId, @source, @path, @podcast
) ON CONFLICT (id) DO UPDATE
SET source = EXCLUDED.source,
path = EXCLUDED.path,
podcast = EXCLUDED.podcast",
toAddOrUpdate |> List.map (feedParams webLog.Id)
]
()
}
// IMPLEMENTATION FUNCTIONS
/// PostgreSQL myWebLog web log data implementation
type PostgresWebLogData(log: ILogger) =
/// Add a web log
let add webLog = backgroundTask {
let! _ =
Sql.existingConnection conn
|> Sql.query
"INSERT INTO web_log (
id, name, slug, subtitle, default_page, posts_per_page, theme_id, url_base, time_zone, auto_htmx,
uploads, is_feed_enabled, feed_name, items_in_feed, is_category_enabled, is_tag_enabled, copyright
) VALUES (
@id, @name, @slug, @subtitle, @defaultPage, @postsPerPage, @themeId, @urlBase, @timeZone, @autoHtmx,
@uploads, @isFeedEnabled, @feedName, @itemsInFeed, @isCategoryEnabled, @isTagEnabled, @copyright
)"
|> Sql.parameters (webLogParams webLog)
|> Sql.executeNonQueryAsync
do! updateCustomFeeds webLog
}
let add (webLog: WebLog) =
log.LogTrace "WebLog.add"
insert Table.WebLog webLog
/// Retrieve all web logs
let all () = backgroundTask {
let! webLogs =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM web_log"
|> Sql.executeAsync Map.toWebLog
let! feeds =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM web_log_feed"
|> Sql.executeAsync (fun row -> WebLogId (row.string "web_log_id"), toCustomFeed row)
return
webLogs
|> List.map (fun it ->
{ it with
Rss =
{ it.Rss with
CustomFeeds = feeds |> List.filter (fun (wlId, _) -> wlId = it.Id) |> List.map snd } })
}
let all () =
log.LogTrace "WebLog.all"
Find.all<WebLog> Table.WebLog
/// Delete a web log by its ID
let delete webLogId = backgroundTask {
let subQuery table = $"(SELECT id FROM {table} WHERE web_log_id = @webLogId)"
let postSubQuery = subQuery "post"
let pageSubQuery = subQuery "page"
let! _ =
Sql.existingConnection conn
|> Sql.query $"
DELETE FROM post_comment WHERE post_id IN {postSubQuery};
DELETE FROM post_revision WHERE post_id IN {postSubQuery};
DELETE FROM post_category WHERE post_id IN {postSubQuery};
DELETE FROM post WHERE web_log_id = @webLogId;
DELETE FROM page_revision WHERE page_id IN {pageSubQuery};
DELETE FROM page WHERE web_log_id = @webLogId;
DELETE FROM category WHERE web_log_id = @webLogId;
DELETE FROM tag_map WHERE web_log_id = @webLogId;
DELETE FROM upload WHERE web_log_id = @webLogId;
DELETE FROM web_log_user WHERE web_log_id = @webLogId;
DELETE FROM web_log_feed WHERE web_log_id = @webLogId;
DELETE FROM web_log WHERE id = @webLogId"
|> Sql.parameters [ webLogIdParam webLogId ]
|> Sql.executeNonQueryAsync
()
}
let delete webLogId =
log.LogTrace "WebLog.delete"
Custom.nonQuery
$"""DELETE FROM {Table.PostComment}
WHERE data ->> '{nameof Comment.Empty.PostId}'
IN (SELECT data ->> '{nameof Post.Empty.Id}'
FROM {Table.Post}
WHERE {Query.whereDataContains "@criteria"});
DELETE FROM {Table.PostRevision}
WHERE post_id IN (SELECT data ->> 'Id' FROM {Table.Post} WHERE {Query.whereDataContains "@criteria"});
DELETE FROM {Table.PageRevision}
WHERE page_id IN (SELECT data ->> 'Id' FROM {Table.Page} WHERE {Query.whereDataContains "@criteria"});
{Query.Delete.byContains Table.Post};
{Query.Delete.byContains Table.Page};
{Query.Delete.byContains Table.Category};
{Query.Delete.byContains Table.TagMap};
{Query.Delete.byContains Table.WebLogUser};
DELETE FROM {Table.Upload} WHERE web_log_id = @webLogId;
DELETE FROM {Table.WebLog} WHERE {Query.whereById "@webLogId"}"""
[ webLogIdParam webLogId; webLogContains webLogId ]
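// Ordering note: comment and revision rows are located through subqueries against the post
// and page documents, so they are deleted first, while those documents still exist; the web
// log document itself is removed last.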
/// Find a web log by its host (URL base)
let findByHost url = backgroundTask {
let! webLog =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM web_log WHERE url_base = @urlBase"
|> Sql.parameters [ "@urlBase", Sql.string url ]
|> Sql.executeAsync Map.toWebLog
|> tryHead
if Option.isSome webLog then
let! withFeeds = appendCustomFeeds webLog.Value
return Some withFeeds
else return None
}
let findByHost (url: string) =
log.LogTrace "WebLog.findByHost"
Find.firstByContains<WebLog> Table.WebLog {| UrlBase = url |}
/// Find a web log by its ID
let findById webLogId = backgroundTask {
let! webLog =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM web_log WHERE id = @webLogId"
|> Sql.parameters [ webLogIdParam webLogId ]
|> Sql.executeAsync Map.toWebLog
|> tryHead
if Option.isSome webLog then
let! withFeeds = appendCustomFeeds webLog.Value
return Some withFeeds
else return None
}
let findById (webLogId: WebLogId) =
log.LogTrace "WebLog.findById"
Find.byId<WebLogId, WebLog> Table.WebLog webLogId
/// Update settings for a web log
let updateSettings webLog = backgroundTask {
let! _ =
Sql.existingConnection conn
|> Sql.query
"UPDATE web_log
SET name = @name,
slug = @slug,
subtitle = @subtitle,
default_page = @defaultPage,
posts_per_page = @postsPerPage,
theme_id = @themeId,
url_base = @urlBase,
time_zone = @timeZone,
auto_htmx = @autoHtmx,
uploads = @uploads,
is_feed_enabled = @isFeedEnabled,
feed_name = @feedName,
items_in_feed = @itemsInFeed,
is_category_enabled = @isCategoryEnabled,
is_tag_enabled = @isTagEnabled,
copyright = @copyright
WHERE id = @id"
|> Sql.parameters (webLogParams webLog)
|> Sql.executeNonQueryAsync
()
/// Update redirect rules for a web log
let updateRedirectRules (webLog: WebLog) = backgroundTask {
log.LogTrace "WebLog.updateRedirectRules"
match! findById webLog.Id with
| Some _ -> do! Patch.byId Table.WebLog webLog.Id {| RedirectRules = webLog.RedirectRules |}
| None -> ()
}
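// Assumption about Patch.byId (its implementation is not shown in this diff): it merges the
// given record into the stored JSONB document, so only the RedirectRules field is rewritten
// rather than the whole web log document.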
/// Update RSS options for a web log
let updateRssOptions (webLog : WebLog) = backgroundTask {
let! _ =
Sql.existingConnection conn
|> Sql.query
"UPDATE web_log
SET is_feed_enabled = @isFeedEnabled,
feed_name = @feedName,
items_in_feed = @itemsInFeed,
is_category_enabled = @isCategoryEnabled,
is_tag_enabled = @isTagEnabled,
copyright = @copyright
WHERE id = @webLogId"
|> Sql.parameters (webLogIdParam webLog.Id :: rssParams webLog)
|> Sql.executeNonQueryAsync
do! updateCustomFeeds webLog
let updateRssOptions (webLog: WebLog) = backgroundTask {
log.LogTrace "WebLog.updateRssOptions"
match! findById webLog.Id with
| Some _ -> do! Patch.byId Table.WebLog webLog.Id {| Rss = webLog.Rss |}
| None -> ()
}
/// Update settings for a web log
let updateSettings (webLog: WebLog) =
log.LogTrace "WebLog.updateSettings"
Update.byId Table.WebLog webLog.Id webLog
interface IWebLogData with
member _.Add webLog = add webLog
member _.All () = all ()
member _.All() = all ()
member _.Delete webLogId = delete webLogId
member _.FindByHost url = findByHost url
member _.FindById webLogId = findById webLogId
member _.UpdateSettings webLog = updateSettings webLog
member _.UpdateRedirectRules webLog = updateRedirectRules webLog
member _.UpdateRssOptions webLog = updateRssOptions webLog
member _.UpdateSettings webLog = updateSettings webLog

View File

@ -1,143 +1,97 @@
namespace MyWebLog.Data.Postgres
open BitBadger.Documents
open BitBadger.Documents.Postgres
open Microsoft.Extensions.Logging
open MyWebLog
open MyWebLog.Data
open Npgsql
open Npgsql.FSharp
/// PostgreSQL myWebLog user data implementation
type PostgresWebLogUserData (conn : NpgsqlConnection) =
/// PostgreSQL myWebLog user data implementation
type PostgresWebLogUserData(log: ILogger) =
/// The INSERT statement for a user
let userInsert =
"INSERT INTO web_log_user (
id, web_log_id, email, first_name, last_name, preferred_name, password_hash, url, access_level,
created_on, last_seen_on
) VALUES (
@id, @webLogId, @email, @firstName, @lastName, @preferredName, @passwordHash, @url, @accessLevel,
@createdOn, @lastSeenOn
)"
/// Add a user
let add (user: WebLogUser) =
log.LogTrace "WebLogUser.add"
insert Table.WebLogUser user
/// Parameters for saving web log users
let userParams (user : WebLogUser) = [
"@id", Sql.string (WebLogUserId.toString user.Id)
"@webLogId", Sql.string (WebLogId.toString user.WebLogId)
"@email", Sql.string user.Email
"@firstName", Sql.string user.FirstName
"@lastName", Sql.string user.LastName
"@preferredName", Sql.string user.PreferredName
"@passwordHash", Sql.string user.PasswordHash
"@url", Sql.stringOrNone user.Url
"@accessLevel", Sql.string (AccessLevel.toString user.AccessLevel)
typedParam "createdOn" user.CreatedOn
optParam "lastSeenOn" user.LastSeenOn
]
/// Find a user by their ID for the given web log
let findById userId webLogId =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM web_log_user WHERE id = @id AND web_log_id = @webLogId"
|> Sql.parameters [ "@id", Sql.string (WebLogUserId.toString userId); webLogIdParam webLogId ]
|> Sql.executeAsync Map.toWebLogUser
|> tryHead
log.LogTrace "WebLogUser.findById"
Document.findByIdAndWebLog<WebLogUserId, WebLogUser> Table.WebLogUser userId webLogId
/// Delete a user if they have no posts or pages
let delete userId webLogId = backgroundTask {
log.LogTrace "WebLogUser.delete"
match! findById userId webLogId with
| Some _ ->
let userParam = [ "@userId", Sql.string (WebLogUserId.toString userId) ]
let criteria = Query.whereDataContains "@criteria"
let! isAuthor =
Sql.existingConnection conn
|> Sql.query
"SELECT ( EXISTS (SELECT 1 FROM page WHERE author_id = @userId
OR EXISTS (SELECT 1 FROM post WHERE author_id = @userId)) AS does_exist"
|> Sql.parameters userParam
|> Sql.executeRowAsync Map.toExists
Custom.scalar
$" SELECT ( EXISTS (SELECT 1 FROM {Table.Page} WHERE {criteria})
OR EXISTS (SELECT 1 FROM {Table.Post} WHERE {criteria})
) AS it"
[ jsonParam "@criteria" {| AuthorId = userId |} ]
toExists
if isAuthor then
return Error "User has pages or posts; cannot delete"
else
let! _ =
Sql.existingConnection conn
|> Sql.query "DELETE FROM web_log_user WHERE id = @userId"
|> Sql.parameters userParam
|> Sql.executeNonQueryAsync
do! Delete.byId Table.WebLogUser userId
return Ok true
| None -> return Error "User does not exist"
}
/// Find a user by their e-mail address for the given web log
let findByEmail email webLogId =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM web_log_user WHERE web_log_id = @webLogId AND email = @email"
|> Sql.parameters [ webLogIdParam webLogId; "@email", Sql.string email ]
|> Sql.executeAsync Map.toWebLogUser
|> tryHead
let findByEmail (email: string) webLogId =
log.LogTrace "WebLogUser.findByEmail"
Find.firstByContains<WebLogUser> Table.WebLogUser {| webLogDoc webLogId with Email = email |}
/// Get all users for the given web log
let findByWebLog webLogId =
Sql.existingConnection conn
|> Sql.query "SELECT * FROM web_log_user WHERE web_log_id = @webLogId ORDER BY LOWER(preferred_name)"
|> Sql.parameters [ webLogIdParam webLogId ]
|> Sql.executeAsync Map.toWebLogUser
log.LogTrace "WebLogUser.findByWebLog"
Custom.list
$"{selectWithCriteria Table.WebLogUser} ORDER BY LOWER(data ->> '{nameof WebLogUser.Empty.PreferredName}')"
[ webLogContains webLogId ]
fromData<WebLogUser>
/// Find the names of users by their IDs for the given web log
let findNames webLogId userIds = backgroundTask {
let idSql, idParams = inClause "AND id" "id" WebLogUserId.toString userIds
let findNames webLogId (userIds: WebLogUserId list) = backgroundTask {
log.LogTrace "WebLogUser.findNames"
let idSql, idParams = inClause $"AND data ->> '{nameof WebLogUser.Empty.Id}'" "id" userIds
let! users =
Sql.existingConnection conn
|> Sql.query $"SELECT * FROM web_log_user WHERE web_log_id = @webLogId {idSql}"
|> Sql.parameters (webLogIdParam webLogId :: idParams)
|> Sql.executeAsync Map.toWebLogUser
return
users
|> List.map (fun u -> { Name = WebLogUserId.toString u.Id; Value = WebLogUser.displayName u })
Custom.list
$"{selectWithCriteria Table.WebLogUser} {idSql}"
(webLogContains webLogId :: idParams)
fromData<WebLogUser>
return users |> List.map (fun u -> { Name = string u.Id; Value = u.DisplayName })
}
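// For reference, and as an assumption about the inClause helper (not shown in this diff),
// the fragment it generates for two IDs would look roughly like
//     AND data ->> 'Id' IN (@id0, @id1)
// with the parameter values drawn from the WebLogUserId list.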
/// Restore users from a backup
let restore users = backgroundTask {
let restore (users: WebLogUser list) = backgroundTask {
log.LogTrace "WebLogUser.restore"
let! _ =
Sql.existingConnection conn
|> Sql.executeTransactionAsync [
userInsert, users |> List.map userParams
]
Configuration.dataSource ()
|> Sql.fromDataSource
|> Sql.executeTransactionAsync
[ Query.insert Table.WebLogUser, users |> List.map (fun user -> [ jsonParam "@data" user ]) ]
()
}
/// Set a user's last seen date/time to now
let setLastSeen userId webLogId = backgroundTask {
let! _ =
Sql.existingConnection conn
|> Sql.query "UPDATE web_log_user SET last_seen_on = @lastSeenOn WHERE id = @id AND web_log_id = @webLogId"
|> Sql.parameters
[ webLogIdParam webLogId
typedParam "lastSeenOn" (Noda.now ())
"@id", Sql.string (WebLogUserId.toString userId) ]
|> Sql.executeNonQueryAsync
()
let setLastSeen (userId: WebLogUserId) webLogId = backgroundTask {
log.LogTrace "WebLogUser.setLastSeen"
match! Document.existsByWebLog Table.WebLogUser userId webLogId with
| true -> do! Patch.byId Table.WebLogUser userId {| LastSeenOn = Some (Noda.now ()) |}
| false -> ()
}
/// Save a user
let save user = backgroundTask {
let! _ =
Sql.existingConnection conn
|> Sql.query $"
{userInsert} ON CONFLICT (id) DO UPDATE
SET email = @email,
first_name = @firstName,
last_name = @lastName,
preferred_name = @preferredName,
password_hash = @passwordHash,
url = @url,
access_level = @accessLevel,
created_on = @createdOn,
last_seen_on = @lastSeenOn"
|> Sql.parameters (userParams user)
|> Sql.executeNonQueryAsync
()
}
/// Update a user
let update (user: WebLogUser) =
log.LogTrace "WebLogUser.update"
Update.byId Table.WebLogUser user.Id user
interface IWebLogUserData with
member _.Add user = save user
member _.Add user = add user
member _.Delete userId webLogId = delete userId webLogId
member _.FindByEmail email webLogId = findByEmail email webLogId
member _.FindById userId webLogId = findById userId webLogId
@ -145,5 +99,4 @@ type PostgresWebLogUserData (conn : NpgsqlConnection) =
member _.FindNames webLogId userIds = findNames webLogId userIds
member _.Restore users = restore users
member _.SetLastSeen userId webLogId = setLastSeen userId webLogId
member _.Update user = save user
member _.Update user = update user

View File

@ -1,208 +1,125 @@
namespace MyWebLog.Data
open BitBadger.Documents
open BitBadger.Documents.Postgres
open Microsoft.Extensions.Logging
open MyWebLog
open MyWebLog.Data.Postgres
open Newtonsoft.Json
open Npgsql
open Npgsql.FSharp
/// Data implementation for PostgreSQL
type PostgresData (conn : NpgsqlConnection, log : ILogger<PostgresData>, ser : JsonSerializer) =
type PostgresData(log: ILogger<PostgresData>, ser: JsonSerializer) =
/// Create any needed tables
let ensureTables () = backgroundTask {
let _ = NpgsqlConnection.GlobalTypeMapper.UseNodaTime ()
// Set up the PostgreSQL document store
Configuration.useSerializer (Utils.createDocumentSerializer ser)
let! tables =
Sql.existingConnection conn
|> Sql.query "SELECT tablename FROM pg_tables WHERE schemaname = 'public'"
|> Sql.executeAsync (fun row -> row.string "tablename")
Custom.list
"SELECT tablename FROM pg_tables WHERE schemaname = 'public'" [] (fun row -> row.string "tablename")
let needsTable table = not (List.contains table tables)
let mutable isNew = false
let sql = seq {
// Theme tables
if needsTable "theme" then
isNew <- true
"CREATE TABLE theme (
id TEXT NOT NULL PRIMARY KEY,
name TEXT NOT NULL,
version TEXT NOT NULL)"
if needsTable "theme_template" then
"CREATE TABLE theme_template (
theme_id TEXT NOT NULL REFERENCES theme (id),
name TEXT NOT NULL,
template TEXT NOT NULL,
PRIMARY KEY (theme_id, name))"
if needsTable "theme_asset" then
"CREATE TABLE theme_asset (
theme_id TEXT NOT NULL REFERENCES theme (id),
if needsTable Table.Theme then
Query.Definition.ensureTable Table.Theme
Query.Definition.ensureKey Table.Theme
if needsTable Table.ThemeAsset then
$"CREATE TABLE {Table.ThemeAsset} (
theme_id TEXT NOT NULL,
path TEXT NOT NULL,
updated_on TIMESTAMPTZ NOT NULL,
data BYTEA NOT NULL,
PRIMARY KEY (theme_id, path))"
// Web log tables
if needsTable "web_log" then
"CREATE TABLE web_log (
id TEXT NOT NULL PRIMARY KEY,
name TEXT NOT NULL,
slug TEXT NOT NULL,
subtitle TEXT,
default_page TEXT NOT NULL,
posts_per_page INTEGER NOT NULL,
theme_id TEXT NOT NULL REFERENCES theme (id),
url_base TEXT NOT NULL,
time_zone TEXT NOT NULL,
auto_htmx BOOLEAN NOT NULL DEFAULT FALSE,
uploads TEXT NOT NULL,
is_feed_enabled BOOLEAN NOT NULL DEFAULT FALSE,
feed_name TEXT NOT NULL,
items_in_feed INTEGER,
is_category_enabled BOOLEAN NOT NULL DEFAULT FALSE,
is_tag_enabled BOOLEAN NOT NULL DEFAULT FALSE,
copyright TEXT)"
"CREATE INDEX web_log_theme_idx ON web_log (theme_id)"
if needsTable "web_log_feed" then
"CREATE TABLE web_log_feed (
id TEXT NOT NULL PRIMARY KEY,
web_log_id TEXT NOT NULL REFERENCES web_log (id),
source TEXT NOT NULL,
path TEXT NOT NULL,
podcast JSONB)"
"CREATE INDEX web_log_feed_web_log_idx ON web_log_feed (web_log_id)"
// Web log table
if needsTable Table.WebLog then
Query.Definition.ensureTable Table.WebLog
Query.Definition.ensureKey Table.WebLog
Query.Definition.ensureDocumentIndex Table.WebLog Optimized
// Category table
if needsTable "category" then
"CREATE TABLE category (
id TEXT NOT NULL PRIMARY KEY,
web_log_id TEXT NOT NULL REFERENCES web_log (id),
name TEXT NOT NULL,
slug TEXT NOT NULL,
description TEXT,
parent_id TEXT)"
"CREATE INDEX category_web_log_idx ON category (web_log_id)"
if needsTable Table.Category then
Query.Definition.ensureTable Table.Category
Query.Definition.ensureKey Table.Category
Query.Definition.ensureDocumentIndex Table.Category Optimized
// Web log user table
if needsTable "web_log_user" then
"CREATE TABLE web_log_user (
id TEXT NOT NULL PRIMARY KEY,
web_log_id TEXT NOT NULL REFERENCES web_log (id),
email TEXT NOT NULL,
first_name TEXT NOT NULL,
last_name TEXT NOT NULL,
preferred_name TEXT NOT NULL,
password_hash TEXT NOT NULL,
url TEXT,
access_level TEXT NOT NULL,
created_on TIMESTAMPTZ NOT NULL,
last_seen_on TIMESTAMPTZ)"
"CREATE INDEX web_log_user_web_log_idx ON web_log_user (web_log_id)"
"CREATE INDEX web_log_user_email_idx ON web_log_user (web_log_id, email)"
if needsTable Table.WebLogUser then
Query.Definition.ensureTable Table.WebLogUser
Query.Definition.ensureKey Table.WebLogUser
Query.Definition.ensureDocumentIndex Table.WebLogUser Optimized
// Page tables
if needsTable "page" then
"CREATE TABLE page (
id TEXT NOT NULL PRIMARY KEY,
web_log_id TEXT NOT NULL REFERENCES web_log (id),
author_id TEXT NOT NULL REFERENCES web_log_user (id),
title TEXT NOT NULL,
permalink TEXT NOT NULL,
prior_permalinks TEXT[] NOT NULL DEFAULT '{}',
published_on TIMESTAMPTZ NOT NULL,
updated_on TIMESTAMPTZ NOT NULL,
is_in_page_list BOOLEAN NOT NULL DEFAULT FALSE,
template TEXT,
page_text TEXT NOT NULL,
meta_items JSONB)"
"CREATE INDEX page_web_log_idx ON page (web_log_id)"
"CREATE INDEX page_author_idx ON page (author_id)"
"CREATE INDEX page_permalink_idx ON page (web_log_id, permalink)"
if needsTable "page_revision" then
"CREATE TABLE page_revision (
page_id TEXT NOT NULL REFERENCES page (id),
if needsTable Table.Page then
Query.Definition.ensureTable Table.Page
Query.Definition.ensureKey Table.Page
Query.Definition.ensureIndexOn Table.Page "author" [ nameof Page.Empty.AuthorId ]
Query.Definition.ensureIndexOn
Table.Page "permalink" [ nameof Page.Empty.WebLogId; nameof Page.Empty.Permalink ]
if needsTable Table.PageRevision then
$"CREATE TABLE {Table.PageRevision} (
page_id TEXT NOT NULL,
as_of TIMESTAMPTZ NOT NULL,
revision_text TEXT NOT NULL,
PRIMARY KEY (page_id, as_of))"
// Post tables
if needsTable "post" then
"CREATE TABLE post (
id TEXT NOT NULL PRIMARY KEY,
web_log_id TEXT NOT NULL REFERENCES web_log (id),
author_id TEXT NOT NULL REFERENCES web_log_user (id),
status TEXT NOT NULL,
title TEXT NOT NULL,
permalink TEXT NOT NULL,
prior_permalinks TEXT[] NOT NULL DEFAULT '{}',
published_on TIMESTAMPTZ,
updated_on TIMESTAMPTZ NOT NULL,
template TEXT,
post_text TEXT NOT NULL,
tags TEXT[],
meta_items JSONB,
episode JSONB)"
"CREATE INDEX post_web_log_idx ON post (web_log_id)"
"CREATE INDEX post_author_idx ON post (author_id)"
"CREATE INDEX post_status_idx ON post (web_log_id, status, updated_on)"
"CREATE INDEX post_permalink_idx ON post (web_log_id, permalink)"
if needsTable "post_category" then
"CREATE TABLE post_category (
post_id TEXT NOT NULL REFERENCES post (id),
category_id TEXT NOT NULL REFERENCES category (id),
PRIMARY KEY (post_id, category_id))"
"CREATE INDEX post_category_category_idx ON post_category (category_id)"
if needsTable "post_revision" then
"CREATE TABLE post_revision (
post_id TEXT NOT NULL REFERENCES post (id),
if needsTable Table.Post then
Query.Definition.ensureTable Table.Post
Query.Definition.ensureKey Table.Post
Query.Definition.ensureIndexOn Table.Post "author" [ nameof Post.Empty.AuthorId ]
Query.Definition.ensureIndexOn
Table.Post "permalink" [ nameof Post.Empty.WebLogId; nameof Post.Empty.Permalink ]
Query.Definition.ensureIndexOn
Table.Post
"status"
[ nameof Post.Empty.WebLogId; nameof Post.Empty.Status; nameof Post.Empty.UpdatedOn ]
$"CREATE INDEX idx_post_category ON {Table.Post} USING GIN ((data['{nameof Post.Empty.CategoryIds}']))"
$"CREATE INDEX idx_post_tag ON {Table.Post} USING GIN ((data['{nameof Post.Empty.Tags}']))"
if needsTable Table.PostRevision then
$"CREATE TABLE {Table.PostRevision} (
post_id TEXT NOT NULL,
as_of TIMESTAMPTZ NOT NULL,
revision_text TEXT NOT NULL,
PRIMARY KEY (post_id, as_of))"
if needsTable "post_comment" then
"CREATE TABLE post_comment (
id TEXT NOT NULL PRIMARY KEY,
post_id TEXT NOT NULL REFERENCES post(id),
in_reply_to_id TEXT,
name TEXT NOT NULL,
email TEXT NOT NULL,
url TEXT,
status TEXT NOT NULL,
posted_on TIMESTAMPTZ NOT NULL,
comment_text TEXT NOT NULL)"
"CREATE INDEX post_comment_post_idx ON post_comment (post_id)"
if needsTable Table.PostComment then
Query.Definition.ensureTable Table.PostComment
Query.Definition.ensureKey Table.PostComment
Query.Definition.ensureIndexOn Table.PostComment "post" [ nameof Comment.Empty.PostId ]
// Tag map table
if needsTable "tag_map" then
"CREATE TABLE tag_map (
id TEXT NOT NULL PRIMARY KEY,
web_log_id TEXT NOT NULL REFERENCES web_log (id),
tag TEXT NOT NULL,
url_value TEXT NOT NULL)"
"CREATE INDEX tag_map_web_log_idx ON tag_map (web_log_id)"
if needsTable Table.TagMap then
Query.Definition.ensureTable Table.TagMap
Query.Definition.ensureKey Table.TagMap
Query.Definition.ensureDocumentIndex Table.TagMap Optimized
// Uploaded file table
if needsTable "upload" then
"CREATE TABLE upload (
if needsTable Table.Upload then
$"CREATE TABLE {Table.Upload} (
id TEXT NOT NULL PRIMARY KEY,
web_log_id TEXT NOT NULL REFERENCES web_log (id),
web_log_id TEXT NOT NULL,
path TEXT NOT NULL,
updated_on TIMESTAMPTZ NOT NULL,
data BYTEA NOT NULL)"
"CREATE INDEX upload_web_log_idx ON upload (web_log_id)"
"CREATE INDEX upload_path_idx ON upload (web_log_id, path)"
$"CREATE INDEX idx_upload_web_log ON {Table.Upload} (web_log_id)"
$"CREATE INDEX idx_upload_path ON {Table.Upload} (web_log_id, path)"
// Database version table
if needsTable "db_version" then
"CREATE TABLE db_version (id TEXT NOT NULL PRIMARY KEY)"
$"INSERT INTO db_version VALUES ('{Utils.currentDbVersion}')"
if needsTable Table.DbVersion then
$"CREATE TABLE {Table.DbVersion} (id TEXT NOT NULL PRIMARY KEY)"
$"INSERT INTO {Table.DbVersion} VALUES ('{Utils.Migration.currentDbVersion}')"
}
Sql.existingConnection conn
Configuration.dataSource ()
|> Sql.fromDataSource
|> Sql.executeTransactionAsync
(sql
|> Seq.map (fun s ->
let parts = s.Split ' '
if parts[1].ToLowerInvariant () = "table" then
let parts = s.Replace(" IF NOT EXISTS", "", System.StringComparison.OrdinalIgnoreCase).Split ' '
if parts[1].ToLowerInvariant() = "table" then
log.LogInformation $"Creating {parts[2]} table..."
s, [ [] ])
|> List.ofSeq)
@ -213,48 +130,140 @@ type PostgresData (conn : NpgsqlConnection, log : ILogger<PostgresData>, ser : J
/// Set a specific database version
let setDbVersion version = backgroundTask {
let! _ =
Sql.existingConnection conn
|> Sql.query $"DELETE FROM db_version; INSERT INTO db_version VALUES ('%s{version}')"
|> Sql.executeNonQueryAsync
()
do! Custom.nonQuery $"DELETE FROM db_version; INSERT INTO db_version VALUES ('%s{version}')" []
return version
}
/// Migrate from v2-rc2 to v2 (manual migration required)
let migrateV2Rc2ToV2 () = backgroundTask {
let! webLogs =
Custom.list
$"SELECT url_base, slug FROM {Table.WebLog}" [] (fun row -> row.string "url_base", row.string "slug")
Utils.Migration.backupAndRestoreRequired log "v2-rc2" "v2" webLogs
}
/// Migrate from v2 to v2.1.1
let migrateV2ToV2point1point1 () = backgroundTask {
let migration = "v2 to v2.1.1"
Utils.Migration.logStep log migration "Adding empty redirect rule set to all weblogs"
do! Custom.nonQuery $"""UPDATE {Table.WebLog} SET data = data || '{{ "RedirectRules": [] }}'::jsonb""" []
let tables =
[ Table.Category; Table.Page; Table.Post; Table.PostComment; Table.TagMap; Table.Theme; Table.WebLog
Table.WebLogUser ]
Utils.Migration.logStep log migration "Adding unique indexes on ID fields"
do! Custom.nonQuery (tables |> List.map Query.Definition.ensureKey |> String.concat "; ") []
Utils.Migration.logStep log migration "Removing constraints"
let fkToDrop =
[ "page_revision", "page_revision_page_id_fkey"
"post_revision", "post_revision_post_id_fkey"
"theme_asset", "theme_asset_theme_id_fkey"
"upload", "upload_web_log_id_fkey"
"category", "category_pkey"
"page", "page_pkey"
"post", "post_pkey"
"post_comment", "post_comment_pkey"
"tag_map", "tag_map_pkey"
"theme", "theme_pkey"
"web_log", "web_log_pkey"
"web_log_user", "web_log_user_pkey" ]
do! Custom.nonQuery
(fkToDrop
|> List.map (fun (tbl, fk) -> $"ALTER TABLE {tbl} DROP CONSTRAINT {fk}")
|> String.concat "; ")
[]
Utils.Migration.logStep log migration "Dropping old indexes"
let toDrop =
[ "idx_category"; "page_author_idx"; "page_permalink_idx"; "page_web_log_idx"; "post_author_idx"
"post_category_idx"; "post_permalink_idx"; "post_status_idx"; "post_tag_idx"; "post_web_log_idx"
"post_comment_post_idx"; "idx_tag_map"; "idx_web_log"; "idx_web_log_user" ]
do! Custom.nonQuery (toDrop |> List.map (sprintf "DROP INDEX %s") |> String.concat "; ") []
Utils.Migration.logStep log migration "Dropping old ID columns"
do! Custom.nonQuery (tables |> List.map (sprintf "ALTER TABLE %s DROP COLUMN id") |> String.concat "; ") []
Utils.Migration.logStep log migration "Adding new indexes"
let newIdx =
[ yield! tables |> List.map Query.Definition.ensureKey
Query.Definition.ensureDocumentIndex Table.Category Optimized
Query.Definition.ensureDocumentIndex Table.TagMap Optimized
Query.Definition.ensureDocumentIndex Table.WebLog Optimized
Query.Definition.ensureDocumentIndex Table.WebLogUser Optimized
Query.Definition.ensureIndexOn Table.Page "author" [ nameof Page.Empty.AuthorId ]
Query.Definition.ensureIndexOn
Table.Page "permalink" [ nameof Page.Empty.WebLogId; nameof Page.Empty.Permalink ]
Query.Definition.ensureIndexOn Table.Post "author" [ nameof Post.Empty.AuthorId ]
Query.Definition.ensureIndexOn
Table.Post "permalink" [ nameof Post.Empty.WebLogId; nameof Post.Empty.Permalink ]
Query.Definition.ensureIndexOn
Table.Post
"status"
[ nameof Post.Empty.WebLogId; nameof Post.Empty.Status; nameof Post.Empty.UpdatedOn ]
$"CREATE INDEX idx_post_category ON {Table.Post} USING GIN ((data['{nameof Post.Empty.CategoryIds}']))"
$"CREATE INDEX idx_post_tag ON {Table.Post} USING GIN ((data['{nameof Post.Empty.Tags}']))"
Query.Definition.ensureIndexOn Table.PostComment "post" [ nameof Comment.Empty.PostId ] ]
do! Custom.nonQuery (newIdx |> String.concat "; ") []
Utils.Migration.logStep log migration "Setting database to version 2.1.1"
return! setDbVersion "v2.1.1"
}
/// Migrate from v2.1.1 to v2.2
let migrateV2point1point1ToV2point2 () = backgroundTask {
Utils.Migration.logStep log "v2.1.1 to v2.2" "Setting e-mail to lowercase"
do! Custom.nonQuery
$"""UPDATE {Table.WebLogUser} SET data = data || ('{{"Email":"' || lower(data->>'Email') || '"}}')::jsonb"""
[]
Utils.Migration.logStep log "v2.1.1 to v2.2" "Setting database version to v2.2"
return! setDbVersion "v2.2"
}
/// Do required data migration between versions
let migrate version = backgroundTask {
match version with
| Some "v2-rc2" -> ()
// Future versions will be inserted here
| Some _
| None ->
log.LogWarning $"Unknown database version; assuming {Utils.currentDbVersion}"
do! setDbVersion Utils.currentDbVersion
let mutable v = defaultArg version ""
if v = "v2-rc2" then
let! webLogs =
Custom.list
$"SELECT url_base, slug FROM {Table.WebLog}" []
(fun row -> row.string "url_base", row.string "slug")
Utils.Migration.backupAndRestoreRequired log "v2-rc2" "v2" webLogs
if v = "v2" then
let! ver = migrateV2ToV2point1point1 ()
v <- ver
if v = "v2.1.1" then
let! ver = migrateV2point1point1ToV2point2 ()
v <- ver
if v <> Utils.Migration.currentDbVersion then
log.LogWarning $"Unknown database version; assuming {Utils.Migration.currentDbVersion}"
let! _ = setDbVersion Utils.Migration.currentDbVersion
()
}
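// The chain above runs each applicable step once and threads the resulting version forward;
// a future step would slot in the same way (illustrative only; no such migration exists yet):
//     if v = "v2.2" then
//         let! ver = migrateV2point2ToV2point3 ()
//         v <- ver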
interface IData with
member _.Category = PostgresCategoryData conn
member _.Page = PostgresPageData (conn, ser)
member _.Post = PostgresPostData (conn, ser)
member _.TagMap = PostgresTagMapData conn
member _.Theme = PostgresThemeData conn
member _.ThemeAsset = PostgresThemeAssetData conn
member _.Upload = PostgresUploadData conn
member _.WebLog = PostgresWebLogData (conn, ser)
member _.WebLogUser = PostgresWebLogUserData conn
member _.Category = PostgresCategoryData log
member _.Page = PostgresPageData log
member _.Post = PostgresPostData log
member _.TagMap = PostgresTagMapData log
member _.Theme = PostgresThemeData log
member _.ThemeAsset = PostgresThemeAssetData log
member _.Upload = PostgresUploadData log
member _.WebLog = PostgresWebLogData log
member _.WebLogUser = PostgresWebLogUserData log
member _.Serializer = ser
member _.StartUp () = backgroundTask {
log.LogTrace "PostgresData.StartUp"
do! ensureTables ()
let! version =
Sql.existingConnection conn
|> Sql.query "SELECT id FROM db_version"
|> Sql.executeAsync (fun row -> row.string "id")
|> tryHead
match version with
| Some v when v = Utils.currentDbVersion -> ()
| Some _
| None -> do! migrate version
let! version = Custom.single "SELECT id FROM db_version" [] (fun row -> row.string "id")
do! migrate version
}

View File

@ -5,7 +5,6 @@ open MyWebLog
open RethinkDb.Driver
/// Functions to assist with retrieving data
[<AutoOpen>]
module private RethinkHelpers =
/// Table names
@ -70,38 +69,39 @@ module private RethinkHelpers =
let r = RethinkDB.R
/// Verify that the web log ID matches before returning an item
let verifyWebLog<'T> webLogId (prop : 'T -> WebLogId) (f : Net.IConnection -> Task<'T option>) =
let verifyWebLog<'T> webLogId (prop: 'T -> WebLogId) (f: Net.IConnection -> Task<'T option>) =
fun conn -> backgroundTask {
match! f conn with Some it when (prop it) = webLogId -> return Some it | _ -> return None
}
/// Get the first item from a list, or None if the list is empty
let tryFirst<'T> (f : Net.IConnection -> Task<'T list>) =
let tryFirst<'T> (f: Net.IConnection -> Task<'T list>) =
fun conn -> backgroundTask {
let! results = f conn
return results |> List.tryHead
}
/// Cast a strongly-typed list to an object list
let objList<'T> (objects : 'T list) = objects |> List.map (fun it -> it :> obj)
let objList<'T> (objects: 'T list) = objects |> List.map (fun it -> it :> obj)
open System
open Microsoft.Extensions.Logging
open MyWebLog.ViewModels
open RethinkDb.Driver.FSharp
open RethinkHelpers
/// RethinkDB implementation of data functions for myWebLog
type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<RethinkDbData>) =
type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger<RethinkDbData>) =
/// Match theme asset IDs by their prefix (the theme ID)
let matchAssetByThemeId themeId =
let keyPrefix = $"^{ThemeId.toString themeId}/"
fun (row : Ast.ReqlExpr) -> row[nameof ThemeAsset.empty.Id].Match keyPrefix :> obj
let keyPrefix = $"^{themeId}/"
fun (row: Ast.ReqlExpr) -> row[nameof ThemeAsset.Empty.Id].Match keyPrefix :> obj
/// Function to exclude template text from themes
let withoutTemplateText (row : Ast.ReqlExpr) : obj =
{| Templates = row[nameof Theme.empty.Templates].Without [| nameof ThemeTemplate.empty.Text |] |}
let withoutTemplateText (row: Ast.ReqlExpr) : obj =
{| Templates = row[nameof Theme.Empty.Templates].Merge(r.HashMap(nameof ThemeTemplate.Empty.Text, "")) |}
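// Unlike the removed Without-based projection, this merge blanks the template text rather
// than dropping the field, so deserialized themes keep a consistent shape.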
/// Ensure field indexes exist, as well as special indexes for selected tables
let ensureIndexes table fields = backgroundTask {
@ -112,27 +112,27 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
do! rethink { withTable table; indexCreate field; write; withRetryOnce; ignoreResult conn }
// Post and page need an index by web log ID and permalink
if [ Table.Page; Table.Post ] |> List.contains table then
let permalinkIdx = nameof Page.empty.Permalink
let permalinkIdx = nameof Page.Empty.Permalink
if not (indexes |> List.contains permalinkIdx) then
log.LogInformation $"Creating index {table}.{permalinkIdx}..."
do! rethink {
withTable table
indexCreate permalinkIdx
(fun row -> r.Array (row[nameof Page.empty.WebLogId], row[permalinkIdx].Downcase ()) :> obj)
(fun row -> r.Array(row[nameof Page.Empty.WebLogId], row[permalinkIdx].Downcase()) :> obj)
write; withRetryOnce; ignoreResult conn
}
// Prior permalinks are searched when a post or page permalink does not match the current URL
let priorIdx = nameof Post.empty.PriorPermalinks
let priorIdx = nameof Post.Empty.PriorPermalinks
if not (indexes |> List.contains priorIdx) then
log.LogInformation $"Creating index {table}.{priorIdx}..."
do! rethink {
withTable table
indexCreate priorIdx (fun row -> row[priorIdx].Downcase () :> obj) [ Multi ]
indexCreate priorIdx [ Multi ]
write; withRetryOnce; ignoreResult conn
}
// Post needs indexes by category and tag (used for counting and retrieving posts)
if Table.Post = table then
for idx in [ nameof Post.empty.CategoryIds; nameof Post.empty.Tags ] do
for idx in [ nameof Post.Empty.CategoryIds; nameof Post.Empty.Tags ] do
if not (List.contains idx indexes) then
log.LogInformation $"Creating index {table}.{idx}..."
do! rethink {
@ -147,7 +147,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
do! rethink {
withTable table
indexCreate Index.WebLogAndTag (fun row ->
[| row[nameof TagMap.empty.WebLogId]; row[nameof TagMap.empty.Tag] |] :> obj)
[| row[nameof TagMap.Empty.WebLogId]; row[nameof TagMap.Empty.Tag] |] :> obj)
write; withRetryOnce; ignoreResult conn
}
if not (indexes |> List.contains Index.WebLogAndUrl) then
@ -155,7 +155,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
do! rethink {
withTable table
indexCreate Index.WebLogAndUrl (fun row ->
[| row[nameof TagMap.empty.WebLogId]; row[nameof TagMap.empty.UrlValue] |] :> obj)
[| row[nameof TagMap.Empty.WebLogId]; row[nameof TagMap.Empty.UrlValue] |] :> obj)
write; withRetryOnce; ignoreResult conn
}
// Uploaded files need an index by web log ID and path, as that is how they are retrieved
@ -165,7 +165,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
do! rethink {
withTable table
indexCreate Index.WebLogAndPath (fun row ->
[| row[nameof Upload.empty.WebLogId]; row[nameof Upload.empty.Path] |] :> obj)
[| row[nameof Upload.Empty.WebLogId]; row[nameof Upload.Empty.Path] |] :> obj)
write; withRetryOnce; ignoreResult conn
}
// Users log on with e-mail
@ -175,14 +175,18 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
do! rethink {
withTable table
indexCreate Index.LogOn (fun row ->
[| row[nameof WebLogUser.empty.WebLogId]; row[nameof WebLogUser.empty.Email] |] :> obj)
[| row[nameof WebLogUser.Empty.WebLogId]; row[nameof WebLogUser.Empty.Email] |] :> obj)
write; withRetryOnce; ignoreResult conn
}
do! rethink { withTable table; indexWait; result; withRetryDefault; ignoreResult conn }
}
/// The batch size for restoration methods
let restoreBatchSize = 100
/// A value to use when files need to be retrieved without their data
let emptyFile = r.Binary(Array.Empty<byte>())
/// Delete assets for the given theme ID
let deleteAssetsByTheme themeId = rethink {
withTable Table.ThemeAsset
@ -192,7 +196,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
}
/// Set a specific database version
let setDbVersion (version : string) = backgroundTask {
let setDbVersion (version: string) = backgroundTask {
do! rethink {
withTable Table.DbVersion
delete
@ -207,23 +211,78 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
/// Migrate from v2-rc1 to v2-rc2
let migrateV2Rc1ToV2Rc2 () = backgroundTask {
let logStep = Utils.logMigrationStep log "v2-rc1 to v2-rc2"
let logStep = Utils.Migration.logStep log "v2-rc1 to v2-rc2"
logStep "**IMPORTANT**"
logStep "See release notes about required backup/restoration for RethinkDB."
logStep "If there is an error immediately below this message, this is why."
logStep "Setting database version to v2-rc2"
do! setDbVersion "v2-rc2"
}
/// Migrate from v2-rc2 to v2
let migrateV2Rc2ToV2 () = backgroundTask {
Utils.Migration.logStep log "v2-rc2 to v2" "Setting database version; no migration required"
do! setDbVersion "v2"
}
/// Migrate from v2 to v2.1
let migrateV2ToV2point1 () = backgroundTask {
Utils.Migration.logStep log "v2 to v2.1" "Adding empty redirect rule set to all weblogs"
do! rethink {
withTable Table.WebLog
update [ nameof WebLog.Empty.RedirectRules, [] :> obj ]
write; withRetryOnce; ignoreResult conn
}
Utils.Migration.logStep log "v2 to v2.1" "Setting database version to v2.1"
do! setDbVersion "v2.1"
}
/// Migrate from v2.1 to v2.1.1
let migrateV2point1ToV2point1point1 () = backgroundTask {
Utils.Migration.logStep log "v2.1 to v2.1.1" "Setting database version; no migration required"
do! setDbVersion "v2.1.1"
}
/// Migrate from v2.1.1 to v2.2
let migrateV2point1point1ToV2point2 () = backgroundTask {
Utils.Migration.logStep log "v2.1.1 to v2.2" "Setting e-mail to lowercase"
do! rethink {
withTable Table.WebLogUser
update (fun row -> {| Email = row[nameof WebLogUser.Empty.Email].Downcase() |})
write; withRetryOnce; ignoreResult conn
}
Utils.Migration.logStep log "v2.1.1 to v2.2" "Setting database version to v2.2"
do! setDbVersion "v2.2"
}
/// Migrate data between versions
let migrate version = backgroundTask {
match version with
| Some v when v = "v2-rc2" -> ()
| Some v when v = "v2-rc1" -> do! migrateV2Rc1ToV2Rc2 ()
| Some _
| None ->
log.LogWarning $"Unknown database version; assuming {Utils.currentDbVersion}"
do! setDbVersion Utils.currentDbVersion
let mutable v = defaultArg version ""
if v = "v2-rc1" then
do! migrateV2Rc1ToV2Rc2 ()
v <- "v2-rc2"
if v = "v2-rc2" then
do! migrateV2Rc2ToV2 ()
v <- "v2"
if v = "v2" then
do! migrateV2ToV2point1 ()
v <- "v2.1"
if v = "v2.1" then
do! migrateV2point1ToV2point1point1 ()
v <- "v2.1.1"
if v = "v2.1.1" then
do! migrateV2point1point1ToV2point2 ()
v <- "v2.2"
if v <> Utils.Migration.currentDbVersion then
log.LogWarning $"Unknown database version; assuming {Utils.Migration.currentDbVersion}"
do! setDbVersion Utils.Migration.currentDbVersion
}
/// The connection for this instance
@ -242,15 +301,15 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
member _.CountAll webLogId = rethink<int> {
withTable Table.Category
getAll [ webLogId ] (nameof Category.empty.WebLogId)
getAll [ webLogId ] (nameof Category.Empty.WebLogId)
count
result; withRetryDefault conn
}
member _.CountTopLevel webLogId = rethink<int> {
withTable Table.Category
getAll [ webLogId ] (nameof Category.empty.WebLogId)
filter (nameof Category.empty.ParentId) None
getAll [ webLogId ] (nameof Category.Empty.WebLogId)
filter (nameof Category.Empty.ParentId) None (Default FilterDefaultHandling.Return)
count
result; withRetryDefault conn
}
@ -258,8 +317,8 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
member _.FindAllForView webLogId = backgroundTask {
let! cats = rethink<Category list> {
withTable Table.Category
getAll [ webLogId ] (nameof Category.empty.WebLogId)
orderByFunc (fun it -> it[nameof Category.empty.Name].Downcase () :> obj)
getAll [ webLogId ] (nameof Category.Empty.WebLogId)
orderByFunc (fun it -> it[nameof Category.Empty.Name].Downcase() :> obj)
result; withRetryDefault conn
}
let ordered = Utils.orderByHierarchy cats None None []
@ -275,8 +334,8 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
|> List.ofSeq
let! count = rethink<int> {
withTable Table.Post
getAll catIds (nameof Post.empty.CategoryIds)
filter (nameof Post.empty.Status) Published
getAll catIds (nameof Post.Empty.CategoryIds)
filter (nameof Post.Empty.Status) Published
distinct
count
result; withRetryDefault conn
@ -291,8 +350,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
PostCount = counts
|> Array.tryFind (fun c -> fst c = cat.Id)
|> Option.map snd
|> Option.defaultValue 0
})
|> Option.defaultValue 0 })
|> Array.ofSeq
}
@ -302,11 +360,11 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
get catId
resultOption; withRetryOptionDefault
}
|> verifyWebLog webLogId (fun c -> c.WebLogId) <| conn
|> verifyWebLog webLogId _.WebLogId <| conn
member _.FindByWebLog webLogId = rethink<Category list> {
withTable Table.Category
getAll [ webLogId ] (nameof Category.empty.WebLogId)
getAll [ webLogId ] (nameof Category.Empty.WebLogId)
result; withRetryDefault conn
}
@ -316,24 +374,26 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
// Reassign any children to the category's parent category
let! children = rethink<int> {
withTable Table.Category
filter (nameof Category.empty.ParentId) catId
filter (nameof Category.Empty.ParentId) catId
count
result; withRetryDefault conn
}
if children > 0 then
do! rethink {
withTable Table.Category
filter (nameof Category.empty.ParentId) catId
update [ nameof Category.empty.ParentId, cat.ParentId :> obj ]
filter (nameof Category.Empty.ParentId) catId
update [ nameof Category.Empty.ParentId, cat.ParentId :> obj ]
write; withRetryDefault; ignoreResult conn
}
// Delete the category from all posts where it is assigned
do! rethink {
withTable Table.Post
getAll [ webLogId ] (nameof Post.empty.WebLogId)
filter (fun row -> row[nameof Post.empty.CategoryIds].Contains catId :> obj)
getAll [ webLogId ] (nameof Post.Empty.WebLogId)
filter (fun row -> row[nameof Post.Empty.CategoryIds].Contains catId :> obj)
update (fun row ->
{| CategoryIds = r.Array(row[nameof Post.empty.CategoryIds]).Remove catId |} :> obj)
{| CategoryIds =
row[nameof Post.Empty.CategoryIds].CoerceTo("array")
.SetDifference(r.Array(catId)) |} :> obj)
write; withRetryDefault; ignoreResult conn
}
// Delete the category itself
@ -379,26 +439,26 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
member _.All webLogId = rethink<Page list> {
withTable Table.Page
getAll [ webLogId ] (nameof Page.empty.WebLogId)
without [ nameof Page.empty.Text
nameof Page.empty.Metadata
nameof Page.empty.Revisions
nameof Page.empty.PriorPermalinks ]
orderByFunc (fun row -> row[nameof Page.empty.Title].Downcase () :> obj)
getAll [ webLogId ] (nameof Page.Empty.WebLogId)
merge (r.HashMap(nameof Page.Empty.Text, "")
.With(nameof Page.Empty.Metadata, [||])
.With(nameof Page.Empty.Revisions, [||])
.With(nameof Page.Empty.PriorPermalinks, [||]))
orderByFunc (fun row -> row[nameof Page.Empty.Title].Downcase() :> obj)
result; withRetryDefault conn
}
member _.CountAll webLogId = rethink<int> {
withTable Table.Page
getAll [ webLogId ] (nameof Page.empty.WebLogId)
getAll [ webLogId ] (nameof Page.Empty.WebLogId)
count
result; withRetryDefault conn
}
member _.CountListed webLogId = rethink<int> {
withTable Table.Page
getAll [ webLogId ] (nameof Page.empty.WebLogId)
filter (nameof Page.empty.IsInPageList) true
getAll [ webLogId ] (nameof Page.Empty.WebLogId)
filter (nameof Page.Empty.IsInPageList) true
count
result; withRetryDefault conn
}
@ -407,7 +467,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
let! result = rethink<Model.Result> {
withTable Table.Page
getAll [ pageId ]
filter (fun row -> row[nameof Page.empty.WebLogId].Eq webLogId :> obj)
filter (fun row -> row[nameof Page.Empty.WebLogId].Eq webLogId :> obj)
delete
write; withRetryDefault conn
}
@ -415,19 +475,22 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
}
member _.FindById pageId webLogId =
rethink<Page> {
rethink<Page list> {
withTable Table.Page
get pageId
without [ nameof Page.empty.PriorPermalinks; nameof Page.empty.Revisions ]
resultOption; withRetryOptionDefault
getAll [ pageId ]
filter (nameof Page.Empty.WebLogId) webLogId
merge (r.HashMap(nameof Page.Empty.PriorPermalinks, [||])
.With(nameof Page.Empty.Revisions, [||]))
result; withRetryDefault
}
|> verifyWebLog webLogId (fun it -> it.WebLogId) <| conn
|> tryFirst <| conn
member _.FindByPermalink permalink webLogId =
rethink<Page list> {
withTable Table.Page
getAll [ [| webLogId :> obj; permalink |] ] (nameof Page.empty.Permalink)
without [ nameof Page.empty.PriorPermalinks; nameof Page.empty.Revisions ]
getAll [ [| webLogId :> obj; permalink |] ] (nameof Page.Empty.Permalink)
merge (r.HashMap(nameof Page.Empty.PriorPermalinks, [||])
.With(nameof Page.Empty.Revisions, [||]))
limit 1
result; withRetryDefault
}
@ -437,14 +500,14 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
let! result =
(rethink<Page list> {
withTable Table.Page
getAll (objList permalinks) (nameof Page.empty.PriorPermalinks)
filter (nameof Page.empty.WebLogId) webLogId
without [ nameof Page.empty.Revisions; nameof Page.empty.Text ]
getAll (objList permalinks) (nameof Page.Empty.PriorPermalinks)
filter (nameof Page.Empty.WebLogId) webLogId
without [ nameof Page.Empty.Revisions; nameof Page.Empty.Text ]
limit 1
result; withRetryDefault
}
|> tryFirst) conn
return result |> Option.map (fun pg -> pg.Permalink)
return result |> Option.map _.Permalink
}
member _.FindFullById pageId webLogId =
@ -453,30 +516,32 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
get pageId
resultOption; withRetryOptionDefault
}
|> verifyWebLog webLogId (fun it -> it.WebLogId) <| conn
|> verifyWebLog webLogId _.WebLogId <| conn
member _.FindFullByWebLog webLogId = rethink<Page> {
withTable Table.Page
getAll [ webLogId ] (nameof Page.empty.WebLogId)
getAll [ webLogId ] (nameof Page.Empty.WebLogId)
resultCursor; withRetryCursorDefault; toList conn
}
member _.FindListed webLogId = rethink<Page list> {
withTable Table.Page
getAll [ webLogId ] (nameof Page.empty.WebLogId)
filter [ nameof Page.empty.IsInPageList, true :> obj ]
without [ nameof Page.empty.Text; nameof Page.empty.PriorPermalinks; nameof Page.empty.Revisions ]
orderBy (nameof Page.empty.Title)
getAll [ webLogId ] (nameof Page.Empty.WebLogId)
filter [ nameof Page.Empty.IsInPageList, true :> obj ]
merge (r.HashMap(nameof Page.Empty.Text, "")
.With(nameof Page.Empty.PriorPermalinks, [||])
.With(nameof Page.Empty.Revisions, [||]))
orderBy (nameof Page.Empty.Title)
result; withRetryDefault conn
}
member _.FindPageOfPages webLogId pageNbr = rethink<Page list> {
withTable Table.Page
getAll [ webLogId ] (nameof Page.empty.WebLogId)
without [ nameof Page.empty.Metadata
nameof Page.empty.PriorPermalinks
nameof Page.empty.Revisions ]
orderByFunc (fun row -> row[nameof Page.empty.Title].Downcase ())
getAll [ webLogId ] (nameof Page.Empty.WebLogId)
merge (r.HashMap(nameof Page.Empty.Metadata, [||])
.With(nameof Page.Empty.PriorPermalinks, [||])
.With(nameof Page.Empty.Revisions, [||]))
orderByFunc (fun row -> row[nameof Page.Empty.Title].Downcase())
skip ((pageNbr - 1) * 25)
limit 25
result; withRetryDefault conn
@ -504,7 +569,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
nameof page.PriorPermalinks, page.PriorPermalinks
nameof page.Metadata, page.Metadata
nameof page.Revisions, page.Revisions
]
]
write; withRetryDefault; ignoreResult conn
}
@ -514,7 +579,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
do! rethink {
withTable Table.Page
get pageId
update [ nameof Page.empty.PriorPermalinks, permalinks :> obj ]
update [ nameof Page.Empty.PriorPermalinks, permalinks :> obj ]
write; withRetryDefault; ignoreResult conn
}
return true
@ -533,8 +598,8 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
member _.CountByStatus status webLogId = rethink<int> {
withTable Table.Post
getAll [ webLogId ] (nameof Post.empty.WebLogId)
filter (nameof Post.empty.Status) status
getAll [ webLogId ] (nameof Post.Empty.WebLogId)
filter (nameof Post.Empty.Status) status
count
result; withRetryDefault conn
}
@ -543,7 +608,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
let! result = rethink<Model.Result> {
withTable Table.Post
getAll [ postId ]
filter (fun row -> row[nameof Post.empty.WebLogId].Eq webLogId :> obj)
filter (fun row -> row[nameof Post.Empty.WebLogId].Eq webLogId :> obj)
delete
write; withRetryDefault conn
}
@ -551,19 +616,22 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
}
member _.FindById postId webLogId =
rethink<Post> {
rethink<Post list> {
withTable Table.Post
get postId
without [ nameof Post.empty.PriorPermalinks; nameof Post.empty.Revisions ]
resultOption; withRetryOptionDefault
getAll [ postId ]
filter (nameof Post.Empty.WebLogId) webLogId
merge (r.HashMap(nameof Post.Empty.PriorPermalinks, [||])
.With(nameof Post.Empty.Revisions, [||]))
result; withRetryDefault
}
|> verifyWebLog webLogId (fun p -> p.WebLogId) <| conn
|> tryFirst <| conn
member _.FindByPermalink permalink webLogId =
rethink<Post list> {
withTable Table.Post
getAll [ [| webLogId :> obj; permalink |] ] (nameof Post.empty.Permalink)
without [ nameof Post.empty.PriorPermalinks; nameof Post.empty.Revisions ]
getAll [ [| webLogId :> obj; permalink |] ] (nameof Post.Empty.Permalink)
merge (r.HashMap(nameof Post.Empty.PriorPermalinks, [||])
.With(nameof Post.Empty.Revisions, [||]))
limit 1
result; withRetryDefault
}
@ -575,36 +643,37 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
get postId
resultOption; withRetryOptionDefault
}
|> verifyWebLog webLogId (fun p -> p.WebLogId) <| conn
|> verifyWebLog webLogId _.WebLogId <| conn
member _.FindCurrentPermalink permalinks webLogId = backgroundTask {
let! result =
(rethink<Post list> {
withTable Table.Post
getAll (objList permalinks) (nameof Post.empty.PriorPermalinks)
filter (nameof Post.empty.WebLogId) webLogId
without [ nameof Post.empty.Revisions; nameof Post.empty.Text ]
getAll (objList permalinks) (nameof Post.Empty.PriorPermalinks)
filter (nameof Post.Empty.WebLogId) webLogId
without [ nameof Post.Empty.Revisions; nameof Post.Empty.Text ]
limit 1
result; withRetryDefault
}
|> tryFirst) conn
return result |> Option.map (fun post -> post.Permalink)
return result |> Option.map _.Permalink
}
member _.FindFullByWebLog webLogId = rethink<Post> {
withTable Table.Post
getAll [ webLogId ] (nameof Post.empty.WebLogId)
getAll [ webLogId ] (nameof Post.Empty.WebLogId)
resultCursor; withRetryCursorDefault; toList conn
}
member _.FindPageOfCategorizedPosts webLogId categoryIds pageNbr postsPerPage = rethink<Post list> {
withTable Table.Post
getAll (objList categoryIds) (nameof Post.empty.CategoryIds)
filter [ nameof Post.empty.WebLogId, webLogId :> obj
nameof Post.empty.Status, Published ]
without [ nameof Post.empty.PriorPermalinks; nameof Post.empty.Revisions ]
getAll (objList categoryIds) (nameof Post.Empty.CategoryIds)
filter [ nameof Post.Empty.WebLogId, webLogId :> obj
nameof Post.Empty.Status, Published ]
merge (r.HashMap(nameof Post.Empty.PriorPermalinks, [||])
.With(nameof Post.Empty.Revisions, [||]))
distinct
orderByDescending (nameof Post.empty.PublishedOn)
orderByDescending (nameof Post.Empty.PublishedOn)
skip ((pageNbr - 1) * postsPerPage)
limit (postsPerPage + 1)
result; withRetryDefault conn
@ -612,10 +681,12 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
member _.FindPageOfPosts webLogId pageNbr postsPerPage = rethink<Post list> {
withTable Table.Post
getAll [ webLogId ] (nameof Post.empty.WebLogId)
without [ nameof Post.empty.PriorPermalinks; nameof Post.empty.Revisions ]
getAll [ webLogId ] (nameof Post.Empty.WebLogId)
merge (r.HashMap(nameof Post.Empty.Text, "")
.With(nameof Post.Empty.PriorPermalinks, [||])
.With(nameof Post.Empty.Revisions, [||]))
orderByFuncDescending (fun row ->
row[nameof Post.empty.PublishedOn].Default_ (nameof Post.empty.UpdatedOn) :> obj)
row[nameof Post.Empty.PublishedOn].Default_(nameof Post.Empty.UpdatedOn) :> obj)
skip ((pageNbr - 1) * postsPerPage)
limit (postsPerPage + 1)
result; withRetryDefault conn
@ -623,10 +694,11 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
member _.FindPageOfPublishedPosts webLogId pageNbr postsPerPage = rethink<Post list> {
withTable Table.Post
getAll [ webLogId ] (nameof Post.empty.WebLogId)
filter (nameof Post.empty.Status) Published
without [ nameof Post.empty.PriorPermalinks; nameof Post.empty.Revisions ]
orderByDescending (nameof Post.empty.PublishedOn)
getAll [ webLogId ] (nameof Post.Empty.WebLogId)
filter (nameof Post.Empty.Status) Published
merge (r.HashMap(nameof Post.Empty.PriorPermalinks, [||])
.With(nameof Post.Empty.Revisions, [||]))
orderByDescending (nameof Post.Empty.PublishedOn)
skip ((pageNbr - 1) * postsPerPage)
limit (postsPerPage + 1)
result; withRetryDefault conn
@ -634,11 +706,12 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
member _.FindPageOfTaggedPosts webLogId tag pageNbr postsPerPage = rethink<Post list> {
withTable Table.Post
getAll [ tag ] (nameof Post.empty.Tags)
filter [ nameof Post.empty.WebLogId, webLogId :> obj
nameof Post.empty.Status, Published ]
without [ nameof Post.empty.PriorPermalinks; nameof Post.empty.Revisions ]
orderByDescending (nameof Post.empty.PublishedOn)
getAll [ tag ] (nameof Post.Empty.Tags)
filter [ nameof Post.Empty.WebLogId, webLogId :> obj
nameof Post.Empty.Status, Published ]
merge (r.HashMap(nameof Post.Empty.PriorPermalinks, [||])
.With(nameof Post.Empty.Revisions, [||]))
orderByDescending (nameof Post.Empty.PublishedOn)
skip ((pageNbr - 1) * postsPerPage)
limit (postsPerPage + 1)
result; withRetryDefault conn
@ -648,10 +721,11 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
let! older =
rethink<Post list> {
withTable Table.Post
getAll [ webLogId ] (nameof Post.empty.WebLogId)
filter (fun row -> row[nameof Post.empty.PublishedOn].Lt publishedOn :> obj)
without [ nameof Post.empty.PriorPermalinks; nameof Post.empty.Revisions ]
orderByDescending (nameof Post.empty.PublishedOn)
getAll [ webLogId ] (nameof Post.Empty.WebLogId)
filter (fun row -> row[nameof Post.Empty.PublishedOn].Lt publishedOn :> obj)
merge (r.HashMap(nameof Post.Empty.PriorPermalinks, [||])
.With(nameof Post.Empty.Revisions, [||]))
orderByDescending (nameof Post.Empty.PublishedOn)
limit 1
result; withRetryDefault
}
@ -659,10 +733,11 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
let! newer =
rethink<Post list> {
withTable Table.Post
getAll [ webLogId ] (nameof Post.empty.WebLogId)
filter (fun row -> row[nameof Post.empty.PublishedOn].Gt publishedOn :> obj)
without [ nameof Post.empty.PriorPermalinks; nameof Post.empty.Revisions ]
orderBy (nameof Post.empty.PublishedOn)
getAll [ webLogId ] (nameof Post.Empty.WebLogId)
filter (fun row -> row[nameof Post.Empty.PublishedOn].Gt publishedOn :> obj)
merge (r.HashMap(nameof Post.Empty.PriorPermalinks, [||])
.With(nameof Post.Empty.Revisions, [||]))
orderBy (nameof Post.Empty.PublishedOn)
limit 1
result; withRetryDefault
}
@ -679,27 +754,25 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
}
}
member _.Update post = rethink {
withTable Table.Post
get post.Id
replace post
write; withRetryDefault; ignoreResult conn
member this.Update post = backgroundTask {
match! this.FindById post.Id post.WebLogId with
| Some _ ->
do! rethink {
withTable Table.Post
get post.Id
replace post
write; withRetryDefault; ignoreResult conn
}
| None -> ()
}
member _.UpdatePriorPermalinks postId webLogId permalinks = backgroundTask {
match! (
rethink<Post> {
withTable Table.Post
get postId
without [ nameof Post.empty.Revisions; nameof Post.empty.PriorPermalinks ]
resultOption; withRetryOptionDefault
}
|> verifyWebLog webLogId (fun p -> p.WebLogId)) conn with
member this.UpdatePriorPermalinks postId webLogId permalinks = backgroundTask {
match! this.FindById postId webLogId with
| Some _ ->
do! rethink {
withTable Table.Post
get postId
update [ nameof Post.empty.PriorPermalinks, permalinks :> obj ]
update [ nameof Post.Empty.PriorPermalinks, permalinks :> obj ]
write; withRetryDefault; ignoreResult conn
}
return true
@ -714,7 +787,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
let! result = rethink<Model.Result> {
withTable Table.TagMap
getAll [ tagMapId ]
filter (fun row -> row[nameof TagMap.empty.WebLogId].Eq webLogId :> obj)
filter (fun row -> row[nameof TagMap.Empty.WebLogId].Eq webLogId :> obj)
delete
write; withRetryDefault conn
}
@ -727,7 +800,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
get tagMapId
resultOption; withRetryOptionDefault
}
|> verifyWebLog webLogId (fun tm -> tm.WebLogId) <| conn
|> verifyWebLog webLogId _.WebLogId <| conn
member _.FindByUrlValue urlValue webLogId =
rethink<TagMap list> {
@ -740,9 +813,9 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
member _.FindByWebLog webLogId = rethink<TagMap list> {
withTable Table.TagMap
between [| webLogId :> obj; r.Minval () |] [| webLogId :> obj; r.Maxval () |]
between [| webLogId :> obj; r.Minval() |] [| webLogId :> obj; r.Maxval() |]
[ Index Index.WebLogAndTag ]
orderBy (nameof TagMap.empty.Tag)
orderBy (nameof TagMap.Empty.Tag)
result; withRetryDefault conn
}
@ -774,16 +847,16 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
member _.All () = rethink<Theme list> {
withTable Table.Theme
filter (fun row -> row[nameof Theme.empty.Id].Ne "admin" :> obj)
filter (fun row -> row[nameof Theme.Empty.Id].Ne "admin" :> obj)
merge withoutTemplateText
orderBy (nameof Theme.empty.Id)
orderBy (nameof Theme.Empty.Id)
result; withRetryDefault conn
}
member _.Exists themeId = backgroundTask {
let! count = rethink<int> {
withTable Table.Theme
filter (nameof Theme.empty.Id) themeId
filter (nameof Theme.Empty.Id) themeId
count
result; withRetryDefault conn
}
@ -796,12 +869,14 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
resultOption; withRetryOptionDefault conn
}
member _.FindByIdWithoutText themeId = rethink<Theme> {
withTable Table.Theme
get themeId
merge withoutTemplateText
resultOption; withRetryOptionDefault conn
}
member _.FindByIdWithoutText themeId =
rethink<Theme list> {
withTable Table.Theme
getAll [ themeId ]
merge withoutTemplateText
result; withRetryDefault
}
|> tryFirst <| conn
member this.Delete themeId = backgroundTask {
match! this.FindByIdWithoutText themeId with
@ -830,7 +905,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
member _.All () = rethink<ThemeAsset list> {
withTable Table.ThemeAsset
without [ nameof ThemeAsset.empty.Data ]
merge (r.HashMap(nameof ThemeAsset.Empty.Data, emptyFile))
result; withRetryDefault conn
}
@ -845,7 +920,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
member _.FindByTheme themeId = rethink<ThemeAsset list> {
withTable Table.ThemeAsset
filter (matchAssetByThemeId themeId)
without [ nameof ThemeAsset.empty.Data ]
merge (r.HashMap(nameof ThemeAsset.Empty.Data, emptyFile))
result; withRetryDefault conn
}
@ -879,7 +954,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
get uploadId
resultOption; withRetryOptionDefault
}
|> verifyWebLog<Upload> webLogId (fun u -> u.WebLogId) <| conn
|> verifyWebLog<Upload> webLogId _.WebLogId <| conn
match upload with
| Some up ->
do! rethink {
@ -888,8 +963,8 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
delete
write; withRetryDefault; ignoreResult conn
}
return Ok (Permalink.toString up.Path)
| None -> return Result.Error $"Upload ID {UploadId.toString uploadId} not found"
return Ok (string up.Path)
| None -> return Result.Error $"Upload ID {uploadId} not found"
}
member _.FindByPath path webLogId =
@ -902,15 +977,15 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
member _.FindByWebLog webLogId = rethink<Upload> {
withTable Table.Upload
between [| webLogId :> obj; r.Minval () |] [| webLogId :> obj; r.Maxval () |]
between [| webLogId :> obj; r.Minval() |] [| webLogId :> obj; r.Maxval() |]
[ Index Index.WebLogAndPath ]
without [ nameof Upload.empty.Data ]
merge (r.HashMap(nameof Upload.Empty.Data, emptyFile))
resultCursor; withRetryCursorDefault; toList conn
}
member _.FindByWebLogWithData webLogId = rethink<Upload> {
withTable Table.Upload
between [| webLogId :> obj; r.Minval () |] [| webLogId :> obj; r.Maxval () |]
between [| webLogId :> obj; r.Minval() |] [| webLogId :> obj; r.Maxval() |]
[ Index Index.WebLogAndPath ]
resultCursor; withRetryCursorDefault; toList conn
}
@ -919,7 +994,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
// Files can be large; we'll do 5 at a time
for batch in uploads |> List.chunkBySize 5 do
do! rethink {
withTable Table.TagMap
withTable Table.Upload
insert batch
write; withRetryOnce; ignoreResult conn
}
@ -942,24 +1017,24 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
member _.Delete webLogId = backgroundTask {
// Comments should be deleted by post IDs
let! thePostIds = rethink<{| Id : string |} list> {
let! thePostIds = rethink<{| Id: string |} list> {
withTable Table.Post
getAll [ webLogId ] (nameof Post.empty.WebLogId)
pluck [ nameof Post.empty.Id ]
getAll [ webLogId ] (nameof Post.Empty.WebLogId)
pluck [ nameof Post.Empty.Id ]
result; withRetryOnce conn
}
if not (List.isEmpty thePostIds) then
let postIds = thePostIds |> List.map (fun it -> it.Id :> obj)
do! rethink {
withTable Table.Comment
getAll postIds (nameof Comment.empty.PostId)
getAll postIds (nameof Comment.Empty.PostId)
delete
write; withRetryOnce; ignoreResult conn
}
// Tag mappings do not have a straightforward webLogId index
do! rethink {
withTable Table.TagMap
between [| webLogId :> obj; r.Minval () |] [| webLogId :> obj; r.Maxval () |]
between [| webLogId :> obj; r.Minval() |] [| webLogId :> obj; r.Maxval() |]
[ Index Index.WebLogAndTag ]
delete
write; withRetryOnce; ignoreResult conn
@ -967,7 +1042,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
// Uploaded files do not have a straightforward webLogId index
do! rethink {
withTable Table.Upload
between [| webLogId :> obj; r.Minval () |] [| webLogId :> obj; r.Maxval () |]
between [| webLogId :> obj; r.Minval() |] [| webLogId :> obj; r.Maxval() |]
[ Index Index.WebLogAndPath ]
delete
write; withRetryOnce; ignoreResult conn
@ -975,7 +1050,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
for table in [ Table.Post; Table.Category; Table.Page; Table.WebLogUser ] do
do! rethink {
withTable table
getAll [ webLogId ] (nameof Post.empty.WebLogId)
getAll [ webLogId ] (nameof Post.Empty.WebLogId)
delete
write; withRetryOnce; ignoreResult conn
}
@ -990,7 +1065,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
member _.FindByHost url =
rethink<WebLog list> {
withTable Table.WebLog
getAll [ url ] (nameof WebLog.empty.UrlBase)
getAll [ url ] (nameof WebLog.Empty.UrlBase)
limit 1
result; withRetryDefault
}
@ -1002,10 +1077,17 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
resultOption; withRetryOptionDefault conn
}
member _.UpdateRedirectRules webLog = rethink {
withTable Table.WebLog
get webLog.Id
update [ nameof WebLog.Empty.RedirectRules, webLog.RedirectRules :> obj ]
write; withRetryDefault; ignoreResult conn
}
member _.UpdateRssOptions webLog = rethink {
withTable Table.WebLog
get webLog.Id
update [ nameof WebLog.empty.Rss, webLog.Rss :> obj ]
update [ nameof WebLog.Empty.Rss, webLog.Rss :> obj ]
write; withRetryDefault; ignoreResult conn
}
@ -1042,22 +1124,22 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
get userId
resultOption; withRetryOptionDefault
}
|> verifyWebLog webLogId (fun u -> u.WebLogId) <| conn
|> verifyWebLog webLogId _.WebLogId <| conn
member this.Delete userId webLogId = backgroundTask {
match! this.FindById userId webLogId with
| Some _ ->
let! pageCount = rethink<int> {
withTable Table.Page
getAll [ webLogId ] (nameof Page.empty.WebLogId)
filter (nameof Page.empty.AuthorId) userId
getAll [ webLogId ] (nameof Page.Empty.WebLogId)
filter (nameof Page.Empty.AuthorId) userId
count
result; withRetryDefault conn
}
let! postCount = rethink<int> {
withTable Table.Post
getAll [ webLogId ] (nameof Post.empty.WebLogId)
filter (nameof Post.empty.AuthorId) userId
getAll [ webLogId ] (nameof Post.Empty.WebLogId)
filter (nameof Post.Empty.AuthorId) userId
count
result; withRetryDefault conn
}
@ -1085,8 +1167,8 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
member _.FindByWebLog webLogId = rethink<WebLogUser list> {
withTable Table.WebLogUser
getAll [ webLogId ] (nameof WebLogUser.empty.WebLogId)
orderByFunc (fun row -> row[nameof WebLogUser.empty.PreferredName].Downcase ())
getAll [ webLogId ] (nameof WebLogUser.Empty.WebLogId)
orderByFunc (fun row -> row[nameof WebLogUser.Empty.PreferredName].Downcase())
result; withRetryDefault conn
}
@ -1094,12 +1176,10 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
let! users = rethink<WebLogUser list> {
withTable Table.WebLogUser
getAll (objList userIds)
filter (nameof WebLogUser.empty.WebLogId) webLogId
filter (nameof WebLogUser.Empty.WebLogId) webLogId
result; withRetryDefault conn
}
return
users
|> List.map (fun u -> { Name = WebLogUserId.toString u.Id; Value = WebLogUser.displayName u })
return users |> List.map (fun u -> { Name = string u.Id; Value = u.DisplayName })
}
member _.Restore users = backgroundTask {
@ -1117,7 +1197,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
do! rethink {
withTable Table.WebLogUser
get userId
update [ nameof WebLogUser.empty.LastSeenOn, Noda.now () :> obj ]
update [ nameof WebLogUser.Empty.LastSeenOn, Noda.now () :> obj ]
write; withRetryOnce; ignoreResult conn
}
| None -> ()
@ -1162,21 +1242,19 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger<R
write; withRetryOnce; ignoreResult conn
}
do! ensureIndexes Table.Category [ nameof Category.empty.WebLogId ]
do! ensureIndexes Table.Comment [ nameof Comment.empty.PostId ]
do! ensureIndexes Table.Page [ nameof Page.empty.WebLogId; nameof Page.empty.AuthorId ]
do! ensureIndexes Table.Post [ nameof Post.empty.WebLogId; nameof Post.empty.AuthorId ]
do! ensureIndexes Table.Category [ nameof Category.Empty.WebLogId ]
do! ensureIndexes Table.Comment [ nameof Comment.Empty.PostId ]
do! ensureIndexes Table.Page [ nameof Page.Empty.WebLogId; nameof Page.Empty.AuthorId ]
do! ensureIndexes Table.Post [ nameof Post.Empty.WebLogId; nameof Post.Empty.AuthorId ]
do! ensureIndexes Table.TagMap []
do! ensureIndexes Table.Upload []
do! ensureIndexes Table.WebLog [ nameof WebLog.empty.UrlBase ]
do! ensureIndexes Table.WebLogUser [ nameof WebLogUser.empty.WebLogId ]
do! ensureIndexes Table.WebLog [ nameof WebLog.Empty.UrlBase ]
do! ensureIndexes Table.WebLogUser [ nameof WebLogUser.Empty.WebLogId ]
let! version = rethink<{| Id : string |} list> {
let! version = rethink<{| Id: string |} list> {
withTable Table.DbVersion
limit 1
result; withRetryOnce conn
}
match List.tryHead version with
| Some v when v.Id = "v2-rc2" -> ()
| it -> do! migrate (it |> Option.map (fun x -> x.Id))
do! migrate (List.tryHead version |> Option.map _.Id)
}

View File

@ -1,314 +0,0 @@
/// Helper functions for the SQLite data implementation
[<AutoOpen>]
module MyWebLog.Data.SQLite.Helpers
open System
open Microsoft.Data.Sqlite
open MyWebLog
open MyWebLog.Data
open NodaTime.Text
/// Run a command that returns a count
let count (cmd : SqliteCommand) = backgroundTask {
let! it = cmd.ExecuteScalarAsync ()
return int (it :?> int64)
}
/// Create a list of items from the given data reader
let toList<'T> (it : SqliteDataReader -> 'T) (rdr : SqliteDataReader) =
seq { while rdr.Read () do it rdr }
|> List.ofSeq
/// Verify that the web log ID matches before returning an item
let verifyWebLog<'T> webLogId (prop : 'T -> WebLogId) (it : SqliteDataReader -> 'T) (rdr : SqliteDataReader) =
if rdr.Read () then
let item = it rdr
if prop item = webLogId then Some item else None
else None
/// Execute a command that returns no data
let write (cmd : SqliteCommand) = backgroundTask {
let! _ = cmd.ExecuteNonQueryAsync ()
()
}
/// Add a possibly-missing parameter, substituting null for None
let maybe<'T> (it : 'T option) : obj = match it with Some x -> x :> obj | None -> DBNull.Value
/// Create a value for a Duration
let durationParam =
DurationPattern.Roundtrip.Format
/// Create a value for an Instant
let instantParam =
InstantPattern.General.Format
/// Create an optional value for a Duration
let maybeDuration =
Option.map durationParam >> maybe
/// Create an optional value for an Instant
let maybeInstant =
Option.map instantParam >> maybe
/// Create the SQL and parameters for an IN clause
let inClause<'T> colNameAndPrefix paramName (valueFunc: 'T -> string) (items : 'T list) =
if List.isEmpty items then "", []
else
let mutable idx = 0
items
|> List.skip 1
|> List.fold (fun (itemS, itemP) it ->
idx <- idx + 1
$"{itemS}, @%s{paramName}{idx}", (SqliteParameter ($"@%s{paramName}{idx}", valueFunc it) :: itemP))
(Seq.ofList items
|> Seq.map (fun it ->
$"%s{colNameAndPrefix} IN (@%s{paramName}0", [ SqliteParameter ($"@%s{paramName}0", valueFunc it) ])
|> Seq.head)
|> function sql, ps -> $"{sql})", ps
/// Functions to map domain items from a data reader
module Map =
open System.IO
/// Get a boolean value from a data reader
let getBoolean col (rdr : SqliteDataReader) = rdr.GetBoolean (rdr.GetOrdinal col)
/// Get a date/time value from a data reader
let getDateTime col (rdr : SqliteDataReader) = rdr.GetDateTime (rdr.GetOrdinal col)
/// Get a Guid value from a data reader
let getGuid col (rdr : SqliteDataReader) = rdr.GetGuid (rdr.GetOrdinal col)
/// Get an int value from a data reader
let getInt col (rdr : SqliteDataReader) = rdr.GetInt32 (rdr.GetOrdinal col)
/// Get a long (64-bit int) value from a data reader
let getLong col (rdr : SqliteDataReader) = rdr.GetInt64 (rdr.GetOrdinal col)
/// Get a BLOB stream value from a data reader
let getStream col (rdr : SqliteDataReader) = rdr.GetStream (rdr.GetOrdinal col)
/// Get a string value from a data reader
let getString col (rdr : SqliteDataReader) = rdr.GetString (rdr.GetOrdinal col)
/// Parse a Duration from the given value
let parseDuration value =
match DurationPattern.Roundtrip.Parse value with
| it when it.Success -> it.Value
| it -> raise it.Exception
/// Get a Duration value from a data reader
let getDuration col rdr =
getString col rdr |> parseDuration
/// Parse an Instant from the given value
let parseInstant value =
match InstantPattern.General.Parse value with
| it when it.Success -> it.Value
| it -> raise it.Exception
/// Get an Instant value from a data reader
let getInstant col rdr =
getString col rdr |> parseInstant
/// Get a timespan value from a data reader
let getTimeSpan col (rdr : SqliteDataReader) = rdr.GetTimeSpan (rdr.GetOrdinal col)
/// Get a possibly null boolean value from a data reader
let tryBoolean col (rdr : SqliteDataReader) =
if rdr.IsDBNull (rdr.GetOrdinal col) then None else Some (getBoolean col rdr)
/// Get a possibly null date/time value from a data reader
let tryDateTime col (rdr : SqliteDataReader) =
if rdr.IsDBNull (rdr.GetOrdinal col) then None else Some (getDateTime col rdr)
/// Get a possibly null Guid value from a data reader
let tryGuid col (rdr : SqliteDataReader) =
if rdr.IsDBNull (rdr.GetOrdinal col) then None else Some (getGuid col rdr)
/// Get a possibly null int value from a data reader
let tryInt col (rdr : SqliteDataReader) =
if rdr.IsDBNull (rdr.GetOrdinal col) then None else Some (getInt col rdr)
/// Get a possibly null string value from a data reader
let tryString col (rdr : SqliteDataReader) =
if rdr.IsDBNull (rdr.GetOrdinal col) then None else Some (getString col rdr)
/// Get a possibly null Duration value from a data reader
let tryDuration col rdr =
tryString col rdr |> Option.map parseDuration
/// Get a possibly null Instant value from a data reader
let tryInstant col rdr =
tryString col rdr |> Option.map parseInstant
/// Get a possibly null timespan value from a data reader
let tryTimeSpan col (rdr : SqliteDataReader) =
if rdr.IsDBNull (rdr.GetOrdinal col) then None else Some (getTimeSpan col rdr)
/// Map an id field to a category ID
let toCategoryId rdr = getString "id" rdr |> CategoryId
/// Create a category from the current row in the given data reader
let toCategory rdr : Category =
{ Id = toCategoryId rdr
WebLogId = getString "web_log_id" rdr |> WebLogId
Name = getString "name" rdr
Slug = getString "slug" rdr
Description = tryString "description" rdr
ParentId = tryString "parent_id" rdr |> Option.map CategoryId
}
/// Create a custom feed from the current row in the given data reader
let toCustomFeed ser rdr : CustomFeed =
{ Id = getString "id" rdr |> CustomFeedId
Source = getString "source" rdr |> CustomFeedSource.parse
Path = getString "path" rdr |> Permalink
Podcast = tryString "podcast" rdr |> Option.map (Utils.deserialize ser)
}
/// Create a permalink from the current row in the given data reader
let toPermalink rdr = getString "permalink" rdr |> Permalink
/// Create a page from the current row in the given data reader
let toPage ser rdr : Page =
{ Page.empty with
Id = getString "id" rdr |> PageId
WebLogId = getString "web_log_id" rdr |> WebLogId
AuthorId = getString "author_id" rdr |> WebLogUserId
Title = getString "title" rdr
Permalink = toPermalink rdr
PublishedOn = getInstant "published_on" rdr
UpdatedOn = getInstant "updated_on" rdr
IsInPageList = getBoolean "is_in_page_list" rdr
Template = tryString "template" rdr
Text = getString "page_text" rdr
Metadata = tryString "meta_items" rdr
|> Option.map (Utils.deserialize ser)
|> Option.defaultValue []
}
/// Create a post from the current row in the given data reader
let toPost ser rdr : Post =
{ Post.empty with
Id = getString "id" rdr |> PostId
WebLogId = getString "web_log_id" rdr |> WebLogId
AuthorId = getString "author_id" rdr |> WebLogUserId
Status = getString "status" rdr |> PostStatus.parse
Title = getString "title" rdr
Permalink = toPermalink rdr
PublishedOn = tryInstant "published_on" rdr
UpdatedOn = getInstant "updated_on" rdr
Template = tryString "template" rdr
Text = getString "post_text" rdr
Episode = tryString "episode" rdr |> Option.map (Utils.deserialize ser)
Metadata = tryString "meta_items" rdr
|> Option.map (Utils.deserialize ser)
|> Option.defaultValue []
}
/// Create a revision from the current row in the given data reader
let toRevision rdr : Revision =
{ AsOf = getInstant "as_of" rdr
Text = getString "revision_text" rdr |> MarkupText.parse
}
/// Create a tag mapping from the current row in the given data reader
let toTagMap rdr : TagMap =
{ Id = getString "id" rdr |> TagMapId
WebLogId = getString "web_log_id" rdr |> WebLogId
Tag = getString "tag" rdr
UrlValue = getString "url_value" rdr
}
/// Create a theme from the current row in the given data reader (excludes templates)
let toTheme rdr : Theme =
{ Theme.empty with
Id = getString "id" rdr |> ThemeId
Name = getString "name" rdr
Version = getString "version" rdr
}
/// Create a theme asset from the current row in the given data reader
let toThemeAsset includeData rdr : ThemeAsset =
let assetData =
if includeData then
use dataStream = new MemoryStream ()
use blobStream = getStream "data" rdr
blobStream.CopyTo dataStream
dataStream.ToArray ()
else
[||]
{ Id = ThemeAssetId (ThemeId (getString "theme_id" rdr), getString "path" rdr)
UpdatedOn = getInstant "updated_on" rdr
Data = assetData
}
/// Create a theme template from the current row in the given data reader
let toThemeTemplate includeText rdr : ThemeTemplate =
{ Name = getString "name" rdr
Text = if includeText then getString "template" rdr else ""
}
/// Create an uploaded file from the current row in the given data reader
let toUpload includeData rdr : Upload =
let data =
if includeData then
use dataStream = new MemoryStream ()
use blobStream = getStream "data" rdr
blobStream.CopyTo dataStream
dataStream.ToArray ()
else
[||]
{ Id = getString "id" rdr |> UploadId
WebLogId = getString "web_log_id" rdr |> WebLogId
Path = getString "path" rdr |> Permalink
UpdatedOn = getInstant "updated_on" rdr
Data = data
}
/// Create a web log from the current row in the given data reader
let toWebLog rdr : WebLog =
{ Id = getString "id" rdr |> WebLogId
Name = getString "name" rdr
Slug = getString "slug" rdr
Subtitle = tryString "subtitle" rdr
DefaultPage = getString "default_page" rdr
PostsPerPage = getInt "posts_per_page" rdr
ThemeId = getString "theme_id" rdr |> ThemeId
UrlBase = getString "url_base" rdr
TimeZone = getString "time_zone" rdr
AutoHtmx = getBoolean "auto_htmx" rdr
Uploads = getString "uploads" rdr |> UploadDestination.parse
Rss = {
IsFeedEnabled = getBoolean "is_feed_enabled" rdr
FeedName = getString "feed_name" rdr
ItemsInFeed = tryInt "items_in_feed" rdr
IsCategoryEnabled = getBoolean "is_category_enabled" rdr
IsTagEnabled = getBoolean "is_tag_enabled" rdr
Copyright = tryString "copyright" rdr
CustomFeeds = []
}
}
/// Create a web log user from the current row in the given data reader
let toWebLogUser rdr : WebLogUser =
{ Id = getString "id" rdr |> WebLogUserId
WebLogId = getString "web_log_id" rdr |> WebLogId
Email = getString "email" rdr
FirstName = getString "first_name" rdr
LastName = getString "last_name" rdr
PreferredName = getString "preferred_name" rdr
PasswordHash = getString "password_hash" rdr
Url = tryString "url" rdr
AccessLevel = getString "access_level" rdr |> AccessLevel.parse
CreatedOn = getInstant "created_on" rdr
LastSeenOn = tryInstant "last_seen_on" rdr
}
/// Add a web log ID parameter
let addWebLogId (cmd : SqliteCommand) webLogId =
cmd.Parameters.AddWithValue ("@webLogId", WebLogId.toString webLogId) |> ignore

View File

@ -1,69 +1,43 @@
namespace MyWebLog.Data.SQLite
open System.Threading.Tasks
open BitBadger.Documents
open BitBadger.Documents.Sqlite
open Microsoft.Data.Sqlite
open Microsoft.Extensions.Logging
open MyWebLog
open MyWebLog.Data
open Newtonsoft.Json
/// SQLite myWebLog category data implementation
type SQLiteCategoryData (conn : SqliteConnection) =
/// SQLite myWebLog category data implementation
type SQLiteCategoryData(conn: SqliteConnection, ser: JsonSerializer, log: ILogger) =
/// Add parameters for category INSERT or UPDATE statements
let addCategoryParameters (cmd : SqliteCommand) (cat : Category) =
[ cmd.Parameters.AddWithValue ("@id", CategoryId.toString cat.Id)
cmd.Parameters.AddWithValue ("@webLogId", WebLogId.toString cat.WebLogId)
cmd.Parameters.AddWithValue ("@name", cat.Name)
cmd.Parameters.AddWithValue ("@slug", cat.Slug)
cmd.Parameters.AddWithValue ("@description", maybe cat.Description)
cmd.Parameters.AddWithValue ("@parentId", maybe (cat.ParentId |> Option.map CategoryId.toString))
] |> ignore
/// Add a category
let add cat = backgroundTask {
use cmd = conn.CreateCommand ()
cmd.CommandText <-
"INSERT INTO category (
id, web_log_id, name, slug, description, parent_id
) VALUES (
@id, @webLogId, @name, @slug, @description, @parentId
)"
addCategoryParameters cmd cat
let! _ = cmd.ExecuteNonQueryAsync ()
()
}
/// The name of the parent ID field
let parentIdField = nameof Category.Empty.ParentId
/// Count all categories for the given web log
let countAll webLogId = backgroundTask {
use cmd = conn.CreateCommand ()
cmd.CommandText <- "SELECT COUNT(id) FROM category WHERE web_log_id = @webLogId"
addWebLogId cmd webLogId
return! count cmd
}
let countAll webLogId =
log.LogTrace "Category.countAll"
Document.countByWebLog Table.Category webLogId conn
/// Count all top-level categories for the given web log
let countTopLevel webLogId = backgroundTask {
use cmd = conn.CreateCommand ()
cmd.CommandText <-
"SELECT COUNT(id) FROM category WHERE web_log_id = @webLogId AND parent_id IS NULL"
addWebLogId cmd webLogId
return! count cmd
}
let countTopLevel webLogId =
log.LogTrace "Category.countTopLevel"
conn.customScalar
$"{Document.Query.countByWebLog Table.Category} AND data ->> '{parentIdField}' IS NULL"
[ webLogParam webLogId ]
(toCount >> int)
/// Find all categories for the given web log
let findByWebLog webLogId =
log.LogTrace "Category.findByWebLog"
Document.findByWebLog<Category> Table.Category webLogId conn
/// Retrieve all categories for the given web log in a DotLiquid-friendly format
let findAllForView webLogId = backgroundTask {
use cmd = conn.CreateCommand ()
cmd.CommandText <- "SELECT * FROM category WHERE web_log_id = @webLogId"
addWebLogId cmd webLogId
use! rdr = cmd.ExecuteReaderAsync ()
let cats =
seq {
while rdr.Read () do
Map.toCategory rdr
}
|> Seq.sortBy (fun cat -> cat.Name.ToLowerInvariant ())
|> List.ofSeq
do! rdr.CloseAsync ()
let ordered = Utils.orderByHierarchy cats None None []
log.LogTrace "Category.findAllForView"
let! cats = findByWebLog webLogId
let ordered = Utils.orderByHierarchy (cats |> List.sortBy _.Name.ToLowerInvariant()) None None []
let! counts =
ordered
|> Seq.map (fun it -> backgroundTask {
@ -71,104 +45,80 @@ type SQLiteCategoryData (conn : SqliteConnection) =
let catSql, catParams =
ordered
|> Seq.filter (fun cat -> cat.ParentNames |> Array.contains it.Name)
|> Seq.map (fun cat -> cat.Id)
|> Seq.map _.Id
|> Seq.append (Seq.singleton it.Id)
|> List.ofSeq
|> inClause "AND pc.category_id" "catId" id
cmd.Parameters.Clear ()
addWebLogId cmd webLogId
cmd.Parameters.AddRange catParams
cmd.CommandText <- $"
SELECT COUNT(DISTINCT p.id)
FROM post p
INNER JOIN post_category pc ON pc.post_id = p.id
WHERE p.web_log_id = @webLogId
AND p.status = 'Published'
{catSql}"
let! postCount = count cmd
return it.Id, postCount
})
|> inJsonArray Table.Post (nameof Post.Empty.CategoryIds) "catId"
let query = $"""
SELECT COUNT(DISTINCT data ->> '{nameof Post.Empty.Id}')
FROM {Table.Post}
WHERE {Document.Query.whereByWebLog}
AND {Query.whereByField (Field.EQ (nameof Post.Empty.Status) "") $"'{string Published}'"}
AND {catSql}"""
let! postCount = conn.customScalar query (webLogParam webLogId :: catParams) toCount
return it.Id, int postCount
})
|> Task.WhenAll
return
ordered
|> Seq.map (fun cat ->
{ cat with
PostCount = counts
|> Array.tryFind (fun c -> fst c = cat.Id)
|> Option.map snd
|> Option.defaultValue 0
PostCount = defaultArg (counts |> Array.tryFind (fun c -> fst c = cat.Id) |> Option.map snd) 0
})
|> Array.ofSeq
}
/// Find a category by its ID for the given web log
let findById catId webLogId = backgroundTask {
use cmd = conn.CreateCommand ()
cmd.CommandText <- "SELECT * FROM category WHERE id = @id"
cmd.Parameters.AddWithValue ("@id", CategoryId.toString catId) |> ignore
use! rdr = cmd.ExecuteReaderAsync ()
return Helpers.verifyWebLog<Category> webLogId (fun c -> c.WebLogId) Map.toCategory rdr
}
/// Find all categories for the given web log
let findByWebLog webLogId = backgroundTask {
use cmd = conn.CreateCommand ()
cmd.CommandText <- "SELECT * FROM category WHERE web_log_id = @webLogId"
cmd.Parameters.AddWithValue ("@webLogId", WebLogId.toString webLogId) |> ignore
use! rdr = cmd.ExecuteReaderAsync ()
return toList Map.toCategory rdr
}
/// Find a category by its ID for the given web log
let findById catId webLogId =
log.LogTrace "Category.findById"
Document.findByIdAndWebLog<CategoryId, Category> Table.Category catId webLogId conn
/// Delete a category
let delete catId webLogId = backgroundTask {
log.LogTrace "Category.delete"
match! findById catId webLogId with
| Some cat ->
use cmd = conn.CreateCommand ()
// Reassign any children to the category's parent category
cmd.CommandText <- "SELECT COUNT(id) FROM category WHERE parent_id = @parentId"
cmd.Parameters.AddWithValue ("@parentId", CategoryId.toString catId) |> ignore
let! children = count cmd
if children > 0 then
cmd.CommandText <- "UPDATE category SET parent_id = @newParentId WHERE parent_id = @parentId"
cmd.Parameters.AddWithValue ("@newParentId", maybe (cat.ParentId |> Option.map CategoryId.toString))
|> ignore
do! write cmd
let! children = conn.countByField Table.Category (Field.EQ parentIdField (string catId))
if children > 0L then
let parent = Field.EQ parentIdField (string catId)
match cat.ParentId with
| Some _ -> do! conn.patchByField Table.Category parent {| ParentId = cat.ParentId |}
| None -> do! conn.removeFieldsByField Table.Category parent [ parentIdField ]
// Delete the category off all posts where it is assigned, and the category itself
cmd.CommandText <-
"DELETE FROM post_category
WHERE category_id = @id
AND post_id IN (SELECT id FROM post WHERE web_log_id = @webLogId);
DELETE FROM category WHERE id = @id"
cmd.Parameters.Clear ()
let _ = cmd.Parameters.AddWithValue ("@id", CategoryId.toString catId)
addWebLogId cmd webLogId
do! write cmd
return if children = 0 then CategoryDeleted else ReassignedChildCategories
let catIdField = nameof Post.Empty.CategoryIds
let! posts =
conn.customList
$"SELECT data ->> '{nameof Post.Empty.Id}', data -> '{catIdField}'
FROM {Table.Post}
WHERE {Document.Query.whereByWebLog}
AND EXISTS
(SELECT 1
FROM json_each({Table.Post}.data -> '{catIdField}')
WHERE json_each.value = @id)"
[ idParam catId; webLogParam webLogId ]
(fun rdr -> rdr.GetString 0, Utils.deserialize<string list> ser (rdr.GetString 1))
for postId, cats in posts do
do! conn.patchById
Table.Post postId {| CategoryIds = cats |> List.filter (fun it -> it <> string catId) |}
do! conn.deleteById Table.Category catId
return if children = 0L then CategoryDeleted else ReassignedChildCategories
| None -> return CategoryNotFound
}
/// Save a category
let save cat =
log.LogTrace "Category.save"
conn.save<Category> Table.Category cat
/// Restore categories from a backup
let restore cats = backgroundTask {
for cat in cats do
do! add cat
}
/// Update a category
let update cat = backgroundTask {
use cmd = conn.CreateCommand ()
cmd.CommandText <-
"UPDATE category
SET name = @name,
slug = @slug,
description = @description,
parent_id = @parentId
WHERE id = @id
AND web_log_id = @webLogId"
addCategoryParameters cmd cat
do! write cmd
log.LogTrace "Category.restore"
for cat in cats do do! save cat
}
interface ICategoryData with
member _.Add cat = add cat
member _.Add cat = save cat
member _.CountAll webLogId = countAll webLogId
member _.CountTopLevel webLogId = countTopLevel webLogId
member _.FindAllForView webLogId = findAllForView webLogId
@ -176,4 +126,4 @@ type SQLiteCategoryData (conn : SqliteConnection) =
member _.FindByWebLog webLogId = findByWebLog webLogId
member _.Delete catId webLogId = delete catId webLogId
member _.Restore cats = restore cats
member _.Update cat = update cat
member _.Update cat = save cat

View File

@ -0,0 +1,307 @@
/// Helper functions for the SQLite data implementation
[<AutoOpen>]
module MyWebLog.Data.SQLite.SQLiteHelpers
/// The table names used in the SQLite implementation
[<RequireQualifiedAccess>]
module Table =
/// Categories
[<Literal>]
let Category = "category"
/// Database Version
[<Literal>]
let DbVersion = "db_version"
/// Pages
[<Literal>]
let Page = "page"
/// Page Revisions
[<Literal>]
let PageRevision = "page_revision"
/// Posts
[<Literal>]
let Post = "post"
/// Post Comments
[<Literal>]
let PostComment = "post_comment"
/// Post Revisions
[<Literal>]
let PostRevision = "post_revision"
/// Tag/URL Mappings
[<Literal>]
let TagMap = "tag_map"
/// Themes
[<Literal>]
let Theme = "theme"
/// Theme Assets
[<Literal>]
let ThemeAsset = "theme_asset"
/// Uploads
[<Literal>]
let Upload = "upload"
/// Web Logs
[<Literal>]
let WebLog = "web_log"
/// Users
[<Literal>]
let WebLogUser = "web_log_user"
open System
open Microsoft.Data.Sqlite
open MyWebLog
open MyWebLog.Data
open NodaTime.Text
/// Execute a command that returns no data
let write (cmd: SqliteCommand) = backgroundTask {
let! _ = cmd.ExecuteNonQueryAsync()
()
}
/// Add a possibly-missing parameter, substituting null for None
let maybe<'T> (it: 'T option) : obj = match it with Some x -> x :> obj | None -> DBNull.Value
/// Create a value for an Instant
let instantParam =
InstantPattern.General.Format
/// Create an optional value for an Instant
let maybeInstant =
Option.map instantParam >> maybe
/// Create the SQL and parameters for an EXISTS applied to a JSON array
let inJsonArray<'T> table jsonField paramName (items: 'T list) =
if List.isEmpty items then "", []
else
let mutable idx = 0
items
|> List.skip 1
|> List.fold (fun (itemS, itemP) it ->
idx <- idx + 1
$"{itemS}, @%s{paramName}{idx}", (SqliteParameter($"@%s{paramName}{idx}", string it) :: itemP))
(Seq.ofList items
|> Seq.map (fun it -> $"(@%s{paramName}0", [ SqliteParameter($"@%s{paramName}0", string it) ])
|> Seq.head)
|> function
sql, ps ->
$"EXISTS (SELECT 1 FROM json_each(%s{table}.data, '$.%s{jsonField}') WHERE value IN {sql}))", ps
/// Create the SQL and parameters for an IN clause
let inClause<'T> colNameAndPrefix paramName (valueFunc: 'T -> string) (items: 'T list) =
if List.isEmpty items then "", []
else
let mutable idx = 0
items
|> List.skip 1
|> List.fold (fun (itemS, itemP) it ->
idx <- idx + 1
$"{itemS}, @%s{paramName}{idx}", (SqliteParameter ($"@%s{paramName}{idx}", valueFunc it) :: itemP))
(Seq.ofList items
|> Seq.map (fun it ->
$"%s{colNameAndPrefix} IN (@%s{paramName}0", [ SqliteParameter ($"@%s{paramName}0", valueFunc it) ])
|> Seq.head)
|> function sql, ps -> $"{sql})", ps
/// Functions to map domain items from a data reader
module Map =
open System.IO
/// Get a boolean value from a data reader
let getBoolean col (rdr: SqliteDataReader) = rdr.GetBoolean(rdr.GetOrdinal col)
/// Get a date/time value from a data reader
let getDateTime col (rdr: SqliteDataReader) = rdr.GetDateTime(rdr.GetOrdinal col)
/// Get a Guid value from a data reader
let getGuid col (rdr: SqliteDataReader) = rdr.GetGuid(rdr.GetOrdinal col)
/// Get an int value from a data reader
let getInt col (rdr: SqliteDataReader) = rdr.GetInt32(rdr.GetOrdinal col)
/// Get a long (64-bit int) value from a data reader
let getLong col (rdr: SqliteDataReader) = rdr.GetInt64(rdr.GetOrdinal col)
/// Get a BLOB stream value from a data reader
let getStream col (rdr: SqliteDataReader) = rdr.GetStream(rdr.GetOrdinal col)
/// Get a string value from a data reader
let getString col (rdr: SqliteDataReader) = rdr.GetString(rdr.GetOrdinal col)
/// Parse an Instant from the given value
let parseInstant value =
match InstantPattern.General.Parse value with
| it when it.Success -> it.Value
| it -> raise it.Exception
/// Get an Instant value from a data reader
let getInstant col rdr =
getString col rdr |> parseInstant
/// Get a timespan value from a data reader
let getTimeSpan col (rdr: SqliteDataReader) = rdr.GetTimeSpan(rdr.GetOrdinal col)
/// Get a possibly null boolean value from a data reader
let tryBoolean col (rdr: SqliteDataReader) =
if rdr.IsDBNull(rdr.GetOrdinal col) then None else Some (getBoolean col rdr)
/// Get a possibly null date/time value from a data reader
let tryDateTime col (rdr: SqliteDataReader) =
if rdr.IsDBNull(rdr.GetOrdinal col) then None else Some (getDateTime col rdr)
/// Get a possibly null Guid value from a data reader
let tryGuid col (rdr: SqliteDataReader) =
if rdr.IsDBNull(rdr.GetOrdinal col) then None else Some (getGuid col rdr)
/// Get a possibly null int value from a data reader
let tryInt col (rdr: SqliteDataReader) =
if rdr.IsDBNull(rdr.GetOrdinal col) then None else Some (getInt col rdr)
/// Get a possibly null string value from a data reader
let tryString col (rdr: SqliteDataReader) =
if rdr.IsDBNull(rdr.GetOrdinal col) then None else Some (getString col rdr)
/// Get a possibly null timespan value from a data reader
let tryTimeSpan col (rdr: SqliteDataReader) =
if rdr.IsDBNull(rdr.GetOrdinal col) then None else Some (getTimeSpan col rdr)
/// Create a permalink from the current row in the given data reader
let toPermalink rdr = getString "permalink" rdr |> Permalink
/// Create a revision from the current row in the given data reader
let toRevision rdr : Revision =
{ AsOf = getInstant "as_of" rdr
Text = getString "revision_text" rdr |> MarkupText.Parse }
/// Create a theme asset from the current row in the given data reader
let toThemeAsset includeData rdr : ThemeAsset =
let assetData =
if includeData then
use dataStream = new MemoryStream()
use blobStream = getStream "data" rdr
blobStream.CopyTo dataStream
dataStream.ToArray()
else
[||]
{ Id = ThemeAssetId (ThemeId (getString "theme_id" rdr), getString "path" rdr)
UpdatedOn = getInstant "updated_on" rdr
Data = assetData }