Compare commits

9 Commits
v2.0 ... main

SHA1  Message  Date
f4be57b665 v2.2 2024-06-20 00:08:40 +00:00
  Reviewed-on: #51
7f94e0beef Remove .NET 7 from build target (#48) 2024-06-19 20:07:45 -04:00
f59566a3d3 Create theme dir if needed (#49) 2024-06-19 17:02:05 -04:00
  - Use Path.Combine throughout
  - Update theme versions
  - Update generator version
f2f766fc05 Update htmx to v2.0.0 (#50) 2024-06-19 16:17:45 -04:00
  - Also update all other deps
75c4d4f991 Tweaks to v2.2 data migration (#45) 2024-06-19 16:04:53 -04:00
b50d0d9884 Drop .NET 7 support (#48) 2024-06-18 22:06:02 -04:00
  - Bump version to 2.2
7ae15b9e93 Force URLs and e-mail to be lowercase (#45) 2024-06-18 22:01:41 -04:00
  - Added v2.2 migration to lower existing e-mails
823286255b Fix PostgreSQL v2.1 migration (#44) 2024-03-28 22:25:09 -04:00
  fixes #43
f1a7e55f3e Version 2.1 (#41) 2024-03-26 20:13:28 -04:00
  - Add full chapter support (#6)
  - Add built-in redirect functionality (#39)
  - Support building Docker containers for release (#38)
  - Support canonical domain configuration (#37)
  - Add unit tests for domain/models and integration tests for all three data stores
  - Convert SQLite storage to use JSON documents, similar to PostgreSQL
  - Convert admin templates to Giraffe View Engine (from Liquid)
  - Add .NET 8 support
116 changed files with 14932 additions and 8272 deletions

.github/workflows/ci.yml (new file, 99 lines)

@@ -0,0 +1,99 @@
name: Continuous Integration
on:
push:
branches:
- main
pull_request:
branches:
- main
workflow_dispatch:
env:
MWL_TEST_RETHINK_URI: rethinkdb://localhost/mwl_test
jobs:
build_and_test:
name: Build / Test
runs-on: ubuntu-latest
strategy:
matrix:
dotnet-version:
- "6.0"
- "7.0"
- "8.0"
services:
postgres:
image: postgres:latest
env:
POSTGRES_PASSWORD: postgres
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
rethink:
image: rethinkdb:latest
ports:
- 28015:28015
steps:
- name: Check Out Code
uses: actions/checkout@v4
- name: Setup .NET Core SDK
uses: actions/setup-dotnet@v4.0.0
with:
dotnet-version: 8.x
- name: Restore dependencies
run: dotnet restore src/MyWebLog.sln
- name: Build (${{ matrix.dotnet-version }})
run: dotnet build src/MyWebLog.sln -f net${{ matrix.dotnet-version }}
- name: Test (${{ matrix.dotnet-version }})
run: cd src/MyWebLog.Tests; dotnet run -f net${{ matrix.dotnet-version }}
publish:
name: Publish Packages
runs-on: ubuntu-latest
needs: build_and_test
strategy:
matrix:
ver:
- "net6.0"
- "net7.0"
- "net8.0"
os:
- "linux-x64"
- "win-x64"
include:
- os: "linux-x64"
bz2: true
- os: "win-x64"
zip: true
steps:
- name: Check Out Code
uses: actions/checkout@v4
- name: Setup .NET Core SDK
uses: actions/setup-dotnet@v4.0.0
with:
dotnet-version: 8.x
- name: Publish (Release)
run: dotnet publish -c Release -f ${{ matrix.ver }} -r ${{ matrix.os }} src/MyWebLog/MyWebLog.fsproj
- name: Zip Admin Theme
run: cd src/admin-theme; zip -r ../MyWebLog/bin/Release/${{ matrix.ver }}/${{ matrix.os }}/publish/admin-theme.zip *; cd ../..
- name: Zip Default Theme
run: cd src/default-theme; zip -r ../MyWebLog/bin/Release/${{ matrix.ver }}/${{ matrix.os }}/publish/default-theme.zip *; cd ../..
- if: ${{ matrix.bz2 }}
name: Create .tar.bz2 Archive
run: tar cfj myWebLog-${{ matrix.ver }}-${{ matrix.os }}.tar.bz2 -C src/MyWebLog/bin/Release/${{ matrix.ver }}/${{ matrix.os }}/publish .
- if: ${{ matrix.zip }}
name: Create .zip Archive
run: cd src/MyWebLog/bin/Release/${{ matrix.ver }}/${{ matrix.os }}/publish; zip -r myWebLog-${{ matrix.ver }}-${{ matrix.os }}.zip *; cp myWeb*.zip ../../../../../../..; cd ../../../../../../..
- name: Upload Artifacts
uses: actions/upload-artifact@v4
with:
name: package-${{ matrix.ver }}-${{ matrix.os }}
path: |
*x64.zip
*.bz2

.gitignore (3 changes)

@@ -261,7 +261,8 @@ src/MyWebLog/wwwroot/img/daniel-j-summers
src/MyWebLog/wwwroot/img/bit-badger
.ionide
.vscode
src/MyWebLog/appsettings.Production.json
# SQLite database files
src/MyWebLog/*.db*
src/MyWebLog/data/*.db*


@@ -33,7 +33,7 @@ let zipTheme (name : string) (_ : TargetParameter) =
|> Zip.zipSpec $"{releasePath}/{name}-theme.zip"
/// Frameworks supported by this build
let frameworks = [ "net6.0"; "net7.0" ]
let frameworks = [ "net6.0"; "net8.0" ]
/// Publish the project for the given runtime ID
let publishFor rid (_ : TargetParameter) =


@@ -2,7 +2,7 @@
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net7.0</TargetFramework>
<TargetFramework>net8.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
@@ -10,11 +10,11 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Fake.Core.Target" Version="5.23.1" />
<PackageReference Include="Fake.DotNet.Cli" Version="5.23.1" />
<PackageReference Include="Fake.IO.FileSystem" Version="5.23.1" />
<PackageReference Include="Fake.IO.Zip" Version="5.23.1" />
<PackageReference Include="MSBuild.StructuredLogger" Version="2.1.768" />
<PackageReference Include="Fake.Core.Target" Version="6.0.0" />
<PackageReference Include="Fake.DotNet.Cli" Version="6.0.0" />
<PackageReference Include="Fake.IO.FileSystem" Version="6.0.0" />
<PackageReference Include="Fake.IO.Zip" Version="6.0.0" />
<PackageReference Include="MSBuild.StructuredLogger" Version="2.2.206" />
</ItemGroup>
</Project>

src/.dockerignore (new file, 4 lines)

@@ -0,0 +1,4 @@
**/bin
**/obj
**/*.db
**/appsettings.*.json


@@ -1,9 +1,9 @@
<Project>
<PropertyGroup>
<TargetFrameworks>net6.0;net7.0</TargetFrameworks>
<TargetFrameworks>net6.0;net8.0</TargetFrameworks>
<DebugType>embedded</DebugType>
<AssemblyVersion>2.0.0.0</AssemblyVersion>
<FileVersion>2.0.0.0</FileVersion>
<Version>2.0.0</Version>
<AssemblyVersion>2.2.0.0</AssemblyVersion>
<FileVersion>2.2.0.0</FileVersion>
<Version>2.2.0</Version>
</PropertyGroup>
</Project>

src/Dockerfile (new file, 33 lines)

@@ -0,0 +1,33 @@
FROM mcr.microsoft.com/dotnet/sdk:8.0-alpine AS build
WORKDIR /mwl
COPY ./MyWebLog.sln ./
COPY ./Directory.Build.props ./
COPY ./MyWebLog/MyWebLog.fsproj ./MyWebLog/
COPY ./MyWebLog.Data/MyWebLog.Data.fsproj ./MyWebLog.Data/
COPY ./MyWebLog.Domain/MyWebLog.Domain.fsproj ./MyWebLog.Domain/
COPY ./MyWebLog.Tests/MyWebLog.Tests.fsproj ./MyWebLog.Tests/
RUN dotnet restore
COPY . ./
WORKDIR /mwl/MyWebLog
RUN dotnet publish -f net8.0 -c Release -r linux-x64 --no-self-contained -p:PublishSingleFile=false
FROM alpine AS theme
RUN apk add --no-cache zip
WORKDIR /themes/default-theme
COPY ./default-theme ./
RUN zip -r ../default-theme.zip ./*
WORKDIR /themes/admin-theme
COPY ./admin-theme ./
RUN zip -r ../admin-theme.zip ./*
FROM mcr.microsoft.com/dotnet/aspnet:8.0-alpine as final
WORKDIR /app
RUN apk add --no-cache icu-libs
ENV DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=false
COPY --from=build /mwl/MyWebLog/bin/Release/net8.0/linux-x64/publish/ ./
COPY --from=theme /themes/*.zip /app/
RUN mkdir themes
EXPOSE 80
CMD [ "dotnet", "/app/MyWebLog.dll" ]


@@ -9,116 +9,123 @@ module Json =
open Newtonsoft.Json
type CategoryIdConverter () =
inherit JsonConverter<CategoryId> ()
override _.WriteJson (writer : JsonWriter, value : CategoryId, _ : JsonSerializer) =
writer.WriteValue (CategoryId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : CategoryId, _ : bool, _ : JsonSerializer) =
type CategoryIdConverter() =
inherit JsonConverter<CategoryId>()
override _.WriteJson(writer: JsonWriter, value: CategoryId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: CategoryId, _: bool, _: JsonSerializer) =
(string >> CategoryId) reader.Value
type CommentIdConverter () =
inherit JsonConverter<CommentId> ()
override _.WriteJson (writer : JsonWriter, value : CommentId, _ : JsonSerializer) =
writer.WriteValue (CommentId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : CommentId, _ : bool, _ : JsonSerializer) =
type CommentIdConverter() =
inherit JsonConverter<CommentId>()
override _.WriteJson(writer: JsonWriter, value: CommentId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: CommentId, _: bool, _: JsonSerializer) =
(string >> CommentId) reader.Value
type CustomFeedIdConverter () =
inherit JsonConverter<CustomFeedId> ()
override _.WriteJson (writer : JsonWriter, value : CustomFeedId, _ : JsonSerializer) =
writer.WriteValue (CustomFeedId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : CustomFeedId, _ : bool, _ : JsonSerializer) =
type CommentStatusConverter() =
inherit JsonConverter<CommentStatus>()
override _.WriteJson(writer: JsonWriter, value: CommentStatus, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: CommentStatus, _: bool, _: JsonSerializer) =
(string >> CommentStatus.Parse) reader.Value
type CustomFeedIdConverter() =
inherit JsonConverter<CustomFeedId>()
override _.WriteJson(writer: JsonWriter, value: CustomFeedId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: CustomFeedId, _: bool, _: JsonSerializer) =
(string >> CustomFeedId) reader.Value
type CustomFeedSourceConverter () =
inherit JsonConverter<CustomFeedSource> ()
override _.WriteJson (writer : JsonWriter, value : CustomFeedSource, _ : JsonSerializer) =
writer.WriteValue (CustomFeedSource.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : CustomFeedSource, _ : bool, _ : JsonSerializer) =
(string >> CustomFeedSource.parse) reader.Value
type CustomFeedSourceConverter() =
inherit JsonConverter<CustomFeedSource>()
override _.WriteJson(writer: JsonWriter, value: CustomFeedSource, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: CustomFeedSource, _: bool, _: JsonSerializer) =
(string >> CustomFeedSource.Parse) reader.Value
type ExplicitRatingConverter () =
inherit JsonConverter<ExplicitRating> ()
override _.WriteJson (writer : JsonWriter, value : ExplicitRating, _ : JsonSerializer) =
writer.WriteValue (ExplicitRating.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : ExplicitRating, _ : bool, _ : JsonSerializer) =
(string >> ExplicitRating.parse) reader.Value
type ExplicitRatingConverter() =
inherit JsonConverter<ExplicitRating>()
override _.WriteJson(writer: JsonWriter, value: ExplicitRating, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: ExplicitRating, _: bool, _: JsonSerializer) =
(string >> ExplicitRating.Parse) reader.Value
type MarkupTextConverter () =
inherit JsonConverter<MarkupText> ()
override _.WriteJson (writer : JsonWriter, value : MarkupText, _ : JsonSerializer) =
writer.WriteValue (MarkupText.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : MarkupText, _ : bool, _ : JsonSerializer) =
(string >> MarkupText.parse) reader.Value
type MarkupTextConverter() =
inherit JsonConverter<MarkupText>()
override _.WriteJson(writer: JsonWriter, value: MarkupText, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: MarkupText, _: bool, _: JsonSerializer) =
(string >> MarkupText.Parse) reader.Value
type PermalinkConverter () =
inherit JsonConverter<Permalink> ()
override _.WriteJson (writer : JsonWriter, value : Permalink, _ : JsonSerializer) =
writer.WriteValue (Permalink.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : Permalink, _ : bool, _ : JsonSerializer) =
type PermalinkConverter() =
inherit JsonConverter<Permalink>()
override _.WriteJson(writer: JsonWriter, value: Permalink, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: Permalink, _: bool, _: JsonSerializer) =
(string >> Permalink) reader.Value
type PageIdConverter () =
inherit JsonConverter<PageId> ()
override _.WriteJson (writer : JsonWriter, value : PageId, _ : JsonSerializer) =
writer.WriteValue (PageId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : PageId, _ : bool, _ : JsonSerializer) =
type PageIdConverter() =
inherit JsonConverter<PageId>()
override _.WriteJson(writer: JsonWriter, value: PageId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: PageId, _: bool, _: JsonSerializer) =
(string >> PageId) reader.Value
type PodcastMediumConverter () =
inherit JsonConverter<PodcastMedium> ()
override _.WriteJson (writer : JsonWriter, value : PodcastMedium, _ : JsonSerializer) =
writer.WriteValue (PodcastMedium.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : PodcastMedium, _ : bool, _ : JsonSerializer) =
(string >> PodcastMedium.parse) reader.Value
type PodcastMediumConverter() =
inherit JsonConverter<PodcastMedium>()
override _.WriteJson(writer: JsonWriter, value: PodcastMedium, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: PodcastMedium, _: bool, _: JsonSerializer) =
(string >> PodcastMedium.Parse) reader.Value
type PostIdConverter () =
inherit JsonConverter<PostId> ()
override _.WriteJson (writer : JsonWriter, value : PostId, _ : JsonSerializer) =
writer.WriteValue (PostId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : PostId, _ : bool, _ : JsonSerializer) =
type PostIdConverter() =
inherit JsonConverter<PostId>()
override _.WriteJson(writer: JsonWriter, value: PostId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: PostId, _: bool, _: JsonSerializer) =
(string >> PostId) reader.Value
type TagMapIdConverter () =
inherit JsonConverter<TagMapId> ()
override _.WriteJson (writer : JsonWriter, value : TagMapId, _ : JsonSerializer) =
writer.WriteValue (TagMapId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : TagMapId, _ : bool, _ : JsonSerializer) =
type TagMapIdConverter() =
inherit JsonConverter<TagMapId>()
override _.WriteJson(writer: JsonWriter, value: TagMapId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: TagMapId, _: bool, _: JsonSerializer) =
(string >> TagMapId) reader.Value
type ThemeAssetIdConverter () =
inherit JsonConverter<ThemeAssetId> ()
override _.WriteJson (writer : JsonWriter, value : ThemeAssetId, _ : JsonSerializer) =
writer.WriteValue (ThemeAssetId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : ThemeAssetId, _ : bool, _ : JsonSerializer) =
(string >> ThemeAssetId.ofString) reader.Value
type ThemeAssetIdConverter() =
inherit JsonConverter<ThemeAssetId>()
override _.WriteJson(writer: JsonWriter, value: ThemeAssetId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: ThemeAssetId, _: bool, _: JsonSerializer) =
(string >> ThemeAssetId.Parse) reader.Value
type ThemeIdConverter () =
inherit JsonConverter<ThemeId> ()
override _.WriteJson (writer : JsonWriter, value : ThemeId, _ : JsonSerializer) =
writer.WriteValue (ThemeId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : ThemeId, _ : bool, _ : JsonSerializer) =
type ThemeIdConverter() =
inherit JsonConverter<ThemeId>()
override _.WriteJson(writer: JsonWriter, value: ThemeId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: ThemeId, _: bool, _: JsonSerializer) =
(string >> ThemeId) reader.Value
type UploadIdConverter () =
inherit JsonConverter<UploadId> ()
override _.WriteJson (writer : JsonWriter, value : UploadId, _ : JsonSerializer) =
writer.WriteValue (UploadId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : UploadId, _ : bool, _ : JsonSerializer) =
type UploadIdConverter() =
inherit JsonConverter<UploadId>()
override _.WriteJson(writer: JsonWriter, value: UploadId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: UploadId, _: bool, _: JsonSerializer) =
(string >> UploadId) reader.Value
type WebLogIdConverter () =
inherit JsonConverter<WebLogId> ()
override _.WriteJson (writer : JsonWriter, value : WebLogId, _ : JsonSerializer) =
writer.WriteValue (WebLogId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : WebLogId, _ : bool, _ : JsonSerializer) =
type WebLogIdConverter() =
inherit JsonConverter<WebLogId>()
override _.WriteJson(writer: JsonWriter, value: WebLogId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: WebLogId, _: bool, _: JsonSerializer) =
(string >> WebLogId) reader.Value
type WebLogUserIdConverter () =
type WebLogUserIdConverter() =
inherit JsonConverter<WebLogUserId> ()
override _.WriteJson (writer : JsonWriter, value : WebLogUserId, _ : JsonSerializer) =
writer.WriteValue (WebLogUserId.toString value)
override _.ReadJson (reader : JsonReader, _ : Type, _ : WebLogUserId, _ : bool, _ : JsonSerializer) =
override _.WriteJson(writer: JsonWriter, value: WebLogUserId, _: JsonSerializer) =
writer.WriteValue(string value)
override _.ReadJson(reader: JsonReader, _: Type, _: WebLogUserId, _: bool, _: JsonSerializer) =
(string >> WebLogUserId) reader.Value
open Microsoft.FSharpLu.Json
@@ -128,27 +135,28 @@ module Json =
/// Configure a serializer to use these converters
let configure (ser : JsonSerializer) =
// Our converters
[ CategoryIdConverter () :> JsonConverter
CommentIdConverter ()
CustomFeedIdConverter ()
CustomFeedSourceConverter ()
ExplicitRatingConverter ()
MarkupTextConverter ()
PermalinkConverter ()
PageIdConverter ()
PodcastMediumConverter ()
PostIdConverter ()
TagMapIdConverter ()
ThemeAssetIdConverter ()
ThemeIdConverter ()
UploadIdConverter ()
WebLogIdConverter ()
WebLogUserIdConverter ()
] |> List.iter ser.Converters.Add
[ CategoryIdConverter() :> JsonConverter
CommentIdConverter()
CommentStatusConverter()
CustomFeedIdConverter()
CustomFeedSourceConverter()
ExplicitRatingConverter()
MarkupTextConverter()
PermalinkConverter()
PageIdConverter()
PodcastMediumConverter()
PostIdConverter()
TagMapIdConverter()
ThemeAssetIdConverter()
ThemeIdConverter()
UploadIdConverter()
WebLogIdConverter()
WebLogUserIdConverter() ]
|> List.iter ser.Converters.Add
// NodaTime
let _ = ser.ConfigureForNodaTime DateTimeZoneProviders.Tzdb
// Handles DUs with no associated data, as well as option fields
ser.Converters.Add (CompactUnionJsonConverter ())
ser.Converters.Add(CompactUnionJsonConverter())
ser.NullValueHandling <- NullValueHandling.Ignore
ser.MissingMemberHandling <- MissingMemberHandling.Ignore
ser
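
For context, a minimal usage sketch of the converter setup above (a hedged illustration, not part of this diff; it assumes the Json module shown here is opened from its containing namespace and that the strongly typed IDs override ToString, as the string value calls imply):

open System.IO
open Newtonsoft.Json

// Build a serializer with the converters registered by Json.configure, then use it directly;
// JsonConvert's static helpers would not pick these converters up on their own.
let serializer = Json.configure (JsonSerializer.CreateDefault())

let toJson (value: obj) =
    use writer = new StringWriter()
    serializer.Serialize(writer, value)
    string writer

// Under those assumptions, toJson (CategoryId "tech") yields "tech" as a plain JSON string
// rather than a wrapped object.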


@@ -7,6 +7,7 @@ open Newtonsoft.Json
open NodaTime
/// The result of a category deletion attempt
[<Struct>]
type CategoryDeleteResult =
/// The category was deleted successfully
| CategoryDeleted
@@ -32,7 +33,7 @@ type ICategoryData =
abstract member Delete : CategoryId -> WebLogId -> Task<CategoryDeleteResult>
/// Find all categories for a web log, sorted alphabetically and grouped by hierarchy
abstract member FindAllForView : WebLogId -> Task<DisplayCategory[]>
abstract member FindAllForView : WebLogId -> Task<DisplayCategory array>
/// Find a category by its ID
abstract member FindById : CategoryId -> WebLogId -> Task<Category option>
@@ -53,7 +54,7 @@ type IPageData =
/// Add a page
abstract member Add : Page -> Task<unit>
/// Get all pages for the web log (excluding meta items, text, revisions, and prior permalinks)
/// Get all pages for the web log (excluding text, metadata, revisions, and prior permalinks)
abstract member All : WebLogId -> Task<Page list>
/// Count all pages for the given web log
@@ -84,7 +85,7 @@ type IPageData =
abstract member FindListed : WebLogId -> Task<Page list>
/// Find a page of pages (displayed in admin section) (excluding meta items, revisions and prior permalinks)
abstract member FindPageOfPages : WebLogId -> pageNbr : int -> Task<Page list>
abstract member FindPageOfPages : WebLogId -> pageNbr: int -> Task<Page list>
/// Restore pages from a backup
abstract member Restore : Page list -> Task<unit>
@@ -125,20 +126,20 @@ type IPostData =
/// Find posts to be displayed on a category list page (excluding revisions and prior permalinks)
abstract member FindPageOfCategorizedPosts :
WebLogId -> CategoryId list -> pageNbr : int -> postsPerPage : int -> Task<Post list>
WebLogId -> CategoryId list -> pageNbr: int -> postsPerPage: int -> Task<Post list>
/// Find posts to be displayed on an admin page (excluding revisions and prior permalinks)
abstract member FindPageOfPosts : WebLogId -> pageNbr : int -> postsPerPage : int -> Task<Post list>
/// Find posts to be displayed on an admin page (excluding text, revisions, and prior permalinks)
abstract member FindPageOfPosts : WebLogId -> pageNbr: int -> postsPerPage: int -> Task<Post list>
/// Find posts to be displayed on a page (excluding revisions and prior permalinks)
abstract member FindPageOfPublishedPosts : WebLogId -> pageNbr : int -> postsPerPage : int -> Task<Post list>
abstract member FindPageOfPublishedPosts : WebLogId -> pageNbr: int -> postsPerPage: int -> Task<Post list>
/// Find posts to be displayed on a tag list page (excluding revisions and prior permalinks)
abstract member FindPageOfTaggedPosts :
WebLogId -> tag : string -> pageNbr : int -> postsPerPage : int -> Task<Post list>
WebLogId -> tag : string -> pageNbr: int -> postsPerPage: int -> Task<Post list>
/// Find the next older and newer post for the given published date/time (excluding revisions and prior permalinks)
abstract member FindSurroundingPosts : WebLogId -> publishedOn : Instant -> Task<Post option * Post option>
abstract member FindSurroundingPosts : WebLogId -> publishedOn: Instant -> Task<Post option * Post option>
/// Restore posts from a backup
abstract member Restore : Post list -> Task<unit>
@@ -259,6 +260,9 @@ type IWebLogData =
/// Find a web log by its ID
abstract member FindById : WebLogId -> Task<WebLog option>
/// Update redirect rules for a web log
abstract member UpdateRedirectRules : WebLog -> Task<unit>
/// Update RSS options for a web log
abstract member UpdateRssOptions : WebLog -> Task<unit>


@@ -5,24 +5,25 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="BitBadger.Npgsql.FSharp.Documents" Version="1.0.0-beta2" />
<PackageReference Include="Microsoft.Data.Sqlite" Version="7.0.3" />
<PackageReference Include="Microsoft.Extensions.Caching.Abstractions" Version="7.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="7.0.0" />
<PackageReference Include="BitBadger.Documents.Postgres" Version="3.1.0" />
<PackageReference Include="BitBadger.Documents.Sqlite" Version="3.1.0" />
<PackageReference Include="Microsoft.Data.Sqlite" Version="8.0.6" />
<PackageReference Include="Microsoft.Extensions.Caching.Abstractions" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="8.0.0" />
<PackageReference Include="Microsoft.FSharpLu.Json" Version="0.11.7" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.2" />
<PackageReference Include="NodaTime.Serialization.JsonNet" Version="3.0.1" />
<PackageReference Include="Npgsql.NodaTime" Version="7.0.2" />
<PackageReference Include="NodaTime.Serialization.JsonNet" Version="3.1.0" />
<PackageReference Include="Npgsql.NodaTime" Version="8.0.3" />
<PackageReference Include="RethinkDb.Driver" Version="2.3.150" />
<PackageReference Include="RethinkDb.Driver.FSharp" Version="0.9.0-beta-07" />
<PackageReference Update="FSharp.Core" Version="8.0.300" />
</ItemGroup>
<ItemGroup>
<Compile Include="Converters.fs" />
<Compile Include="Interfaces.fs" />
<Compile Include="Utils.fs" />
<Compile Include="RethinkDbData.fs" />
<Compile Include="SQLite\Helpers.fs" />
<Compile Include="SQLite\SQLiteHelpers.fs" />
<Compile Include="SQLite\SQLiteCategoryData.fs" />
<Compile Include="SQLite\SQLitePageData.fs" />
<Compile Include="SQLite\SQLitePostData.fs" />
@@ -42,7 +43,13 @@
<Compile Include="Postgres\PostgresUploadData.fs" />
<Compile Include="Postgres\PostgresWebLogData.fs" />
<Compile Include="Postgres\PostgresWebLogUserData.fs" />
<Compile Include="PostgresData.fs" />
<Compile Include="PostgresData.fs" />
</ItemGroup>
<ItemGroup>
<AssemblyAttribute Include="System.Runtime.CompilerServices.InternalsVisibleToAttribute">
<_Parameter1>MyWebLog.Tests</_Parameter1>
</AssemblyAttribute>
</ItemGroup>
</Project>


@@ -2,38 +2,37 @@ namespace MyWebLog.Data.Postgres
open System.Threading
open System.Threading.Tasks
open BitBadger.Npgsql.FSharp.Documents
open BitBadger.Documents.Postgres
open Microsoft.Extensions.Caching.Distributed
open NodaTime
open Npgsql.FSharp
/// Helper types and functions for the cache
[<AutoOpen>]
module private Helpers =
/// The cache entry
type Entry =
{ /// The ID of the cache entry
Id : string
/// The value to be cached
Payload : byte[]
/// When this entry will expire
ExpireAt : Instant
/// The duration by which the expiration should be pushed out when being refreshed
SlidingExpiration : Duration option
/// The must-expire-by date/time for the cache entry
AbsoluteExpiration : Instant option
}
type Entry = {
/// The ID of the cache entry
Id: string
/// The value to be cached
Payload: byte array
/// When this entry will expire
ExpireAt: Instant
/// The duration by which the expiration should be pushed out when being refreshed
SlidingExpiration: Duration option
/// The must-expire-by date/time for the cache entry
AbsoluteExpiration: Instant option
}
/// Run a task synchronously
let sync<'T> (it : Task<'T>) = it |> (Async.AwaitTask >> Async.RunSynchronously)
let sync<'T> (it: Task<'T>) = it |> (Async.AwaitTask >> Async.RunSynchronously)
/// Get the current instant
let getNow () = SystemClock.Instance.GetCurrentInstant ()
let getNow () = SystemClock.Instance.GetCurrentInstant()
/// Create a parameter for the expire-at time
let expireParam =
@@ -49,9 +48,11 @@ type DistributedCache () =
task {
let! exists =
Custom.scalar
$"SELECT EXISTS
(SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = 'session')
AS {existsName}" [] Map.toExists
"SELECT EXISTS
(SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = 'session')
AS it"
[]
toExists
if not exists then
do! Custom.nonQuery
"CREATE TABLE session (
@@ -69,13 +70,15 @@ type DistributedCache () =
let getEntry key = backgroundTask {
let idParam = "@id", Sql.string key
let! tryEntry =
Custom.single "SELECT * FROM session WHERE id = @id" [ idParam ]
(fun row ->
{ Id = row.string "id"
Payload = row.bytea "payload"
ExpireAt = row.fieldValue<Instant> "expire_at"
SlidingExpiration = row.fieldValueOrNone<Duration> "sliding_expiration"
AbsoluteExpiration = row.fieldValueOrNone<Instant> "absolute_expiration" })
Custom.single
"SELECT * FROM session WHERE id = @id"
[ idParam ]
(fun row ->
{ Id = row.string "id"
Payload = row.bytea "payload"
ExpireAt = row.fieldValue<Instant> "expire_at"
SlidingExpiration = row.fieldValueOrNone<Duration> "sliding_expiration"
AbsoluteExpiration = row.fieldValueOrNone<Instant> "absolute_expiration" })
match tryEntry with
| Some entry ->
let now = getNow ()
@@ -88,8 +91,9 @@ type DistributedCache () =
true, { entry with ExpireAt = absExp }
else true, { entry with ExpireAt = now.Plus slideExp }
if needsRefresh then
do! Custom.nonQuery "UPDATE session SET expire_at = @expireAt WHERE id = @id"
[ expireParam item.ExpireAt; idParam ]
do! Custom.nonQuery
"UPDATE session SET expire_at = @expireAt WHERE id = @id"
[ expireParam item.ExpireAt; idParam ]
()
return if item.ExpireAt > now then Some entry else None
| None -> return None
@@ -101,17 +105,17 @@ type DistributedCache () =
/// Purge expired entries every 30 minutes
let purge () = backgroundTask {
let now = getNow ()
if lastPurge.Plus (Duration.FromMinutes 30L) < now then
if lastPurge.Plus(Duration.FromMinutes 30L) < now then
do! Custom.nonQuery "DELETE FROM session WHERE expire_at < @expireAt" [ expireParam now ]
lastPurge <- now
}
/// Remove a cache entry
let removeEntry key =
Delete.byId "session" key
Custom.nonQuery "DELETE FROM session WHERE id = @id" [ "@id", Sql.string key ]
/// Save an entry
let saveEntry (opts : DistributedCacheEntryOptions) key payload =
let saveEntry (opts: DistributedCacheEntryOptions) key payload =
let now = getNow ()
let expireAt, slideExp, absExp =
if opts.SlidingExpiration.HasValue then
@@ -121,7 +125,7 @@ type DistributedCache () =
let exp = Instant.FromDateTimeOffset opts.AbsoluteExpiration.Value
exp, None, Some exp
elif opts.AbsoluteExpirationRelativeToNow.HasValue then
let exp = now.Plus (Duration.FromTimeSpan opts.AbsoluteExpirationRelativeToNow.Value)
let exp = now.Plus(Duration.FromTimeSpan opts.AbsoluteExpirationRelativeToNow.Value)
exp, None, Some exp
else
// Default to 1 hour sliding expiration
@@ -146,7 +150,7 @@ type DistributedCache () =
// ~~~ IMPLEMENTATION FUNCTIONS ~~~
/// Retrieve the data for a cache entry
let get key (_ : CancellationToken) = backgroundTask {
let get key (_: CancellationToken) = backgroundTask {
match! getEntry key with
| Some entry ->
do! purge ()
@@ -155,29 +159,29 @@ type DistributedCache () =
}
/// Refresh an entry
let refresh key (cancelToken : CancellationToken) = backgroundTask {
let refresh key (cancelToken: CancellationToken) = backgroundTask {
let! _ = get key cancelToken
()
}
/// Remove an entry
let remove key (_ : CancellationToken) = backgroundTask {
let remove key (_: CancellationToken) = backgroundTask {
do! removeEntry key
do! purge ()
}
/// Set an entry
let set key value options (_ : CancellationToken) = backgroundTask {
let set key value options (_: CancellationToken) = backgroundTask {
do! saveEntry options key value
do! purge ()
}
interface IDistributedCache with
member _.Get key = get key CancellationToken.None |> sync
member _.GetAsync (key, token) = get key token
member _.GetAsync(key, token) = get key token
member _.Refresh key = refresh key CancellationToken.None |> sync
member _.RefreshAsync (key, token) = refresh key token
member _.RefreshAsync(key, token) = refresh key token
member _.Remove key = remove key CancellationToken.None |> sync
member _.RemoveAsync (key, token) = remove key token
member _.Set (key, value, options) = set key value options CancellationToken.None |> sync
member _.SetAsync (key, value, options, token) = set key value options token
member _.RemoveAsync(key, token) = remove key token
member _.Set(key, value, options) = set key value options CancellationToken.None |> sync
member _.SetAsync(key, value, options, token) = set key value options token
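
For reference, a sketch of how this cache might be wired into an ASP.NET Core host (registration code is not part of this diff; the surrounding startup function and names are assumptions):

open Microsoft.Extensions.Caching.Distributed
open Microsoft.Extensions.DependencyInjection
open MyWebLog.Data.Postgres

// Hypothetical registration: back IDistributedCache (and therefore session storage)
// with the PostgreSQL "session" table managed by the type above.
let configureServices (services: IServiceCollection) =
    services.AddSingleton<IDistributedCache, DistributedCache>() |> ignore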


@@ -1,13 +1,14 @@
namespace MyWebLog.Data.Postgres
open BitBadger.Npgsql.FSharp.Documents
open BitBadger.Documents
open BitBadger.Documents.Postgres
open Microsoft.Extensions.Logging
open MyWebLog
open MyWebLog.Data
open Npgsql.FSharp
/// PostgreSQL myWebLog category data implementation
type PostgresCategoryData (log : ILogger) =
type PostgresCategoryData(log: ILogger) =
/// Count all categories for the given web log
let countAll webLogId =
@@ -17,14 +18,20 @@ type PostgresCategoryData (log : ILogger) =
/// Count all top-level categories for the given web log
let countTopLevel webLogId =
log.LogTrace "Category.countTopLevel"
Count.byContains Table.Category {| webLogDoc webLogId with ParentId = None |}
Custom.scalar
$"""{Query.Count.byContains Table.Category}
AND {Query.whereByField (Field.NEX (nameof Category.Empty.ParentId)) ""}"""
[ webLogContains webLogId ]
toCount
/// Retrieve all categories for the given web log in a DotLiquid-friendly format
let findAllForView webLogId = backgroundTask {
log.LogTrace "Category.findAllForView"
let! cats =
Custom.list $"{selectWithCriteria Table.Category} ORDER BY LOWER(data ->> '{nameof Category.empty.Name}')"
[ webLogContains webLogId ] fromData<Category>
Custom.list
$"{selectWithCriteria Table.Category} ORDER BY LOWER(data ->> '{nameof Category.Empty.Name}')"
[ webLogContains webLogId ]
fromData<Category>
let ordered = Utils.orderByHierarchy cats None None []
let counts =
ordered
@@ -33,20 +40,18 @@ type PostgresCategoryData (log : ILogger) =
let catIdSql, catIdParams =
ordered
|> Seq.filter (fun cat -> cat.ParentNames |> Array.contains it.Name)
|> Seq.map (fun cat -> cat.Id)
|> Seq.map _.Id
|> Seq.append (Seq.singleton it.Id)
|> List.ofSeq
|> arrayContains (nameof Post.empty.CategoryIds) id
|> arrayContains (nameof Post.Empty.CategoryIds) id
let postCount =
Custom.scalar
$"""SELECT COUNT(DISTINCT id) AS {countName}
$"""SELECT COUNT(DISTINCT data ->> '{nameof Post.Empty.Id}') AS it
FROM {Table.Post}
WHERE {Query.whereDataContains "@criteria"}
AND {catIdSql}"""
[ "@criteria",
Query.jsonbDocParam {| webLogDoc webLogId with Status = PostStatus.toString Published |}
catIdParams
] Map.toCount
[ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |}; catIdParams ]
toCount
|> Async.AwaitTask
|> Async.RunSynchronously
it.Id, postCount)
@@ -58,71 +63,72 @@ type PostgresCategoryData (log : ILogger) =
PostCount = counts
|> List.tryFind (fun c -> fst c = cat.Id)
|> Option.map snd
|> Option.defaultValue 0
})
|> Option.defaultValue 0 })
|> Array.ofSeq
}
/// Find a category by its ID for the given web log
let findById catId webLogId =
log.LogTrace "Category.findById"
Document.findByIdAndWebLog<CategoryId, Category> Table.Category catId CategoryId.toString webLogId
Document.findByIdAndWebLog<CategoryId, Category> Table.Category catId webLogId
/// Find all categories for the given web log
let findByWebLog webLogId =
log.LogTrace "Category.findByWebLog"
Document.findByWebLog<Category> Table.Category webLogId
/// Create parameters for a category insert / update
let catParameters (cat : Category) =
Query.docParameters (CategoryId.toString cat.Id) cat
/// Delete a category
let delete catId webLogId = backgroundTask {
log.LogTrace "Category.delete"
match! findById catId webLogId with
| Some cat ->
// Reassign any children to the category's parent category
let! children = Find.byContains<Category> Table.Category {| ParentId = CategoryId.toString catId |}
let! children = Find.byContains<Category> Table.Category {| ParentId = catId |}
let hasChildren = not (List.isEmpty children)
if hasChildren then
let childQuery, childParams =
if cat.ParentId.IsSome then
Query.Patch.byId Table.Category,
children
|> List.map (fun child -> [ idParam child.Id; jsonParam "@data" {| ParentId = cat.ParentId |} ])
else
Query.RemoveFields.byId Table.Category,
children
|> List.map (fun child ->
[ idParam child.Id; fieldNameParam [ nameof Category.Empty.ParentId ] ])
let! _ =
Configuration.dataSource ()
|> Sql.fromDataSource
|> Sql.executeTransactionAsync [
Query.Update.partialById Table.Category,
children |> List.map (fun child -> [
"@id", Sql.string (CategoryId.toString child.Id)
"@data", Query.jsonbDocParam {| ParentId = cat.ParentId |}
])
]
|> Sql.executeTransactionAsync [ childQuery, childParams ]
()
// Delete the category off all posts where it is assigned
let! posts =
Custom.list $"SELECT data FROM {Table.Post} WHERE data -> '{nameof Post.empty.CategoryIds}' @> @id"
[ "@id", Query.jsonbDocParam [| CategoryId.toString catId |] ] fromData<Post>
Custom.list
$"SELECT data FROM {Table.Post} WHERE data -> '{nameof Post.Empty.CategoryIds}' @> @id"
[ jsonParam "@id" [| string catId |] ]
fromData<Post>
if not (List.isEmpty posts) then
let! _ =
Configuration.dataSource ()
|> Sql.fromDataSource
|> Sql.executeTransactionAsync [
Query.Update.partialById Table.Post,
posts |> List.map (fun post -> [
"@id", Sql.string (PostId.toString post.Id)
"@data", Query.jsonbDocParam
{| CategoryIds = post.CategoryIds |> List.filter (fun cat -> cat <> catId) |}
])
]
|> Sql.executeTransactionAsync
[ Query.Patch.byId Table.Post,
posts
|> List.map (fun post ->
[ idParam post.Id
jsonParam
"@data"
{| CategoryIds = post.CategoryIds |> List.filter (fun cat -> cat <> catId) |} ]) ]
()
// Delete the category itself
do! Delete.byId Table.Category (CategoryId.toString catId)
do! Delete.byId Table.Category catId
return if hasChildren then ReassignedChildCategories else CategoryDeleted
| None -> return CategoryNotFound
}
/// Save a category
let save (cat : Category) = backgroundTask {
let save (cat: Category) = backgroundTask {
log.LogTrace "Category.save"
do! save Table.Category (CategoryId.toString cat.Id) cat
do! save Table.Category cat
}
/// Restore categories from a backup
@@ -132,7 +138,7 @@ type PostgresCategoryData (log : ILogger) =
Configuration.dataSource ()
|> Sql.fromDataSource
|> Sql.executeTransactionAsync [
Query.insert Table.Category, cats |> List.map catParameters
Query.insert Table.Category, cats |> List.map (fun c -> [ jsonParam "@data" c ])
]
()
}


@@ -61,7 +61,8 @@ module Table =
open System
open System.Threading.Tasks
open BitBadger.Npgsql.FSharp.Documents
open BitBadger.Documents
open BitBadger.Documents.Postgres
open MyWebLog
open MyWebLog.Data
open NodaTime
@@ -69,29 +70,23 @@ open Npgsql
open Npgsql.FSharp
/// Create a SQL parameter for the web log ID
let webLogIdParam webLogId =
"@webLogId", Sql.string (WebLogId.toString webLogId)
let webLogIdParam (webLogId: WebLogId) =
"@webLogId", Sql.string (string webLogId)
/// Create an anonymous record with the given web log ID
let webLogDoc (webLogId : WebLogId) =
let webLogDoc (webLogId: WebLogId) =
{| WebLogId = webLogId |}
/// Create a parameter for a web log document-contains query
let webLogContains webLogId =
"@criteria", Query.jsonbDocParam (webLogDoc webLogId)
/// The name of the field to select to be able to use Map.toCount
let countName = "the_count"
/// The name of the field to select to be able to use Map.toExists
let existsName = "does_exist"
jsonParam "@criteria" (webLogDoc webLogId)
/// A SQL string to select data from a table with the given JSON document contains criteria
let selectWithCriteria tableName =
$"""{Query.selectFromTable tableName} WHERE {Query.whereDataContains "@criteria"}"""
/// Create the SQL and parameters for an IN clause
let inClause<'T> colNameAndPrefix paramName (valueFunc: 'T -> string) (items : 'T list) =
let inClause<'T> colNameAndPrefix paramName (items: 'T list) =
if List.isEmpty items then "", []
else
let mutable idx = 0
@@ -99,114 +94,109 @@ let inClause<'T> colNameAndPrefix paramName (valueFunc: 'T -> string) (items : '
|> List.skip 1
|> List.fold (fun (itemS, itemP) it ->
idx <- idx + 1
$"{itemS}, @%s{paramName}{idx}", ($"@%s{paramName}{idx}", Sql.string (valueFunc it)) :: itemP)
$"{itemS}, @%s{paramName}{idx}", ($"@%s{paramName}{idx}", Sql.string (string it)) :: itemP)
(Seq.ofList items
|> Seq.map (fun it ->
$"%s{colNameAndPrefix} IN (@%s{paramName}0", [ $"@%s{paramName}0", Sql.string (valueFunc it) ])
$"%s{colNameAndPrefix} IN (@%s{paramName}0", [ $"@%s{paramName}0", Sql.string (string it) ])
|> Seq.head)
|> function sql, ps -> $"{sql})", ps
/// Create the SQL and parameters for match-any array query
let arrayContains<'T> name (valueFunc : 'T -> string) (items : 'T list) =
let arrayContains<'T> name (valueFunc: 'T -> string) (items: 'T list) =
$"data['{name}'] ?| @{name}Values",
($"@{name}Values", Sql.stringArray (items |> List.map valueFunc |> Array.ofList))
/// Get the first result of the given query
let tryHead<'T> (query : Task<'T list>) = backgroundTask {
let tryHead<'T> (query: Task<'T list>) = backgroundTask {
let! results = query
return List.tryHead results
}
/// Create a parameter for a non-standard type
let typedParam<'T> name (it : 'T) =
$"@%s{name}", Sql.parameter (NpgsqlParameter ($"@{name}", it))
let typedParam<'T> name (it: 'T) =
$"@%s{name}", Sql.parameter (NpgsqlParameter($"@{name}", it))
/// Create a parameter for a possibly-missing non-standard type
let optParam<'T> name (it : 'T option) =
let p = NpgsqlParameter ($"@%s{name}", if Option.isSome it then box it.Value else DBNull.Value)
let optParam<'T> name (it: 'T option) =
let p = NpgsqlParameter($"@%s{name}", if Option.isSome it then box it.Value else DBNull.Value)
p.ParameterName, Sql.parameter p
/// Mapping functions for SQL queries
module Map =
/// Get a count from a row
let toCount (row : RowReader) =
row.int countName
/// Get a true/false value as to whether an item exists
let toExists (row : RowReader) =
row.bool existsName
/// Create a permalink from the current row
let toPermalink (row : RowReader) =
let toPermalink (row: RowReader) =
Permalink (row.string "permalink")
/// Create a revision from the current row
let toRevision (row : RowReader) : Revision =
{ AsOf = row.fieldValue<Instant> "as_of"
Text = row.string "revision_text" |> MarkupText.parse
}
let toRevision (row: RowReader) : Revision =
{ AsOf = row.fieldValue<Instant> "as_of"
Text = row.string "revision_text" |> MarkupText.Parse }
/// Create a theme asset from the current row
let toThemeAsset includeData (row : RowReader) : ThemeAsset =
{ Id = ThemeAssetId (ThemeId (row.string "theme_id"), row.string "path")
UpdatedOn = row.fieldValue<Instant> "updated_on"
Data = if includeData then row.bytea "data" else [||]
}
let toThemeAsset includeData (row: RowReader) : ThemeAsset =
{ Id = ThemeAssetId (ThemeId (row.string "theme_id"), row.string "path")
UpdatedOn = row.fieldValue<Instant> "updated_on"
Data = if includeData then row.bytea "data" else [||] }
/// Create an uploaded file from the current row
let toUpload includeData (row : RowReader) : Upload =
{ Id = row.string "id" |> UploadId
WebLogId = row.string "web_log_id" |> WebLogId
Path = row.string "path" |> Permalink
UpdatedOn = row.fieldValue<Instant> "updated_on"
Data = if includeData then row.bytea "data" else [||]
}
let toUpload includeData (row: RowReader) : Upload =
{ Id = row.string "id" |> UploadId
WebLogId = row.string "web_log_id" |> WebLogId
Path = row.string "path" |> Permalink
UpdatedOn = row.fieldValue<Instant> "updated_on"
Data = if includeData then row.bytea "data" else [||] }
/// Document manipulation functions
module Document =
/// Determine whether a document exists with the given key for the given web log
let existsByWebLog<'TKey> table (key : 'TKey) (keyFunc : 'TKey -> string) webLogId =
let existsByWebLog<'TKey> table (key: 'TKey) webLogId =
Custom.scalar
$""" SELECT EXISTS (
SELECT 1 FROM %s{table} WHERE id = @id AND {Query.whereDataContains "@criteria"}
) AS {existsName}"""
[ "@id", Sql.string (keyFunc key); webLogContains webLogId ] Map.toExists
$"""SELECT EXISTS (
SELECT 1 FROM %s{table} WHERE {Query.whereById "@id"} AND {Query.whereDataContains "@criteria"}
) AS it"""
[ "@id", Sql.string (string key); webLogContains webLogId ]
toExists
/// Find a document by its ID for the given web log
let findByIdAndWebLog<'TKey, 'TDoc> table (key : 'TKey) (keyFunc : 'TKey -> string) webLogId =
Custom.single $"""{Query.selectFromTable table} WHERE id = @id AND {Query.whereDataContains "@criteria"}"""
[ "@id", Sql.string (keyFunc key); webLogContains webLogId ] fromData<'TDoc>
let findByIdAndWebLog<'TKey, 'TDoc> table (key: 'TKey) webLogId =
Custom.single
$"""{Query.selectFromTable table} WHERE {Query.whereById "@id"} AND {Query.whereDataContains "@criteria"}"""
[ "@id", Sql.string (string key); webLogContains webLogId ]
fromData<'TDoc>
/// Find a document by its ID for the given web log
/// Find documents for the given web log
let findByWebLog<'TDoc> table webLogId : Task<'TDoc list> =
Find.byContains table (webLogDoc webLogId)
/// Functions to support revisions
module Revisions =
/// Find all revisions for the given entity
let findByEntityId<'TKey> revTable entityTable (key : 'TKey) (keyFunc : 'TKey -> string) =
Custom.list $"SELECT as_of, revision_text FROM %s{revTable} WHERE %s{entityTable}_id = @id ORDER BY as_of DESC"
[ "@id", Sql.string (keyFunc key) ] Map.toRevision
let findByEntityId<'TKey> revTable entityTable (key: 'TKey) =
Custom.list
$"SELECT as_of, revision_text FROM %s{revTable} WHERE %s{entityTable}_id = @id ORDER BY as_of DESC"
[ "@id", Sql.string (string key) ]
Map.toRevision
/// Find all revisions for all posts for the given web log
let findByWebLog<'TKey> revTable entityTable (keyFunc : string -> 'TKey) webLogId =
let findByWebLog<'TKey> revTable entityTable (keyFunc: string -> 'TKey) webLogId =
Custom.list
$"""SELECT pr.*
FROM %s{revTable} pr