diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..92a79d7 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,99 @@ +name: Continuous Integration +on: + push: + branches: + - main + pull_request: + branches: + - main + workflow_dispatch: +env: + MWL_TEST_RETHINK_URI: rethinkdb://localhost/mwl_test +jobs: + build_and_test: + name: Build / Test + runs-on: ubuntu-latest + + strategy: + matrix: + dotnet-version: + - "6.0" + - "7.0" + - "8.0" + + services: + postgres: + image: postgres:latest + env: + POSTGRES_PASSWORD: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + rethink: + image: rethinkdb:latest + ports: + - 28015:28015 + + steps: + - name: Check Out Code + uses: actions/checkout@v4 + - name: Setup .NET Core SDK + uses: actions/setup-dotnet@v4.0.0 + with: + dotnet-version: 8.x + - name: Restore dependencies + run: dotnet restore src/MyWebLog.sln + - name: Build (${{ matrix.dotnet-version }}) + run: dotnet build src/MyWebLog.sln -f net${{ matrix.dotnet-version }} + - name: Test (${{ matrix.dotnet-version }}) + run: cd src/MyWebLog.Tests; dotnet run -f net${{ matrix.dotnet-version }} + + publish: + name: Publish Packages + runs-on: ubuntu-latest + needs: build_and_test + + strategy: + matrix: + ver: + - "net6.0" + - "net7.0" + - "net8.0" + os: + - "linux-x64" + - "win-x64" + include: + - os: "linux-x64" + bz2: true + - os: "win-x64" + zip: true + steps: + - name: Check Out Code + uses: actions/checkout@v4 + - name: Setup .NET Core SDK + uses: actions/setup-dotnet@v4.0.0 + with: + dotnet-version: 8.x + - name: Publish (Release) + run: dotnet publish -c Release -f ${{ matrix.ver }} -r ${{ matrix.os }} src/MyWebLog/MyWebLog.fsproj + - name: Zip Admin Theme + run: cd src/admin-theme; zip -r ../MyWebLog/bin/Release/${{ matrix.ver }}/${{ matrix.os }}/publish/admin-theme.zip *; cd ../.. 
+ - name: Zip Default Theme + run: cd src/default-theme; zip -r ../MyWebLog/bin/Release/${{ matrix.ver }}/${{ matrix.os }}/publish/default-theme.zip *; cd ../.. + - if: ${{ matrix.bz2 }} + name: Create .tar.bz2 Archive + run: tar cfj myWebLog-${{ matrix.ver }}-${{ matrix.os }}.tar.bz2 -C src/MyWebLog/bin/Release/${{ matrix.ver }}/${{ matrix.os }}/publish . + - if: ${{ matrix.zip }} + name: Create .zip Archive + run: cd src/MyWebLog/bin/Release/${{ matrix.ver }}/${{ matrix.os }}/publish; zip -r myWebLog-${{ matrix.ver }}-${{ matrix.os }}.zip *; cp myWeb*.zip ../../../../../../..; cd ../../../../../../.. + - name: Upload Artifacts + uses: actions/upload-artifact@v4 + with: + name: package-${{ matrix.ver }}-${{ matrix.os }} + path: | + *x64.zip + *.bz2 diff --git a/.gitignore b/.gitignore index 170e429..1f86ad5 100644 --- a/.gitignore +++ b/.gitignore @@ -261,7 +261,8 @@ src/MyWebLog/wwwroot/img/daniel-j-summers src/MyWebLog/wwwroot/img/bit-badger .ionide +.vscode src/MyWebLog/appsettings.Production.json # SQLite database files -src/MyWebLog/*.db* +src/MyWebLog/data/*.db* diff --git a/build.fs b/build.fs index e841ab6..9e17e7f 100644 --- a/build.fs +++ b/build.fs @@ -33,7 +33,7 @@ let zipTheme (name : string) (_ : TargetParameter) = |> Zip.zipSpec $"{releasePath}/{name}-theme.zip" /// Frameworks supported by this build -let frameworks = [ "net6.0"; "net7.0" ] +let frameworks = [ "net6.0"; "net7.0"; "net8.0" ] /// Publish the project for the given runtime ID let publishFor rid (_ : TargetParameter) = diff --git a/build.fsproj b/build.fsproj index 449dd30..5821d28 100644 --- a/build.fsproj +++ b/build.fsproj @@ -2,7 +2,7 @@ Exe - net7.0 + net8.0 @@ -10,11 +10,11 @@ - - - - - + + + + + diff --git a/src/.dockerignore b/src/.dockerignore new file mode 100644 index 0000000..147d38c --- /dev/null +++ b/src/.dockerignore @@ -0,0 +1,4 @@ +**/bin +**/obj +**/*.db +**/appsettings.*.json diff --git a/src/Directory.Build.props b/src/Directory.Build.props index 5529e72..548e37b 
100644 --- a/src/Directory.Build.props +++ b/src/Directory.Build.props @@ -1,9 +1,9 @@ - net6.0;net7.0 + net6.0;net7.0;net8.0 embedded - 2.0.0.0 - 2.0.0.0 - 2.0.0 + 2.1.0.0 + 2.1.0.0 + 2.1.0 diff --git a/src/Dockerfile b/src/Dockerfile new file mode 100644 index 0000000..89387d5 --- /dev/null +++ b/src/Dockerfile @@ -0,0 +1,33 @@ +FROM mcr.microsoft.com/dotnet/sdk:8.0-alpine AS build +WORKDIR /mwl +COPY ./MyWebLog.sln ./ +COPY ./Directory.Build.props ./ +COPY ./MyWebLog/MyWebLog.fsproj ./MyWebLog/ +COPY ./MyWebLog.Data/MyWebLog.Data.fsproj ./MyWebLog.Data/ +COPY ./MyWebLog.Domain/MyWebLog.Domain.fsproj ./MyWebLog.Domain/ +COPY ./MyWebLog.Tests/MyWebLog.Tests.fsproj ./MyWebLog.Tests/ +RUN dotnet restore + +COPY . ./ +WORKDIR /mwl/MyWebLog +RUN dotnet publish -f net8.0 -c Release -r linux-x64 --no-self-contained -p:PublishSingleFile=false + +FROM alpine AS theme +RUN apk add --no-cache zip +WORKDIR /themes/default-theme +COPY ./default-theme ./ +RUN zip -r ../default-theme.zip ./* +WORKDIR /themes/admin-theme +COPY ./admin-theme ./ +RUN zip -r ../admin-theme.zip ./* + +FROM mcr.microsoft.com/dotnet/aspnet:8.0-alpine as final +WORKDIR /app +RUN apk add --no-cache icu-libs +ENV DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=false +COPY --from=build /mwl/MyWebLog/bin/Release/net8.0/linux-x64/publish/ ./ +COPY --from=theme /themes/*.zip /app/ +RUN mkdir themes + +EXPOSE 80 +CMD [ "dotnet", "/app/MyWebLog.dll" ] diff --git a/src/MyWebLog.Data/Converters.fs b/src/MyWebLog.Data/Converters.fs index 52a132c..ad14d54 100644 --- a/src/MyWebLog.Data/Converters.fs +++ b/src/MyWebLog.Data/Converters.fs @@ -9,116 +9,123 @@ module Json = open Newtonsoft.Json - type CategoryIdConverter () = - inherit JsonConverter () - override _.WriteJson (writer : JsonWriter, value : CategoryId, _ : JsonSerializer) = - writer.WriteValue (CategoryId.toString value) - override _.ReadJson (reader : JsonReader, _ : Type, _ : CategoryId, _ : bool, _ : JsonSerializer) = + type CategoryIdConverter() = + inherit 
JsonConverter() + override _.WriteJson(writer: JsonWriter, value: CategoryId, _: JsonSerializer) = + writer.WriteValue(string value) + override _.ReadJson(reader: JsonReader, _: Type, _: CategoryId, _: bool, _: JsonSerializer) = (string >> CategoryId) reader.Value - type CommentIdConverter () = - inherit JsonConverter () - override _.WriteJson (writer : JsonWriter, value : CommentId, _ : JsonSerializer) = - writer.WriteValue (CommentId.toString value) - override _.ReadJson (reader : JsonReader, _ : Type, _ : CommentId, _ : bool, _ : JsonSerializer) = + type CommentIdConverter() = + inherit JsonConverter() + override _.WriteJson(writer: JsonWriter, value: CommentId, _: JsonSerializer) = + writer.WriteValue(string value) + override _.ReadJson(reader: JsonReader, _: Type, _: CommentId, _: bool, _: JsonSerializer) = (string >> CommentId) reader.Value - type CustomFeedIdConverter () = - inherit JsonConverter () - override _.WriteJson (writer : JsonWriter, value : CustomFeedId, _ : JsonSerializer) = - writer.WriteValue (CustomFeedId.toString value) - override _.ReadJson (reader : JsonReader, _ : Type, _ : CustomFeedId, _ : bool, _ : JsonSerializer) = + type CommentStatusConverter() = + inherit JsonConverter() + override _.WriteJson(writer: JsonWriter, value: CommentStatus, _: JsonSerializer) = + writer.WriteValue(string value) + override _.ReadJson(reader: JsonReader, _: Type, _: CommentStatus, _: bool, _: JsonSerializer) = + (string >> CommentStatus.Parse) reader.Value + + type CustomFeedIdConverter() = + inherit JsonConverter() + override _.WriteJson(writer: JsonWriter, value: CustomFeedId, _: JsonSerializer) = + writer.WriteValue(string value) + override _.ReadJson(reader: JsonReader, _: Type, _: CustomFeedId, _: bool, _: JsonSerializer) = (string >> CustomFeedId) reader.Value - type CustomFeedSourceConverter () = - inherit JsonConverter () - override _.WriteJson (writer : JsonWriter, value : CustomFeedSource, _ : JsonSerializer) = - writer.WriteValue 
(CustomFeedSource.toString value) - override _.ReadJson (reader : JsonReader, _ : Type, _ : CustomFeedSource, _ : bool, _ : JsonSerializer) = - (string >> CustomFeedSource.parse) reader.Value + type CustomFeedSourceConverter() = + inherit JsonConverter() + override _.WriteJson(writer: JsonWriter, value: CustomFeedSource, _: JsonSerializer) = + writer.WriteValue(string value) + override _.ReadJson(reader: JsonReader, _: Type, _: CustomFeedSource, _: bool, _: JsonSerializer) = + (string >> CustomFeedSource.Parse) reader.Value - type ExplicitRatingConverter () = - inherit JsonConverter () - override _.WriteJson (writer : JsonWriter, value : ExplicitRating, _ : JsonSerializer) = - writer.WriteValue (ExplicitRating.toString value) - override _.ReadJson (reader : JsonReader, _ : Type, _ : ExplicitRating, _ : bool, _ : JsonSerializer) = - (string >> ExplicitRating.parse) reader.Value + type ExplicitRatingConverter() = + inherit JsonConverter() + override _.WriteJson(writer: JsonWriter, value: ExplicitRating, _: JsonSerializer) = + writer.WriteValue(string value) + override _.ReadJson(reader: JsonReader, _: Type, _: ExplicitRating, _: bool, _: JsonSerializer) = + (string >> ExplicitRating.Parse) reader.Value - type MarkupTextConverter () = - inherit JsonConverter () - override _.WriteJson (writer : JsonWriter, value : MarkupText, _ : JsonSerializer) = - writer.WriteValue (MarkupText.toString value) - override _.ReadJson (reader : JsonReader, _ : Type, _ : MarkupText, _ : bool, _ : JsonSerializer) = - (string >> MarkupText.parse) reader.Value + type MarkupTextConverter() = + inherit JsonConverter() + override _.WriteJson(writer: JsonWriter, value: MarkupText, _: JsonSerializer) = + writer.WriteValue(string value) + override _.ReadJson(reader: JsonReader, _: Type, _: MarkupText, _: bool, _: JsonSerializer) = + (string >> MarkupText.Parse) reader.Value - type PermalinkConverter () = - inherit JsonConverter () - override _.WriteJson (writer : JsonWriter, value : Permalink, _ : 
JsonSerializer) = - writer.WriteValue (Permalink.toString value) - override _.ReadJson (reader : JsonReader, _ : Type, _ : Permalink, _ : bool, _ : JsonSerializer) = + type PermalinkConverter() = + inherit JsonConverter() + override _.WriteJson(writer: JsonWriter, value: Permalink, _: JsonSerializer) = + writer.WriteValue(string value) + override _.ReadJson(reader: JsonReader, _: Type, _: Permalink, _: bool, _: JsonSerializer) = (string >> Permalink) reader.Value - type PageIdConverter () = - inherit JsonConverter () - override _.WriteJson (writer : JsonWriter, value : PageId, _ : JsonSerializer) = - writer.WriteValue (PageId.toString value) - override _.ReadJson (reader : JsonReader, _ : Type, _ : PageId, _ : bool, _ : JsonSerializer) = + type PageIdConverter() = + inherit JsonConverter() + override _.WriteJson(writer: JsonWriter, value: PageId, _: JsonSerializer) = + writer.WriteValue(string value) + override _.ReadJson(reader: JsonReader, _: Type, _: PageId, _: bool, _: JsonSerializer) = (string >> PageId) reader.Value - type PodcastMediumConverter () = - inherit JsonConverter () - override _.WriteJson (writer : JsonWriter, value : PodcastMedium, _ : JsonSerializer) = - writer.WriteValue (PodcastMedium.toString value) - override _.ReadJson (reader : JsonReader, _ : Type, _ : PodcastMedium, _ : bool, _ : JsonSerializer) = - (string >> PodcastMedium.parse) reader.Value + type PodcastMediumConverter() = + inherit JsonConverter() + override _.WriteJson(writer: JsonWriter, value: PodcastMedium, _: JsonSerializer) = + writer.WriteValue(string value) + override _.ReadJson(reader: JsonReader, _: Type, _: PodcastMedium, _: bool, _: JsonSerializer) = + (string >> PodcastMedium.Parse) reader.Value - type PostIdConverter () = - inherit JsonConverter () - override _.WriteJson (writer : JsonWriter, value : PostId, _ : JsonSerializer) = - writer.WriteValue (PostId.toString value) - override _.ReadJson (reader : JsonReader, _ : Type, _ : PostId, _ : bool, _ : JsonSerializer) = 
+ type PostIdConverter() = + inherit JsonConverter() + override _.WriteJson(writer: JsonWriter, value: PostId, _: JsonSerializer) = + writer.WriteValue(string value) + override _.ReadJson(reader: JsonReader, _: Type, _: PostId, _: bool, _: JsonSerializer) = (string >> PostId) reader.Value - type TagMapIdConverter () = - inherit JsonConverter () - override _.WriteJson (writer : JsonWriter, value : TagMapId, _ : JsonSerializer) = - writer.WriteValue (TagMapId.toString value) - override _.ReadJson (reader : JsonReader, _ : Type, _ : TagMapId, _ : bool, _ : JsonSerializer) = + type TagMapIdConverter() = + inherit JsonConverter() + override _.WriteJson(writer: JsonWriter, value: TagMapId, _: JsonSerializer) = + writer.WriteValue(string value) + override _.ReadJson(reader: JsonReader, _: Type, _: TagMapId, _: bool, _: JsonSerializer) = (string >> TagMapId) reader.Value - type ThemeAssetIdConverter () = - inherit JsonConverter () - override _.WriteJson (writer : JsonWriter, value : ThemeAssetId, _ : JsonSerializer) = - writer.WriteValue (ThemeAssetId.toString value) - override _.ReadJson (reader : JsonReader, _ : Type, _ : ThemeAssetId, _ : bool, _ : JsonSerializer) = - (string >> ThemeAssetId.ofString) reader.Value + type ThemeAssetIdConverter() = + inherit JsonConverter() + override _.WriteJson(writer: JsonWriter, value: ThemeAssetId, _: JsonSerializer) = + writer.WriteValue(string value) + override _.ReadJson(reader: JsonReader, _: Type, _: ThemeAssetId, _: bool, _: JsonSerializer) = + (string >> ThemeAssetId.Parse) reader.Value - type ThemeIdConverter () = - inherit JsonConverter () - override _.WriteJson (writer : JsonWriter, value : ThemeId, _ : JsonSerializer) = - writer.WriteValue (ThemeId.toString value) - override _.ReadJson (reader : JsonReader, _ : Type, _ : ThemeId, _ : bool, _ : JsonSerializer) = + type ThemeIdConverter() = + inherit JsonConverter() + override _.WriteJson(writer: JsonWriter, value: ThemeId, _: JsonSerializer) = + writer.WriteValue(string 
value) + override _.ReadJson(reader: JsonReader, _: Type, _: ThemeId, _: bool, _: JsonSerializer) = (string >> ThemeId) reader.Value - type UploadIdConverter () = - inherit JsonConverter () - override _.WriteJson (writer : JsonWriter, value : UploadId, _ : JsonSerializer) = - writer.WriteValue (UploadId.toString value) - override _.ReadJson (reader : JsonReader, _ : Type, _ : UploadId, _ : bool, _ : JsonSerializer) = + type UploadIdConverter() = + inherit JsonConverter() + override _.WriteJson(writer: JsonWriter, value: UploadId, _: JsonSerializer) = + writer.WriteValue(string value) + override _.ReadJson(reader: JsonReader, _: Type, _: UploadId, _: bool, _: JsonSerializer) = (string >> UploadId) reader.Value - type WebLogIdConverter () = - inherit JsonConverter () - override _.WriteJson (writer : JsonWriter, value : WebLogId, _ : JsonSerializer) = - writer.WriteValue (WebLogId.toString value) - override _.ReadJson (reader : JsonReader, _ : Type, _ : WebLogId, _ : bool, _ : JsonSerializer) = + type WebLogIdConverter() = + inherit JsonConverter() + override _.WriteJson(writer: JsonWriter, value: WebLogId, _: JsonSerializer) = + writer.WriteValue(string value) + override _.ReadJson(reader: JsonReader, _: Type, _: WebLogId, _: bool, _: JsonSerializer) = (string >> WebLogId) reader.Value - type WebLogUserIdConverter () = + type WebLogUserIdConverter() = inherit JsonConverter () - override _.WriteJson (writer : JsonWriter, value : WebLogUserId, _ : JsonSerializer) = - writer.WriteValue (WebLogUserId.toString value) - override _.ReadJson (reader : JsonReader, _ : Type, _ : WebLogUserId, _ : bool, _ : JsonSerializer) = + override _.WriteJson(writer: JsonWriter, value: WebLogUserId, _: JsonSerializer) = + writer.WriteValue(string value) + override _.ReadJson(reader: JsonReader, _: Type, _: WebLogUserId, _: bool, _: JsonSerializer) = (string >> WebLogUserId) reader.Value open Microsoft.FSharpLu.Json @@ -128,27 +135,28 @@ module Json = /// Configure a serializer to use these 
converters let configure (ser : JsonSerializer) = // Our converters - [ CategoryIdConverter () :> JsonConverter - CommentIdConverter () - CustomFeedIdConverter () - CustomFeedSourceConverter () - ExplicitRatingConverter () - MarkupTextConverter () - PermalinkConverter () - PageIdConverter () - PodcastMediumConverter () - PostIdConverter () - TagMapIdConverter () - ThemeAssetIdConverter () - ThemeIdConverter () - UploadIdConverter () - WebLogIdConverter () - WebLogUserIdConverter () - ] |> List.iter ser.Converters.Add + [ CategoryIdConverter() :> JsonConverter + CommentIdConverter() + CommentStatusConverter() + CustomFeedIdConverter() + CustomFeedSourceConverter() + ExplicitRatingConverter() + MarkupTextConverter() + PermalinkConverter() + PageIdConverter() + PodcastMediumConverter() + PostIdConverter() + TagMapIdConverter() + ThemeAssetIdConverter() + ThemeIdConverter() + UploadIdConverter() + WebLogIdConverter() + WebLogUserIdConverter() ] + |> List.iter ser.Converters.Add // NodaTime let _ = ser.ConfigureForNodaTime DateTimeZoneProviders.Tzdb // Handles DUs with no associated data, as well as option fields - ser.Converters.Add (CompactUnionJsonConverter ()) + ser.Converters.Add(CompactUnionJsonConverter()) ser.NullValueHandling <- NullValueHandling.Ignore ser.MissingMemberHandling <- MissingMemberHandling.Ignore ser diff --git a/src/MyWebLog.Data/Interfaces.fs b/src/MyWebLog.Data/Interfaces.fs index f064cc4..6fd9207 100644 --- a/src/MyWebLog.Data/Interfaces.fs +++ b/src/MyWebLog.Data/Interfaces.fs @@ -7,6 +7,7 @@ open Newtonsoft.Json open NodaTime /// The result of a category deletion attempt +[] type CategoryDeleteResult = /// The category was deleted successfully | CategoryDeleted @@ -32,7 +33,7 @@ type ICategoryData = abstract member Delete : CategoryId -> WebLogId -> Task /// Find all categories for a web log, sorted alphabetically and grouped by hierarchy - abstract member FindAllForView : WebLogId -> Task + abstract member FindAllForView : WebLogId -> Task 
/// Find a category by its ID abstract member FindById : CategoryId -> WebLogId -> Task @@ -53,7 +54,7 @@ type IPageData = /// Add a page abstract member Add : Page -> Task - /// Get all pages for the web log (excluding meta items, text, revisions, and prior permalinks) + /// Get all pages for the web log (excluding text, metadata, revisions, and prior permalinks) abstract member All : WebLogId -> Task /// Count all pages for the given web log @@ -84,7 +85,7 @@ type IPageData = abstract member FindListed : WebLogId -> Task /// Find a page of pages (displayed in admin section) (excluding meta items, revisions and prior permalinks) - abstract member FindPageOfPages : WebLogId -> pageNbr : int -> Task + abstract member FindPageOfPages : WebLogId -> pageNbr: int -> Task /// Restore pages from a backup abstract member Restore : Page list -> Task @@ -125,20 +126,20 @@ type IPostData = /// Find posts to be displayed on a category list page (excluding revisions and prior permalinks) abstract member FindPageOfCategorizedPosts : - WebLogId -> CategoryId list -> pageNbr : int -> postsPerPage : int -> Task + WebLogId -> CategoryId list -> pageNbr: int -> postsPerPage: int -> Task - /// Find posts to be displayed on an admin page (excluding revisions and prior permalinks) - abstract member FindPageOfPosts : WebLogId -> pageNbr : int -> postsPerPage : int -> Task + /// Find posts to be displayed on an admin page (excluding text, revisions, and prior permalinks) + abstract member FindPageOfPosts : WebLogId -> pageNbr: int -> postsPerPage: int -> Task /// Find posts to be displayed on a page (excluding revisions and prior permalinks) - abstract member FindPageOfPublishedPosts : WebLogId -> pageNbr : int -> postsPerPage : int -> Task + abstract member FindPageOfPublishedPosts : WebLogId -> pageNbr: int -> postsPerPage: int -> Task /// Find posts to be displayed on a tag list page (excluding revisions and prior permalinks) abstract member FindPageOfTaggedPosts : - WebLogId -> tag : 
string -> pageNbr : int -> postsPerPage : int -> Task + WebLogId -> tag : string -> pageNbr: int -> postsPerPage: int -> Task /// Find the next older and newer post for the given published date/time (excluding revisions and prior permalinks) - abstract member FindSurroundingPosts : WebLogId -> publishedOn : Instant -> Task + abstract member FindSurroundingPosts : WebLogId -> publishedOn: Instant -> Task /// Restore posts from a backup abstract member Restore : Post list -> Task @@ -259,6 +260,9 @@ type IWebLogData = /// Find a web log by its ID abstract member FindById : WebLogId -> Task + /// Update redirect rules for a web log + abstract member UpdateRedirectRules : WebLog -> Task + /// Update RSS options for a web log abstract member UpdateRssOptions : WebLog -> Task diff --git a/src/MyWebLog.Data/MyWebLog.Data.fsproj b/src/MyWebLog.Data/MyWebLog.Data.fsproj index 1f1cf76..a3d9a77 100644 --- a/src/MyWebLog.Data/MyWebLog.Data.fsproj +++ b/src/MyWebLog.Data/MyWebLog.Data.fsproj @@ -5,24 +5,25 @@ - - - - + + + + + - - - + + + - + - + @@ -42,7 +43,13 @@ - + + + + + + <_Parameter1>MyWebLog.Tests + diff --git a/src/MyWebLog.Data/Postgres/PostgresCache.fs b/src/MyWebLog.Data/Postgres/PostgresCache.fs index a7b0280..2a8b9c8 100644 --- a/src/MyWebLog.Data/Postgres/PostgresCache.fs +++ b/src/MyWebLog.Data/Postgres/PostgresCache.fs @@ -2,38 +2,37 @@ namespace MyWebLog.Data.Postgres open System.Threading open System.Threading.Tasks -open BitBadger.Npgsql.FSharp.Documents +open BitBadger.Documents.Postgres open Microsoft.Extensions.Caching.Distributed open NodaTime -open Npgsql.FSharp /// Helper types and functions for the cache [] module private Helpers = /// The cache entry - type Entry = - { /// The ID of the cache entry - Id : string - - /// The value to be cached - Payload : byte[] - - /// When this entry will expire - ExpireAt : Instant - - /// The duration by which the expiration should be pushed out when being refreshed - SlidingExpiration : Duration option - - /// 
The must-expire-by date/time for the cache entry - AbsoluteExpiration : Instant option - } + type Entry = { + /// The ID of the cache entry + Id: string + + /// The value to be cached + Payload: byte array + + /// When this entry will expire + ExpireAt: Instant + + /// The duration by which the expiration should be pushed out when being refreshed + SlidingExpiration: Duration option + + /// The must-expire-by date/time for the cache entry + AbsoluteExpiration: Instant option + } /// Run a task synchronously - let sync<'T> (it : Task<'T>) = it |> (Async.AwaitTask >> Async.RunSynchronously) + let sync<'T> (it: Task<'T>) = it |> (Async.AwaitTask >> Async.RunSynchronously) /// Get the current instant - let getNow () = SystemClock.Instance.GetCurrentInstant () + let getNow () = SystemClock.Instance.GetCurrentInstant() /// Create a parameter for the expire-at time let expireParam = @@ -49,9 +48,11 @@ type DistributedCache () = task { let! exists = Custom.scalar - $"SELECT EXISTS - (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = 'session') - AS {existsName}" [] Map.toExists + "SELECT EXISTS + (SELECT 1 FROM pg_tables WHERE schemaname = 'public' AND tablename = 'session') + AS it" + [] + toExists if not exists then do! Custom.nonQuery "CREATE TABLE session ( @@ -69,13 +70,15 @@ type DistributedCache () = let getEntry key = backgroundTask { let idParam = "@id", Sql.string key let! 
tryEntry = - Custom.single "SELECT * FROM session WHERE id = @id" [ idParam ] - (fun row -> - { Id = row.string "id" - Payload = row.bytea "payload" - ExpireAt = row.fieldValue "expire_at" - SlidingExpiration = row.fieldValueOrNone "sliding_expiration" - AbsoluteExpiration = row.fieldValueOrNone "absolute_expiration" }) + Custom.single + "SELECT * FROM session WHERE id = @id" + [ idParam ] + (fun row -> + { Id = row.string "id" + Payload = row.bytea "payload" + ExpireAt = row.fieldValue "expire_at" + SlidingExpiration = row.fieldValueOrNone "sliding_expiration" + AbsoluteExpiration = row.fieldValueOrNone "absolute_expiration" }) match tryEntry with | Some entry -> let now = getNow () @@ -88,8 +91,9 @@ type DistributedCache () = true, { entry with ExpireAt = absExp } else true, { entry with ExpireAt = now.Plus slideExp } if needsRefresh then - do! Custom.nonQuery "UPDATE session SET expire_at = @expireAt WHERE id = @id" - [ expireParam item.ExpireAt; idParam ] + do! Custom.nonQuery + "UPDATE session SET expire_at = @expireAt WHERE id = @id" + [ expireParam item.ExpireAt; idParam ] () return if item.ExpireAt > now then Some entry else None | None -> return None @@ -101,17 +105,17 @@ type DistributedCache () = /// Purge expired entries every 30 minutes let purge () = backgroundTask { let now = getNow () - if lastPurge.Plus (Duration.FromMinutes 30L) < now then + if lastPurge.Plus(Duration.FromMinutes 30L) < now then do! 
Custom.nonQuery "DELETE FROM session WHERE expire_at < @expireAt" [ expireParam now ] lastPurge <- now } /// Remove a cache entry let removeEntry key = - Delete.byId "session" key + Custom.nonQuery "DELETE FROM session WHERE id = @id" [ "@id", Sql.string key ] /// Save an entry - let saveEntry (opts : DistributedCacheEntryOptions) key payload = + let saveEntry (opts: DistributedCacheEntryOptions) key payload = let now = getNow () let expireAt, slideExp, absExp = if opts.SlidingExpiration.HasValue then @@ -121,7 +125,7 @@ type DistributedCache () = let exp = Instant.FromDateTimeOffset opts.AbsoluteExpiration.Value exp, None, Some exp elif opts.AbsoluteExpirationRelativeToNow.HasValue then - let exp = now.Plus (Duration.FromTimeSpan opts.AbsoluteExpirationRelativeToNow.Value) + let exp = now.Plus(Duration.FromTimeSpan opts.AbsoluteExpirationRelativeToNow.Value) exp, None, Some exp else // Default to 1 hour sliding expiration @@ -146,7 +150,7 @@ type DistributedCache () = // ~~~ IMPLEMENTATION FUNCTIONS ~~~ /// Retrieve the data for a cache entry - let get key (_ : CancellationToken) = backgroundTask { + let get key (_: CancellationToken) = backgroundTask { match! getEntry key with | Some entry -> do! purge () @@ -155,29 +159,29 @@ type DistributedCache () = } /// Refresh an entry - let refresh key (cancelToken : CancellationToken) = backgroundTask { + let refresh key (cancelToken: CancellationToken) = backgroundTask { let! _ = get key cancelToken () } /// Remove an entry - let remove key (_ : CancellationToken) = backgroundTask { + let remove key (_: CancellationToken) = backgroundTask { do! removeEntry key do! purge () } /// Set an entry - let set key value options (_ : CancellationToken) = backgroundTask { + let set key value options (_: CancellationToken) = backgroundTask { do! saveEntry options key value do! 
purge () } interface IDistributedCache with member _.Get key = get key CancellationToken.None |> sync - member _.GetAsync (key, token) = get key token + member _.GetAsync(key, token) = get key token member _.Refresh key = refresh key CancellationToken.None |> sync - member _.RefreshAsync (key, token) = refresh key token + member _.RefreshAsync(key, token) = refresh key token member _.Remove key = remove key CancellationToken.None |> sync - member _.RemoveAsync (key, token) = remove key token - member _.Set (key, value, options) = set key value options CancellationToken.None |> sync - member _.SetAsync (key, value, options, token) = set key value options token + member _.RemoveAsync(key, token) = remove key token + member _.Set(key, value, options) = set key value options CancellationToken.None |> sync + member _.SetAsync(key, value, options, token) = set key value options token diff --git a/src/MyWebLog.Data/Postgres/PostgresCategoryData.fs b/src/MyWebLog.Data/Postgres/PostgresCategoryData.fs index 244faed..6909684 100644 --- a/src/MyWebLog.Data/Postgres/PostgresCategoryData.fs +++ b/src/MyWebLog.Data/Postgres/PostgresCategoryData.fs @@ -1,13 +1,14 @@ namespace MyWebLog.Data.Postgres -open BitBadger.Npgsql.FSharp.Documents +open BitBadger.Documents +open BitBadger.Documents.Postgres open Microsoft.Extensions.Logging open MyWebLog open MyWebLog.Data open Npgsql.FSharp /// PostgreSQL myWebLog category data implementation -type PostgresCategoryData (log : ILogger) = +type PostgresCategoryData(log: ILogger) = /// Count all categories for the given web log let countAll webLogId = @@ -17,14 +18,20 @@ type PostgresCategoryData (log : ILogger) = /// Count all top-level categories for the given web log let countTopLevel webLogId = log.LogTrace "Category.countTopLevel" - Count.byContains Table.Category {| webLogDoc webLogId with ParentId = None |} + Custom.scalar + $"""{Query.Count.byContains Table.Category} + AND {Query.whereByField (Field.NEX (nameof 
Category.Empty.ParentId)) ""}""" + [ webLogContains webLogId ] + toCount /// Retrieve all categories for the given web log in a DotLiquid-friendly format let findAllForView webLogId = backgroundTask { log.LogTrace "Category.findAllForView" let! cats = - Custom.list $"{selectWithCriteria Table.Category} ORDER BY LOWER(data ->> '{nameof Category.empty.Name}')" - [ webLogContains webLogId ] fromData + Custom.list + $"{selectWithCriteria Table.Category} ORDER BY LOWER(data ->> '{nameof Category.Empty.Name}')" + [ webLogContains webLogId ] + fromData let ordered = Utils.orderByHierarchy cats None None [] let counts = ordered @@ -33,20 +40,18 @@ type PostgresCategoryData (log : ILogger) = let catIdSql, catIdParams = ordered |> Seq.filter (fun cat -> cat.ParentNames |> Array.contains it.Name) - |> Seq.map (fun cat -> cat.Id) + |> Seq.map _.Id |> Seq.append (Seq.singleton it.Id) |> List.ofSeq - |> arrayContains (nameof Post.empty.CategoryIds) id + |> arrayContains (nameof Post.Empty.CategoryIds) id let postCount = Custom.scalar - $"""SELECT COUNT(DISTINCT id) AS {countName} + $"""SELECT COUNT(DISTINCT data ->> '{nameof Post.Empty.Id}') AS it FROM {Table.Post} WHERE {Query.whereDataContains "@criteria"} AND {catIdSql}""" - [ "@criteria", - Query.jsonbDocParam {| webLogDoc webLogId with Status = PostStatus.toString Published |} - catIdParams - ] Map.toCount + [ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |}; catIdParams ] + toCount |> Async.AwaitTask |> Async.RunSynchronously it.Id, postCount) @@ -58,71 +63,72 @@ type PostgresCategoryData (log : ILogger) = PostCount = counts |> List.tryFind (fun c -> fst c = cat.Id) |> Option.map snd - |> Option.defaultValue 0 - }) + |> Option.defaultValue 0 }) |> Array.ofSeq } /// Find a category by its ID for the given web log let findById catId webLogId = log.LogTrace "Category.findById" - Document.findByIdAndWebLog Table.Category catId CategoryId.toString webLogId + Document.findByIdAndWebLog Table.Category catId 
webLogId /// Find all categories for the given web log let findByWebLog webLogId = log.LogTrace "Category.findByWebLog" Document.findByWebLog Table.Category webLogId - /// Create parameters for a category insert / update - let catParameters (cat : Category) = - Query.docParameters (CategoryId.toString cat.Id) cat - /// Delete a category let delete catId webLogId = backgroundTask { log.LogTrace "Category.delete" match! findById catId webLogId with | Some cat -> // Reassign any children to the category's parent category - let! children = Find.byContains Table.Category {| ParentId = CategoryId.toString catId |} + let! children = Find.byContains Table.Category {| ParentId = catId |} let hasChildren = not (List.isEmpty children) if hasChildren then + let childQuery, childParams = + if cat.ParentId.IsSome then + Query.Patch.byId Table.Category, + children + |> List.map (fun child -> [ idParam child.Id; jsonParam "@data" {| ParentId = cat.ParentId |} ]) + else + Query.RemoveFields.byId Table.Category, + children + |> List.map (fun child -> + [ idParam child.Id; fieldNameParam [ nameof Category.Empty.ParentId ] ]) let! _ = Configuration.dataSource () |> Sql.fromDataSource - |> Sql.executeTransactionAsync [ - Query.Update.partialById Table.Category, - children |> List.map (fun child -> [ - "@id", Sql.string (CategoryId.toString child.Id) - "@data", Query.jsonbDocParam {| ParentId = cat.ParentId |} - ]) - ] + |> Sql.executeTransactionAsync [ childQuery, childParams ] () // Delete the category off all posts where it is assigned let! posts = - Custom.list $"SELECT data FROM {Table.Post} WHERE data -> '{nameof Post.empty.CategoryIds}' @> @id" - [ "@id", Query.jsonbDocParam [| CategoryId.toString catId |] ] fromData + Custom.list + $"SELECT data FROM {Table.Post} WHERE data -> '{nameof Post.Empty.CategoryIds}' @> @id" + [ jsonParam "@id" [| string catId |] ] + fromData if not (List.isEmpty posts) then let! 
_ = Configuration.dataSource () |> Sql.fromDataSource - |> Sql.executeTransactionAsync [ - Query.Update.partialById Table.Post, - posts |> List.map (fun post -> [ - "@id", Sql.string (PostId.toString post.Id) - "@data", Query.jsonbDocParam - {| CategoryIds = post.CategoryIds |> List.filter (fun cat -> cat <> catId) |} - ]) - ] + |> Sql.executeTransactionAsync + [ Query.Patch.byId Table.Post, + posts + |> List.map (fun post -> + [ idParam post.Id + jsonParam + "@data" + {| CategoryIds = post.CategoryIds |> List.filter (fun cat -> cat <> catId) |} ]) ] () // Delete the category itself - do! Delete.byId Table.Category (CategoryId.toString catId) + do! Delete.byId Table.Category catId return if hasChildren then ReassignedChildCategories else CategoryDeleted | None -> return CategoryNotFound } /// Save a category - let save (cat : Category) = backgroundTask { + let save (cat: Category) = backgroundTask { log.LogTrace "Category.save" - do! save Table.Category (CategoryId.toString cat.Id) cat + do! 
save Table.Category cat } /// Restore categories from a backup @@ -132,7 +138,7 @@ type PostgresCategoryData (log : ILogger) = Configuration.dataSource () |> Sql.fromDataSource |> Sql.executeTransactionAsync [ - Query.insert Table.Category, cats |> List.map catParameters + Query.insert Table.Category, cats |> List.map (fun c -> [ jsonParam "@data" c ]) ] () } diff --git a/src/MyWebLog.Data/Postgres/PostgresHelpers.fs b/src/MyWebLog.Data/Postgres/PostgresHelpers.fs index 765e669..b9e8bff 100644 --- a/src/MyWebLog.Data/Postgres/PostgresHelpers.fs +++ b/src/MyWebLog.Data/Postgres/PostgresHelpers.fs @@ -61,7 +61,8 @@ module Table = open System open System.Threading.Tasks -open BitBadger.Npgsql.FSharp.Documents +open BitBadger.Documents +open BitBadger.Documents.Postgres open MyWebLog open MyWebLog.Data open NodaTime @@ -69,29 +70,23 @@ open Npgsql open Npgsql.FSharp /// Create a SQL parameter for the web log ID -let webLogIdParam webLogId = - "@webLogId", Sql.string (WebLogId.toString webLogId) +let webLogIdParam (webLogId: WebLogId) = + "@webLogId", Sql.string (string webLogId) /// Create an anonymous record with the given web log ID -let webLogDoc (webLogId : WebLogId) = +let webLogDoc (webLogId: WebLogId) = {| WebLogId = webLogId |} /// Create a parameter for a web log document-contains query let webLogContains webLogId = - "@criteria", Query.jsonbDocParam (webLogDoc webLogId) - -/// The name of the field to select to be able to use Map.toCount -let countName = "the_count" - -/// The name of the field to select to be able to use Map.toExists -let existsName = "does_exist" + jsonParam "@criteria" (webLogDoc webLogId) /// A SQL string to select data from a table with the given JSON document contains criteria let selectWithCriteria tableName = $"""{Query.selectFromTable tableName} WHERE {Query.whereDataContains "@criteria"}""" /// Create the SQL and parameters for an IN clause -let inClause<'T> colNameAndPrefix paramName (valueFunc: 'T -> string) (items : 'T list) = 
+let inClause<'T> colNameAndPrefix paramName (items: 'T list) = if List.isEmpty items then "", [] else let mutable idx = 0 @@ -99,114 +94,109 @@ let inClause<'T> colNameAndPrefix paramName (valueFunc: 'T -> string) (items : ' |> List.skip 1 |> List.fold (fun (itemS, itemP) it -> idx <- idx + 1 - $"{itemS}, @%s{paramName}{idx}", ($"@%s{paramName}{idx}", Sql.string (valueFunc it)) :: itemP) + $"{itemS}, @%s{paramName}{idx}", ($"@%s{paramName}{idx}", Sql.string (string it)) :: itemP) (Seq.ofList items |> Seq.map (fun it -> - $"%s{colNameAndPrefix} IN (@%s{paramName}0", [ $"@%s{paramName}0", Sql.string (valueFunc it) ]) + $"%s{colNameAndPrefix} IN (@%s{paramName}0", [ $"@%s{paramName}0", Sql.string (string it) ]) |> Seq.head) |> function sql, ps -> $"{sql})", ps /// Create the SQL and parameters for match-any array query -let arrayContains<'T> name (valueFunc : 'T -> string) (items : 'T list) = +let arrayContains<'T> name (valueFunc: 'T -> string) (items: 'T list) = $"data['{name}'] ?| @{name}Values", ($"@{name}Values", Sql.stringArray (items |> List.map valueFunc |> Array.ofList)) /// Get the first result of the given query -let tryHead<'T> (query : Task<'T list>) = backgroundTask { +let tryHead<'T> (query: Task<'T list>) = backgroundTask { let! 
results = query return List.tryHead results } /// Create a parameter for a non-standard type -let typedParam<'T> name (it : 'T) = - $"@%s{name}", Sql.parameter (NpgsqlParameter ($"@{name}", it)) +let typedParam<'T> name (it: 'T) = + $"@%s{name}", Sql.parameter (NpgsqlParameter($"@{name}", it)) /// Create a parameter for a possibly-missing non-standard type -let optParam<'T> name (it : 'T option) = - let p = NpgsqlParameter ($"@%s{name}", if Option.isSome it then box it.Value else DBNull.Value) +let optParam<'T> name (it: 'T option) = + let p = NpgsqlParameter($"@%s{name}", if Option.isSome it then box it.Value else DBNull.Value) p.ParameterName, Sql.parameter p /// Mapping functions for SQL queries module Map = - /// Get a count from a row - let toCount (row : RowReader) = - row.int countName - - /// Get a true/false value as to whether an item exists - let toExists (row : RowReader) = - row.bool existsName - /// Create a permalink from the current row - let toPermalink (row : RowReader) = + let toPermalink (row: RowReader) = Permalink (row.string "permalink") /// Create a revision from the current row - let toRevision (row : RowReader) : Revision = - { AsOf = row.fieldValue "as_of" - Text = row.string "revision_text" |> MarkupText.parse - } + let toRevision (row: RowReader) : Revision = + { AsOf = row.fieldValue "as_of" + Text = row.string "revision_text" |> MarkupText.Parse } /// Create a theme asset from the current row - let toThemeAsset includeData (row : RowReader) : ThemeAsset = - { Id = ThemeAssetId (ThemeId (row.string "theme_id"), row.string "path") - UpdatedOn = row.fieldValue "updated_on" - Data = if includeData then row.bytea "data" else [||] - } + let toThemeAsset includeData (row: RowReader) : ThemeAsset = + { Id = ThemeAssetId (ThemeId (row.string "theme_id"), row.string "path") + UpdatedOn = row.fieldValue "updated_on" + Data = if includeData then row.bytea "data" else [||] } /// Create an uploaded file from the current row - let toUpload 
includeData (row : RowReader) : Upload = - { Id = row.string "id" |> UploadId - WebLogId = row.string "web_log_id" |> WebLogId - Path = row.string "path" |> Permalink - UpdatedOn = row.fieldValue "updated_on" - Data = if includeData then row.bytea "data" else [||] - } + let toUpload includeData (row: RowReader) : Upload = + { Id = row.string "id" |> UploadId + WebLogId = row.string "web_log_id" |> WebLogId + Path = row.string "path" |> Permalink + UpdatedOn = row.fieldValue "updated_on" + Data = if includeData then row.bytea "data" else [||] } /// Document manipulation functions module Document = /// Determine whether a document exists with the given key for the given web log - let existsByWebLog<'TKey> table (key : 'TKey) (keyFunc : 'TKey -> string) webLogId = + let existsByWebLog<'TKey> table (key: 'TKey) webLogId = Custom.scalar - $""" SELECT EXISTS ( - SELECT 1 FROM %s{table} WHERE id = @id AND {Query.whereDataContains "@criteria"} - ) AS {existsName}""" - [ "@id", Sql.string (keyFunc key); webLogContains webLogId ] Map.toExists + $"""SELECT EXISTS ( + SELECT 1 FROM %s{table} WHERE {Query.whereById "@id"} AND {Query.whereDataContains "@criteria"} + ) AS it""" + [ "@id", Sql.string (string key); webLogContains webLogId ] + toExists /// Find a document by its ID for the given web log - let findByIdAndWebLog<'TKey, 'TDoc> table (key : 'TKey) (keyFunc : 'TKey -> string) webLogId = - Custom.single $"""{Query.selectFromTable table} WHERE id = @id AND {Query.whereDataContains "@criteria"}""" - [ "@id", Sql.string (keyFunc key); webLogContains webLogId ] fromData<'TDoc> + let findByIdAndWebLog<'TKey, 'TDoc> table (key: 'TKey) webLogId = + Custom.single + $"""{Query.selectFromTable table} WHERE {Query.whereById "@id"} AND {Query.whereDataContains "@criteria"}""" + [ "@id", Sql.string (string key); webLogContains webLogId ] + fromData<'TDoc> - /// Find a document by its ID for the given web log + /// Find documents for the given web log let findByWebLog<'TDoc> table 
webLogId : Task<'TDoc list> = Find.byContains table (webLogDoc webLogId) - + /// Functions to support revisions module Revisions = /// Find all revisions for the given entity - let findByEntityId<'TKey> revTable entityTable (key : 'TKey) (keyFunc : 'TKey -> string) = - Custom.list $"SELECT as_of, revision_text FROM %s{revTable} WHERE %s{entityTable}_id = @id ORDER BY as_of DESC" - [ "@id", Sql.string (keyFunc key) ] Map.toRevision + let findByEntityId<'TKey> revTable entityTable (key: 'TKey) = + Custom.list + $"SELECT as_of, revision_text FROM %s{revTable} WHERE %s{entityTable}_id = @id ORDER BY as_of DESC" + [ "@id", Sql.string (string key) ] + Map.toRevision /// Find all revisions for all posts for the given web log - let findByWebLog<'TKey> revTable entityTable (keyFunc : string -> 'TKey) webLogId = + let findByWebLog<'TKey> revTable entityTable (keyFunc: string -> 'TKey) webLogId = Custom.list $"""SELECT pr.* FROM %s{revTable} pr - INNER JOIN %s{entityTable} p ON p.id = pr.{entityTable}_id + INNER JOIN %s{entityTable} p ON p.data ->> '{nameof Post.Empty.Id}' = pr.{entityTable}_id WHERE p.{Query.whereDataContains "@criteria"} ORDER BY as_of DESC""" - [ webLogContains webLogId ] (fun row -> keyFunc (row.string $"{entityTable}_id"), Map.toRevision row) + [ webLogContains webLogId ] + (fun row -> keyFunc (row.string $"{entityTable}_id"), Map.toRevision row) /// Parameters for a revision INSERT statement - let revParams<'TKey> (key : 'TKey) (keyFunc : 'TKey -> string) rev = [ + let revParams<'TKey> (key: 'TKey) rev = [ typedParam "asOf" rev.AsOf - "@id", Sql.string (keyFunc key) - "@text", Sql.string (MarkupText.toString rev.Text) + "@id", Sql.string (string key) + "@text", Sql.string (string rev.Text) ] /// The SQL statement to insert a revision @@ -214,23 +204,20 @@ module Revisions = $"INSERT INTO %s{table} VALUES (@id, @asOf, @text)" /// Update a page's revisions - let update<'TKey> revTable entityTable (key : 'TKey) (keyFunc : 'TKey -> string) oldRevs newRevs = 
backgroundTask { + let update<'TKey> revTable entityTable (key: 'TKey) oldRevs newRevs = backgroundTask { let toDelete, toAdd = Utils.diffRevisions oldRevs newRevs if not (List.isEmpty toDelete) || not (List.isEmpty toAdd) then let! _ = Configuration.dataSource () |> Sql.fromDataSource - |> Sql.executeTransactionAsync [ - if not (List.isEmpty toDelete) then + |> Sql.executeTransactionAsync + [ if not (List.isEmpty toDelete) then $"DELETE FROM %s{revTable} WHERE %s{entityTable}_id = @id AND as_of = @asOf", toDelete - |> List.map (fun it -> [ - "@id", Sql.string (keyFunc key) - typedParam "asOf" it.AsOf - ]) - if not (List.isEmpty toAdd) then - insertSql revTable, toAdd |> List.map (revParams key keyFunc) - ] + |> List.map (fun it -> + [ "@id", Sql.string (string key) + typedParam "asOf" it.AsOf ]) + if not (List.isEmpty toAdd) then + insertSql revTable, toAdd |> List.map (revParams key) ] () } - diff --git a/src/MyWebLog.Data/Postgres/PostgresPageData.fs b/src/MyWebLog.Data/Postgres/PostgresPageData.fs index faa4c79..3c6e023 100644 --- a/src/MyWebLog.Data/Postgres/PostgresPageData.fs +++ b/src/MyWebLog.Data/Postgres/PostgresPageData.fs @@ -1,44 +1,55 @@ namespace MyWebLog.Data.Postgres -open BitBadger.Npgsql.FSharp.Documents +open BitBadger.Documents +open BitBadger.Documents.Postgres open Microsoft.Extensions.Logging open MyWebLog open MyWebLog.Data open Npgsql.FSharp -/// PostgreSQL myWebLog page data implementation -type PostgresPageData (log : ILogger) = +/// PostgreSQL myWebLog page data implementation +type PostgresPageData(log: ILogger) = // SUPPORT FUNCTIONS /// Append revisions to a page - let appendPageRevisions (page : Page) = backgroundTask { + let appendPageRevisions (page: Page) = backgroundTask { log.LogTrace "Page.appendPageRevisions" - let! revisions = Revisions.findByEntityId Table.PageRevision Table.Page page.Id PageId.toString + let! 
revisions = Revisions.findByEntityId Table.PageRevision Table.Page page.Id return { page with Revisions = revisions } } /// Return a page with no text or revisions - let pageWithoutText (row : RowReader) = + let pageWithoutText (row: RowReader) = { fromData row with Text = "" } /// Update a page's revisions - let updatePageRevisions pageId oldRevs newRevs = + let updatePageRevisions (pageId: PageId) oldRevs newRevs = log.LogTrace "Page.updatePageRevisions" - Revisions.update Table.PageRevision Table.Page pageId PageId.toString oldRevs newRevs + Revisions.update Table.PageRevision Table.Page pageId oldRevs newRevs /// Does the given page exist? - let pageExists pageId webLogId = + let pageExists (pageId: PageId) webLogId = log.LogTrace "Page.pageExists" - Document.existsByWebLog Table.Page pageId PageId.toString webLogId + Document.existsByWebLog Table.Page pageId webLogId // IMPLEMENTATION FUNCTIONS - /// Get all pages for a web log (without text or revisions) + /// Add a page + let add (page: Page) = backgroundTask { + log.LogTrace "Page.add" + do! insert Table.Page { page with Revisions = [] } + do! 
updatePageRevisions page.Id [] page.Revisions + () + } + + /// Get all pages for a web log (without text, metadata, revisions, or prior permalinks) let all webLogId = log.LogTrace "Page.all" - Custom.list $"{selectWithCriteria Table.Page} ORDER BY LOWER(data ->> '{nameof Page.empty.Title}')" - [ webLogContains webLogId ] fromData + Custom.list + $"{selectWithCriteria Table.Page} ORDER BY LOWER(data ->> '{nameof Page.Empty.Title}')" + [ webLogContains webLogId ] + (fun row -> { fromData row with Text = ""; Metadata = []; PriorPermalinks = [] }) /// Count all pages for the given web log let countAll webLogId = @@ -50,50 +61,61 @@ type PostgresPageData (log : ILogger) = log.LogTrace "Page.countListed" Count.byContains Table.Page {| webLogDoc webLogId with IsInPageList = true |} - /// Find a page by its ID (without revisions) - let findById pageId webLogId = + /// Find a page by its ID (without revisions or prior permalinks) + let findById pageId webLogId = backgroundTask { log.LogTrace "Page.findById" - Document.findByIdAndWebLog Table.Page pageId PageId.toString webLogId + match! Document.findByIdAndWebLog Table.Page pageId webLogId with + | Some page -> return Some { page with PriorPermalinks = [] } + | None -> return None + } /// Find a complete page by its ID let findFullById pageId webLogId = backgroundTask { log.LogTrace "Page.findFullById" - match! findById pageId webLogId with + match! Document.findByIdAndWebLog Table.Page pageId webLogId with | Some page -> let! withMore = appendPageRevisions page return Some withMore | None -> return None } + // TODO: need to handle when the page being deleted is the home page /// Delete a page by its ID let delete pageId webLogId = backgroundTask { log.LogTrace "Page.delete" match! pageExists pageId webLogId with | true -> - do! Delete.byId Table.Page (PageId.toString pageId) + do! 
Custom.nonQuery + $"""DELETE FROM {Table.PageRevision} WHERE page_id = @id; + DELETE FROM {Table.Page} WHERE {Query.whereById "@id"}""" + [ idParam pageId ] return true | false -> return false } /// Find a page by its permalink for the given web log - let findByPermalink permalink webLogId = + let findByPermalink (permalink: Permalink) webLogId = backgroundTask { log.LogTrace "Page.findByPermalink" - Find.byContains Table.Page {| webLogDoc webLogId with Permalink = Permalink.toString permalink |} - |> tryHead + let! page = + Find.byContains Table.Page {| webLogDoc webLogId with Permalink = permalink |} + |> tryHead + return page |> Option.map (fun pg -> { pg with PriorPermalinks = [] }) + } /// Find the current permalink within a set of potential prior permalinks for the given web log - let findCurrentPermalink permalinks webLogId = backgroundTask { + let findCurrentPermalink (permalinks: Permalink list) webLogId = backgroundTask { log.LogTrace "Page.findCurrentPermalink" if List.isEmpty permalinks then return None else - let linkSql, linkParam = - arrayContains (nameof Page.empty.PriorPermalinks) Permalink.toString permalinks + let linkSql, linkParam = arrayContains (nameof Page.Empty.PriorPermalinks) string permalinks return! 
Custom.single - $"""SELECT data ->> '{nameof Page.empty.Permalink}' AS permalink + $"""SELECT data ->> '{nameof Page.Empty.Permalink}' AS permalink FROM page WHERE {Query.whereDataContains "@criteria"} - AND {linkSql}""" [ webLogContains webLogId; linkParam ] Map.toPermalink + AND {linkSql}""" + [ webLogContains webLogId; linkParam ] + Map.toPermalink } /// Get all complete pages for the given web log @@ -110,58 +132,59 @@ type PostgresPageData (log : ILogger) = /// Get all listed pages for the given web log (without revisions or text) let findListed webLogId = log.LogTrace "Page.findListed" - Custom.list $"{selectWithCriteria Table.Page} ORDER BY LOWER(data ->> '{nameof Page.empty.Title}')" - [ "@criteria", Query.jsonbDocParam {| webLogDoc webLogId with IsInPageList = true |} ] - pageWithoutText + Custom.list + $"{selectWithCriteria Table.Page} ORDER BY LOWER(data ->> '{nameof Page.Empty.Title}')" + [ jsonParam "@criteria" {| webLogDoc webLogId with IsInPageList = true |} ] + pageWithoutText /// Get a page of pages for the given web log (without revisions) let findPageOfPages webLogId pageNbr = log.LogTrace "Page.findPageOfPages" Custom.list $"{selectWithCriteria Table.Page} - ORDER BY LOWER(data->>'{nameof Page.empty.Title}') + ORDER BY LOWER(data->>'{nameof Page.Empty.Title}') LIMIT @pageSize OFFSET @toSkip" [ webLogContains webLogId; "@pageSize", Sql.int 26; "@toSkip", Sql.int ((pageNbr - 1) * 25) ] - fromData + (fun row -> { fromData row with Metadata = []; PriorPermalinks = [] }) /// Restore pages from a backup - let restore (pages : Page list) = backgroundTask { + let restore (pages: Page list) = backgroundTask { log.LogTrace "Page.restore" let revisions = pages |> List.collect (fun p -> p.Revisions |> List.map (fun r -> p.Id, r)) let! 
_ = Configuration.dataSource () |> Sql.fromDataSource - |> Sql.executeTransactionAsync [ - Query.insert Table.Page, - pages - |> List.map (fun page -> Query.docParameters (PageId.toString page.Id) { page with Revisions = [] }) - Revisions.insertSql Table.PageRevision, - revisions |> List.map (fun (pageId, rev) -> Revisions.revParams pageId PageId.toString rev) - ] + |> Sql.executeTransactionAsync + [ Query.insert Table.Page, + pages |> List.map (fun page -> [ jsonParam "@data" { page with Revisions = [] } ]) + Revisions.insertSql Table.PageRevision, + revisions |> List.map (fun (pageId, rev) -> Revisions.revParams pageId rev) ] () } - /// Save a page - let save (page : Page) = backgroundTask { - log.LogTrace "Page.save" - let! oldPage = findFullById page.Id page.WebLogId - do! save Table.Page (PageId.toString page.Id) { page with Revisions = [] } - do! updatePageRevisions page.Id (match oldPage with Some p -> p.Revisions | None -> []) page.Revisions + /// Update a page + let update (page: Page) = backgroundTask { + log.LogTrace "Page.update" + match! findFullById page.Id page.WebLogId with + | Some oldPage -> + do! Update.byId Table.Page page.Id { page with Revisions = [] } + do! updatePageRevisions page.Id oldPage.Revisions page.Revisions + | None -> () () } /// Update a page's prior permalinks - let updatePriorPermalinks pageId webLogId permalinks = backgroundTask { + let updatePriorPermalinks pageId webLogId (permalinks: Permalink list) = backgroundTask { log.LogTrace "Page.updatePriorPermalinks" match! pageExists pageId webLogId with | true -> - do! Update.partialById Table.Page (PageId.toString pageId) {| PriorPermalinks = permalinks |} + do! 
Patch.byId Table.Page pageId {| PriorPermalinks = permalinks |} return true | false -> return false } interface IPageData with - member _.Add page = save page + member _.Add page = add page member _.All webLogId = all webLogId member _.CountAll webLogId = countAll webLogId member _.CountListed webLogId = countListed webLogId @@ -174,5 +197,5 @@ type PostgresPageData (log : ILogger) = member _.FindListed webLogId = findListed webLogId member _.FindPageOfPages webLogId pageNbr = findPageOfPages webLogId pageNbr member _.Restore pages = restore pages - member _.Update page = save page + member _.Update page = update page member _.UpdatePriorPermalinks pageId webLogId permalinks = updatePriorPermalinks pageId webLogId permalinks diff --git a/src/MyWebLog.Data/Postgres/PostgresPostData.fs b/src/MyWebLog.Data/Postgres/PostgresPostData.fs index d3791de..7724ca0 100644 --- a/src/MyWebLog.Data/Postgres/PostgresPostData.fs +++ b/src/MyWebLog.Data/Postgres/PostgresPostData.fs @@ -1,62 +1,77 @@ namespace MyWebLog.Data.Postgres -open BitBadger.Npgsql.FSharp.Documents +open BitBadger.Documents +open BitBadger.Documents.Postgres open Microsoft.Extensions.Logging open MyWebLog open MyWebLog.Data -open NodaTime.Text +open NodaTime open Npgsql.FSharp -/// PostgreSQL myWebLog post data implementation -type PostgresPostData (log : ILogger) = +/// PostgreSQL myWebLog post data implementation +type PostgresPostData(log: ILogger) = // SUPPORT FUNCTIONS /// Append revisions to a post - let appendPostRevisions (post : Post) = backgroundTask { + let appendPostRevisions (post: Post) = backgroundTask { log.LogTrace "Post.appendPostRevisions" - let! revisions = Revisions.findByEntityId Table.PostRevision Table.Post post.Id PostId.toString + let! 
revisions = Revisions.findByEntityId Table.PostRevision Table.Post post.Id return { post with Revisions = revisions } } + /// Return a post with no revisions or prior permalinks + let postWithoutLinks row = + { fromData row with PriorPermalinks = [] } + /// Return a post with no revisions, prior permalinks, or text let postWithoutText row = - { fromData row with Text = "" } + { postWithoutLinks row with Text = "" } /// Update a post's revisions - let updatePostRevisions postId oldRevs newRevs = + let updatePostRevisions (postId: PostId) oldRevs newRevs = log.LogTrace "Post.updatePostRevisions" - Revisions.update Table.PostRevision Table.Post postId PostId.toString oldRevs newRevs + Revisions.update Table.PostRevision Table.Post postId oldRevs newRevs /// Does the given post exist? - let postExists postId webLogId = + let postExists (postId: PostId) webLogId = log.LogTrace "Post.postExists" - Document.existsByWebLog Table.Post postId PostId.toString webLogId + Document.existsByWebLog Table.Post postId webLogId // IMPLEMENTATION FUNCTIONS + /// Add a post + let add (post : Post) = backgroundTask { + log.LogTrace "Post.add" + do! insert Table.Post { post with Revisions = [] } + do! updatePostRevisions post.Id [] post.Revisions + } + /// Count posts in a status for the given web log - let countByStatus status webLogId = + let countByStatus (status: PostStatus) webLogId = log.LogTrace "Post.countByStatus" - Count.byContains Table.Post {| webLogDoc webLogId with Status = PostStatus.toString status |} + Count.byContains Table.Post {| webLogDoc webLogId with Status = status |} /// Find a post by its ID for the given web log (excluding revisions) - let findById postId webLogId = + let findById postId webLogId = backgroundTask { log.LogTrace "Post.findById" - Document.findByIdAndWebLog Table.Post postId PostId.toString webLogId + match! 
Document.findByIdAndWebLog Table.Post postId webLogId with + | Some post -> return Some { post with PriorPermalinks = [] } + | None -> return None + } - /// Find a post by its permalink for the given web log (excluding revisions and prior permalinks) - let findByPermalink permalink webLogId = + /// Find a post by its permalink for the given web log (excluding revisions) + let findByPermalink (permalink: Permalink) webLogId = log.LogTrace "Post.findByPermalink" - Custom.single (selectWithCriteria Table.Post) - [ "@criteria", - Query.jsonbDocParam {| webLogDoc webLogId with Permalink = Permalink.toString permalink |} - ] fromData + Custom.single + (selectWithCriteria Table.Post) + [ jsonParam "@criteria" {| webLogDoc webLogId with Permalink = permalink |} ] + postWithoutLinks /// Find a complete post by its ID for the given web log let findFullById postId webLogId = backgroundTask { log.LogTrace "Post.findFullById" - match! findById postId webLogId with + match! Document.findByIdAndWebLog Table.Post postId webLogId with | Some post -> let! withRevisions = appendPostRevisions post return Some withRevisions @@ -68,28 +83,29 @@ type PostgresPostData (log : ILogger) = log.LogTrace "Post.delete" match! postExists postId webLogId with | true -> - let theId = PostId.toString postId do! 
Custom.nonQuery - $"""DELETE FROM {Table.PostComment} WHERE {Query.whereDataContains "@criteria"}; - DELETE FROM {Table.Post} WHERE id = @id""" - [ "@id", Sql.string theId; "@criteria", Query.jsonbDocParam {| PostId = theId |} ] + $"""DELETE FROM {Table.PostComment} WHERE {Query.whereDataContains "@criteria"}; + DELETE FROM {Table.PostRevision} WHERE post_id = @id; + DELETE FROM {Table.Post} WHERE {Query.whereById "@id"}""" + [ idParam postId; jsonParam "@criteria" {| PostId = postId |} ] return true | false -> return false } /// Find the current permalink from a list of potential prior permalinks for the given web log - let findCurrentPermalink permalinks webLogId = backgroundTask { + let findCurrentPermalink (permalinks: Permalink list) webLogId = backgroundTask { log.LogTrace "Post.findCurrentPermalink" if List.isEmpty permalinks then return None else - let linkSql, linkParam = - arrayContains (nameof Post.empty.PriorPermalinks) Permalink.toString permalinks + let linkSql, linkParam = arrayContains (nameof Post.Empty.PriorPermalinks) string permalinks return! 
Custom.single - $"""SELECT data ->> '{nameof Post.empty.Permalink}' AS permalink + $"""SELECT data ->> '{nameof Post.Empty.Permalink}' AS permalink FROM {Table.Post} WHERE {Query.whereDataContains "@criteria"} - AND {linkSql}""" [ webLogContains webLogId; linkParam ] Map.toPermalink + AND {linkSql}""" + [ webLogContains webLogId; linkParam ] + Map.toPermalink } /// Get all complete posts for the given web log @@ -104,79 +120,73 @@ type PostgresPostData (log : ILogger) = } /// Get a page of categorized posts for the given web log (excludes revisions) - let findPageOfCategorizedPosts webLogId categoryIds pageNbr postsPerPage = + let findPageOfCategorizedPosts webLogId (categoryIds: CategoryId list) pageNbr postsPerPage = log.LogTrace "Post.findPageOfCategorizedPosts" - let catSql, catParam = arrayContains (nameof Post.empty.CategoryIds) CategoryId.toString categoryIds + let catSql, catParam = arrayContains (nameof Post.Empty.CategoryIds) string categoryIds Custom.list $"{selectWithCriteria Table.Post} AND {catSql} - ORDER BY data ->> '{nameof Post.empty.PublishedOn}' DESC + ORDER BY data ->> '{nameof Post.Empty.PublishedOn}' DESC LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}" - [ "@criteria", Query.jsonbDocParam {| webLogDoc webLogId with Status = PostStatus.toString Published |} - catParam - ] fromData + [ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |}; catParam ] + postWithoutLinks /// Get a page of posts for the given web log (excludes text and revisions) let findPageOfPosts webLogId pageNbr postsPerPage = log.LogTrace "Post.findPageOfPosts" Custom.list $"{selectWithCriteria Table.Post} - ORDER BY data ->> '{nameof Post.empty.PublishedOn}' DESC NULLS FIRST, - data ->> '{nameof Post.empty.UpdatedOn}' + ORDER BY data ->> '{nameof Post.Empty.PublishedOn}' DESC NULLS FIRST, + data ->> '{nameof Post.Empty.UpdatedOn}' LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}" - [ webLogContains webLogId ] postWithoutText + [ 
webLogContains webLogId ] + postWithoutText /// Get a page of published posts for the given web log (excludes revisions) let findPageOfPublishedPosts webLogId pageNbr postsPerPage = log.LogTrace "Post.findPageOfPublishedPosts" Custom.list $"{selectWithCriteria Table.Post} - ORDER BY data ->> '{nameof Post.empty.PublishedOn}' DESC + ORDER BY data ->> '{nameof Post.Empty.PublishedOn}' DESC LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}" - [ "@criteria", Query.jsonbDocParam {| webLogDoc webLogId with Status = PostStatus.toString Published |} ] - fromData + [ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |} ] + postWithoutLinks /// Get a page of tagged posts for the given web log (excludes revisions and prior permalinks) - let findPageOfTaggedPosts webLogId (tag : string) pageNbr postsPerPage = + let findPageOfTaggedPosts webLogId (tag: string) pageNbr postsPerPage = log.LogTrace "Post.findPageOfTaggedPosts" Custom.list $"{selectWithCriteria Table.Post} - AND data['{nameof Post.empty.Tags}'] @> @tag - ORDER BY data ->> '{nameof Post.empty.PublishedOn}' DESC + AND data['{nameof Post.Empty.Tags}'] @> @tag + ORDER BY data ->> '{nameof Post.Empty.PublishedOn}' DESC LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}" - [ "@criteria", Query.jsonbDocParam {| webLogDoc webLogId with Status = PostStatus.toString Published |} - "@tag", Query.jsonbDocParam [| tag |] - ] fromData + [ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |}; jsonParam "@tag" [| tag |] ] + postWithoutLinks /// Find the next newest and oldest post from a publish date for the given web log - let findSurroundingPosts webLogId publishedOn = backgroundTask { + let findSurroundingPosts webLogId (publishedOn: Instant) = backgroundTask { log.LogTrace "Post.findSurroundingPosts" - let queryParams () = [ - "@criteria", Query.jsonbDocParam {| webLogDoc webLogId with Status = PostStatus.toString Published |} - "@publishedOn", Sql.string 
((InstantPattern.General.Format publishedOn).Substring (0, 19)) - ] - let pubField = nameof Post.empty.PublishedOn - let! older = - Custom.list - $"{selectWithCriteria Table.Post} - AND SUBSTR(data ->> '{pubField}', 1, 19) < @publishedOn - ORDER BY data ->> '{pubField}' DESC - LIMIT 1" (queryParams ()) fromData - let! newer = - Custom.list - $"{selectWithCriteria Table.Post} - AND SUBSTR(data ->> '{pubField}', 1, 19) > @publishedOn - ORDER BY data ->> '{pubField}' - LIMIT 1" (queryParams ()) fromData + let queryParams () = + [ jsonParam "@criteria" {| webLogDoc webLogId with Status = Published |} + "@publishedOn", Sql.timestamptz (publishedOn.ToDateTimeOffset()) ] + let query op direction = + $"{selectWithCriteria Table.Post} + AND (data ->> '{nameof Post.Empty.PublishedOn}')::timestamp with time zone %s{op} @publishedOn + ORDER BY data ->> '{nameof Post.Empty.PublishedOn}' %s{direction} + LIMIT 1" + let! older = Custom.list (query "<" "DESC") (queryParams ()) postWithoutLinks + let! newer = Custom.list (query ">" "") (queryParams ()) postWithoutLinks return List.tryHead older, List.tryHead newer } - /// Save a post - let save (post : Post) = backgroundTask { + /// Update a post + let update (post : Post) = backgroundTask { log.LogTrace "Post.save" - let! oldPost = findFullById post.Id post.WebLogId - do! save Table.Post (PostId.toString post.Id) { post with Revisions = [] } - do! updatePostRevisions post.Id (match oldPost with Some p -> p.Revisions | None -> []) post.Revisions + match! findFullById post.Id post.WebLogId with + | Some oldPost -> + do! Update.byId Table.Post post.Id { post with Revisions = [] } + do! updatePostRevisions post.Id oldPost.Revisions post.Revisions + | None -> () } /// Restore posts from a backup @@ -186,28 +196,26 @@ type PostgresPostData (log : ILogger) = let! 
_ = Configuration.dataSource () |> Sql.fromDataSource - |> Sql.executeTransactionAsync [ - Query.insert Table.Post, - posts - |> List.map (fun post -> Query.docParameters (PostId.toString post.Id) { post with Revisions = [] }) - Revisions.insertSql Table.PostRevision, - revisions |> List.map (fun (postId, rev) -> Revisions.revParams postId PostId.toString rev) - ] + |> Sql.executeTransactionAsync + [ Query.insert Table.Post, + posts |> List.map (fun post -> [ jsonParam "@data" { post with Revisions = [] } ]) + Revisions.insertSql Table.PostRevision, + revisions |> List.map (fun (postId, rev) -> Revisions.revParams postId rev) ] () } /// Update prior permalinks for a post - let updatePriorPermalinks postId webLogId permalinks = backgroundTask { + let updatePriorPermalinks postId webLogId (permalinks: Permalink list) = backgroundTask { log.LogTrace "Post.updatePriorPermalinks" match! postExists postId webLogId with | true -> - do! Update.partialById Table.Post (PostId.toString postId) {| PriorPermalinks = permalinks |} + do! 
Patch.byId Table.Post postId {| PriorPermalinks = permalinks |} return true | false -> return false } interface IPostData with - member _.Add post = save post + member _.Add post = add post member _.CountByStatus status webLogId = countByStatus status webLogId member _.Delete postId webLogId = delete postId webLogId member _.FindById postId webLogId = findById postId webLogId @@ -224,5 +232,5 @@ type PostgresPostData (log : ILogger) = findPageOfTaggedPosts webLogId tag pageNbr postsPerPage member _.FindSurroundingPosts webLogId publishedOn = findSurroundingPosts webLogId publishedOn member _.Restore posts = restore posts - member _.Update post = save post + member _.Update post = update post member _.UpdatePriorPermalinks postId webLogId permalinks = updatePriorPermalinks postId webLogId permalinks diff --git a/src/MyWebLog.Data/Postgres/PostgresTagMapData.fs b/src/MyWebLog.Data/Postgres/PostgresTagMapData.fs index 6c0aa52..2369a6f 100644 --- a/src/MyWebLog.Data/Postgres/PostgresTagMapData.fs +++ b/src/MyWebLog.Data/Postgres/PostgresTagMapData.fs @@ -1,62 +1,65 @@ namespace MyWebLog.Data.Postgres -open BitBadger.Npgsql.FSharp.Documents +open BitBadger.Documents +open BitBadger.Documents.Postgres open Microsoft.Extensions.Logging open MyWebLog open MyWebLog.Data open Npgsql.FSharp -/// PostgreSQL myWebLog tag mapping data implementation -type PostgresTagMapData (log : ILogger) = +/// PostgreSQL myWebLog tag mapping data implementation +type PostgresTagMapData(log: ILogger) = /// Find a tag mapping by its ID for the given web log let findById tagMapId webLogId = log.LogTrace "TagMap.findById" - Document.findByIdAndWebLog Table.TagMap tagMapId TagMapId.toString webLogId + Document.findByIdAndWebLog Table.TagMap tagMapId webLogId /// Delete a tag mapping for the given web log - let delete tagMapId webLogId = backgroundTask { + let delete (tagMapId: TagMapId) webLogId = backgroundTask { log.LogTrace "TagMap.delete" - let! 
exists = Document.existsByWebLog Table.TagMap tagMapId TagMapId.toString webLogId + let! exists = Document.existsByWebLog Table.TagMap tagMapId webLogId if exists then - do! Delete.byId Table.TagMap (TagMapId.toString tagMapId) + do! Delete.byId Table.TagMap tagMapId return true else return false } /// Find a tag mapping by its URL value for the given web log - let findByUrlValue (urlValue : string) webLogId = + let findByUrlValue (urlValue: string) webLogId = log.LogTrace "TagMap.findByUrlValue" - Custom.single (selectWithCriteria Table.TagMap) - [ "@criteria", Query.jsonbDocParam {| webLogDoc webLogId with UrlValue = urlValue |} ] - fromData + Find.firstByContains Table.TagMap {| webLogDoc webLogId with UrlValue = urlValue |} /// Get all tag mappings for the given web log let findByWebLog webLogId = log.LogTrace "TagMap.findByWebLog" - Custom.list $"{selectWithCriteria Table.TagMap} ORDER BY data ->> 'tag'" [ webLogContains webLogId ] - fromData + Custom.list + $"{selectWithCriteria Table.TagMap} ORDER BY data ->> 'tag'" + [ webLogContains webLogId ] + fromData /// Find any tag mappings in a list of tags for the given web log let findMappingForTags tags webLogId = log.LogTrace "TagMap.findMappingForTags" - let tagSql, tagParam = arrayContains (nameof TagMap.empty.Tag) id tags - Custom.list $"{selectWithCriteria Table.TagMap} AND {tagSql}" [ webLogContains webLogId; tagParam ] - fromData + let tagSql, tagParam = arrayContains (nameof TagMap.Empty.Tag) id tags + Custom.list + $"{selectWithCriteria Table.TagMap} AND {tagSql}" + [ webLogContains webLogId; tagParam ] + fromData /// Save a tag mapping - let save (tagMap : TagMap) = - save Table.TagMap (TagMapId.toString tagMap.Id) tagMap + let save (tagMap: TagMap) = + log.LogTrace "TagMap.save" + save Table.TagMap tagMap /// Restore tag mappings from a backup - let restore (tagMaps : TagMap list) = backgroundTask { + let restore (tagMaps: TagMap list) = backgroundTask { let! 
_ = Configuration.dataSource () |> Sql.fromDataSource - |> Sql.executeTransactionAsync [ - Query.insert Table.TagMap, - tagMaps |> List.map (fun tagMap -> Query.docParameters (TagMapId.toString tagMap.Id) tagMap) - ] + |> Sql.executeTransactionAsync + [ Query.insert Table.TagMap, + tagMaps |> List.map (fun tagMap -> [ jsonParam "@data" tagMap ]) ] () } diff --git a/src/MyWebLog.Data/Postgres/PostgresThemeData.fs b/src/MyWebLog.Data/Postgres/PostgresThemeData.fs index 00af329..1f4d089 100644 --- a/src/MyWebLog.Data/Postgres/PostgresThemeData.fs +++ b/src/MyWebLog.Data/Postgres/PostgresThemeData.fs @@ -1,13 +1,13 @@ namespace MyWebLog.Data.Postgres -open BitBadger.Npgsql.FSharp.Documents +open BitBadger.Documents +open BitBadger.Documents.Postgres open Microsoft.Extensions.Logging open MyWebLog open MyWebLog.Data -open Npgsql.FSharp -/// PostreSQL myWebLog theme data implementation -type PostgresThemeData (log : ILogger) = +/// PostreSQL myWebLog theme data implementation +type PostgresThemeData(log: ILogger) = /// Clear out the template text from a theme let withoutTemplateText row = @@ -17,40 +17,48 @@ type PostgresThemeData (log : ILogger) = /// Retrieve all themes (except 'admin'; excludes template text) let all () = log.LogTrace "Theme.all" - Custom.list $"{Query.selectFromTable Table.Theme} WHERE id <> 'admin' ORDER BY id" [] withoutTemplateText + Custom.list + $"{Query.selectFromTable Table.Theme} + WHERE data ->> '{nameof Theme.Empty.Id}' <> 'admin' + ORDER BY data ->> '{nameof Theme.Empty.Id}'" + [] + withoutTemplateText /// Does a given theme exist? 
- let exists themeId = + let exists (themeId: ThemeId) = log.LogTrace "Theme.exists" - Exists.byId Table.Theme (ThemeId.toString themeId) + Exists.byId Table.Theme themeId /// Find a theme by its ID - let findById themeId = + let findById (themeId: ThemeId) = log.LogTrace "Theme.findById" - Find.byId Table.Theme (ThemeId.toString themeId) + Find.byId Table.Theme themeId /// Find a theme by its ID (excludes the text of templates) - let findByIdWithoutText themeId = + let findByIdWithoutText (themeId: ThemeId) = log.LogTrace "Theme.findByIdWithoutText" - Custom.single (Query.Find.byId Table.Theme) [ "@id", Sql.string (ThemeId.toString themeId) ] withoutTemplateText + Custom.single (Query.Find.byId Table.Theme) [ idParam themeId ] withoutTemplateText /// Delete a theme by its ID let delete themeId = backgroundTask { log.LogTrace "Theme.delete" match! exists themeId with | true -> - do! Delete.byId Table.Theme (ThemeId.toString themeId) + do! Custom.nonQuery + $"""DELETE FROM {Table.ThemeAsset} WHERE theme_id = @id; + DELETE FROM {Table.Theme} WHERE {Query.whereById "@id"}""" + [ idParam themeId ] return true | false -> return false } /// Save a theme - let save (theme : Theme) = + let save (theme: Theme) = log.LogTrace "Theme.save" - save Table.Theme (ThemeId.toString theme.Id) theme + save Table.Theme theme interface IThemeData with - member _.All () = all () + member _.All() = all () member _.Delete themeId = delete themeId member _.Exists themeId = exists themeId member _.FindById themeId = findById themeId @@ -58,8 +66,8 @@ type PostgresThemeData (log : ILogger) = member _.Save theme = save theme -/// PostreSQL myWebLog theme data implementation -type PostgresThemeAssetData (log : ILogger) = +/// PostreSQL myWebLog theme data implementation +type PostgresThemeAssetData(log: ILogger) = /// Get all theme assets (excludes data) let all () = @@ -67,32 +75,34 @@ type PostgresThemeAssetData (log : ILogger) = Custom.list $"SELECT theme_id, path, updated_on FROM 
{Table.ThemeAsset}" [] (Map.toThemeAsset false) /// Delete all assets for the given theme - let deleteByTheme themeId = + let deleteByTheme (themeId: ThemeId) = log.LogTrace "ThemeAsset.deleteByTheme" - Custom.nonQuery $"DELETE FROM {Table.ThemeAsset} WHERE theme_id = @themeId" - [ "@themeId", Sql.string (ThemeId.toString themeId) ] + Custom.nonQuery $"DELETE FROM {Table.ThemeAsset} WHERE theme_id = @id" [ idParam themeId ] /// Find a theme asset by its ID let findById assetId = log.LogTrace "ThemeAsset.findById" let (ThemeAssetId (ThemeId themeId, path)) = assetId - Custom.single $"SELECT * FROM {Table.ThemeAsset} WHERE theme_id = @themeId AND path = @path" - [ "@themeId", Sql.string themeId; "@path", Sql.string path ] (Map.toThemeAsset true) + Custom.single + $"SELECT * FROM {Table.ThemeAsset} WHERE theme_id = @id AND path = @path" + [ idParam themeId; "@path", Sql.string path ] + (Map.toThemeAsset true) /// Get theme assets for the given theme (excludes data) - let findByTheme themeId = + let findByTheme (themeId: ThemeId) = log.LogTrace "ThemeAsset.findByTheme" - Custom.list $"SELECT theme_id, path, updated_on FROM {Table.ThemeAsset} WHERE theme_id = @themeId" - [ "@themeId", Sql.string (ThemeId.toString themeId) ] (Map.toThemeAsset false) + Custom.list + $"SELECT theme_id, path, updated_on FROM {Table.ThemeAsset} WHERE theme_id = @id" + [ idParam themeId ] + (Map.toThemeAsset false) /// Get theme assets for the given theme - let findByThemeWithData themeId = + let findByThemeWithData (themeId: ThemeId) = log.LogTrace "ThemeAsset.findByThemeWithData" - Custom.list $"SELECT * FROM {Table.ThemeAsset} WHERE theme_id = @themeId" - [ "@themeId", Sql.string (ThemeId.toString themeId) ] (Map.toThemeAsset true) + Custom.list $"SELECT * FROM {Table.ThemeAsset} WHERE theme_id = @id" [ idParam themeId ] (Map.toThemeAsset true) /// Save a theme asset - let save (asset : ThemeAsset) = + let save (asset: ThemeAsset) = log.LogTrace "ThemeAsset.save" let (ThemeAssetId (ThemeId 
themeId, path)) = asset.Id Custom.nonQuery @@ -103,13 +113,13 @@ type PostgresThemeAssetData (log : ILogger) = ) ON CONFLICT (theme_id, path) DO UPDATE SET updated_on = EXCLUDED.updated_on, data = EXCLUDED.data" - [ "@themeId", Sql.string themeId - "@path", Sql.string path - "@data", Sql.bytea asset.Data - typedParam "updatedOn" asset.UpdatedOn ] + [ "@themeId", Sql.string themeId + "@path", Sql.string path + "@data", Sql.bytea asset.Data + typedParam "updatedOn" asset.UpdatedOn ] interface IThemeAssetData with - member _.All () = all () + member _.All() = all () member _.DeleteByTheme themeId = deleteByTheme themeId member _.FindById assetId = findById assetId member _.FindByTheme themeId = findByTheme themeId diff --git a/src/MyWebLog.Data/Postgres/PostgresUploadData.fs b/src/MyWebLog.Data/Postgres/PostgresUploadData.fs index 97e36eb..1ceda0f 100644 --- a/src/MyWebLog.Data/Postgres/PostgresUploadData.fs +++ b/src/MyWebLog.Data/Postgres/PostgresUploadData.fs @@ -1,13 +1,13 @@ namespace MyWebLog.Data.Postgres -open BitBadger.Npgsql.FSharp.Documents +open BitBadger.Documents.Postgres open Microsoft.Extensions.Logging open MyWebLog open MyWebLog.Data open Npgsql.FSharp -/// PostgreSQL myWebLog uploaded file data implementation -type PostgresUploadData (log : ILogger) = +/// PostgreSQL myWebLog uploaded file data implementation +type PostgresUploadData(log: ILogger) = /// The INSERT statement for an uploaded file let upInsert = $" @@ -18,13 +18,12 @@ type PostgresUploadData (log : ILogger) = )" /// Parameters for adding an uploaded file - let upParams (upload : Upload) = [ - webLogIdParam upload.WebLogId - typedParam "updatedOn" upload.UpdatedOn - "@id", Sql.string (UploadId.toString upload.Id) - "@path", Sql.string (Permalink.toString upload.Path) - "@data", Sql.bytea upload.Data - ] + let upParams (upload: Upload) = + [ webLogIdParam upload.WebLogId + typedParam "updatedOn" upload.UpdatedOn + idParam upload.Id + "@path", Sql.string (string upload.Path) + "@data", 
Sql.bytea upload.Data ] /// Save an uploaded file let add upload = @@ -34,33 +33,41 @@ type PostgresUploadData (log : ILogger) = /// Delete an uploaded file by its ID let delete uploadId webLogId = backgroundTask { log.LogTrace "Upload.delete" - let idParam = [ "@id", Sql.string (UploadId.toString uploadId) ] + let idParam = [ idParam uploadId ] let! path = - Custom.single $"SELECT path FROM {Table.Upload} WHERE id = @id AND web_log_id = @webLogId" - (webLogIdParam webLogId :: idParam) (fun row -> row.string "path") + Custom.single + $"SELECT path FROM {Table.Upload} WHERE id = @id AND web_log_id = @webLogId" + (webLogIdParam webLogId :: idParam) + (fun row -> row.string "path") if Option.isSome path then - do! Custom.nonQuery (Query.Delete.byId Table.Upload) idParam + do! Custom.nonQuery $"DELETE FROM {Table.Upload} WHERE id = @id" idParam return Ok path.Value - else return Error $"""Upload ID {UploadId.toString uploadId} not found""" + else return Error $"Upload ID {uploadId} not found" } /// Find an uploaded file by its path for the given web log let findByPath path webLogId = log.LogTrace "Upload.findByPath" - Custom.single $"SELECT * FROM {Table.Upload} WHERE web_log_id = @webLogId AND path = @path" - [ webLogIdParam webLogId; "@path", Sql.string path ] (Map.toUpload true) + Custom.single + $"SELECT * FROM {Table.Upload} WHERE web_log_id = @webLogId AND path = @path" + [ webLogIdParam webLogId; "@path", Sql.string path ] + (Map.toUpload true) /// Find all uploaded files for the given web log (excludes data) let findByWebLog webLogId = log.LogTrace "Upload.findByWebLog" - Custom.list $"SELECT id, web_log_id, path, updated_on FROM {Table.Upload} WHERE web_log_id = @webLogId" - [ webLogIdParam webLogId ] (Map.toUpload false) + Custom.list + $"SELECT id, web_log_id, path, updated_on FROM {Table.Upload} WHERE web_log_id = @webLogId" + [ webLogIdParam webLogId ] + (Map.toUpload false) /// Find all uploaded files for the given web log let findByWebLogWithData 
webLogId = log.LogTrace "Upload.findByWebLogWithData" - Custom.list $"SELECT * FROM {Table.Upload} WHERE web_log_id = @webLogId" [ webLogIdParam webLogId ] - (Map.toUpload true) + Custom.list + $"SELECT * FROM {Table.Upload} WHERE web_log_id = @webLogId" + [ webLogIdParam webLogId ] + (Map.toUpload true) /// Restore uploads from a backup let restore uploads = backgroundTask { diff --git a/src/MyWebLog.Data/Postgres/PostgresWebLogData.fs b/src/MyWebLog.Data/Postgres/PostgresWebLogData.fs index 713005b..5ec39d4 100644 --- a/src/MyWebLog.Data/Postgres/PostgresWebLogData.fs +++ b/src/MyWebLog.Data/Postgres/PostgresWebLogData.fs @@ -1,17 +1,18 @@ namespace MyWebLog.Data.Postgres -open BitBadger.Npgsql.FSharp.Documents +open BitBadger.Documents +open BitBadger.Documents.Postgres open Microsoft.Extensions.Logging open MyWebLog open MyWebLog.Data -/// PostgreSQL myWebLog web log data implementation -type PostgresWebLogData (log : ILogger) = +/// PostgreSQL myWebLog web log data implementation +type PostgresWebLogData(log: ILogger) = /// Add a web log - let add (webLog : WebLog) = + let add (webLog: WebLog) = log.LogTrace "WebLog.add" - insert Table.WebLog (WebLogId.toString webLog.Id) webLog + insert Table.WebLog webLog /// Retrieve all web logs let all () = @@ -23,46 +24,60 @@ type PostgresWebLogData (log : ILogger) = log.LogTrace "WebLog.delete" Custom.nonQuery $"""DELETE FROM {Table.PostComment} - WHERE data ->> '{nameof Comment.empty.PostId}' IN - (SELECT id FROM {Table.Post} WHERE {Query.whereDataContains "@criteria"}); + WHERE data ->> '{nameof Comment.Empty.PostId}' + IN (SELECT data ->> '{nameof Post.Empty.Id}' + FROM {Table.Post} + WHERE {Query.whereDataContains "@criteria"}); + DELETE FROM {Table.PostRevision} + WHERE post_id IN (SELECT data ->> 'Id' FROM {Table.Post} WHERE {Query.whereDataContains "@criteria"}); + DELETE FROM {Table.PageRevision} + WHERE page_id IN (SELECT data ->> 'Id' FROM {Table.Page} WHERE {Query.whereDataContains "@criteria"}); 
{Query.Delete.byContains Table.Post}; {Query.Delete.byContains Table.Page}; {Query.Delete.byContains Table.Category}; {Query.Delete.byContains Table.TagMap}; {Query.Delete.byContains Table.WebLogUser}; DELETE FROM {Table.Upload} WHERE web_log_id = @webLogId; - DELETE FROM {Table.WebLog} WHERE id = @webLogId""" + DELETE FROM {Table.WebLog} WHERE {Query.whereById "@webLogId"}""" [ webLogIdParam webLogId; webLogContains webLogId ] /// Find a web log by its host (URL base) - let findByHost (url : string) = + let findByHost (url: string) = log.LogTrace "WebLog.findByHost" - Custom.single (selectWithCriteria Table.WebLog) [ "@criteria", Query.jsonbDocParam {| UrlBase = url |} ] - fromData + Find.firstByContains Table.WebLog {| UrlBase = url |} /// Find a web log by its ID - let findById webLogId = + let findById (webLogId: WebLogId) = log.LogTrace "WebLog.findById" - Find.byId Table.WebLog (WebLogId.toString webLogId) + Find.byId Table.WebLog webLogId - /// Update settings for a web log - let updateSettings (webLog : WebLog) = - log.LogTrace "WebLog.updateSettings" - Update.full Table.WebLog (WebLogId.toString webLog.Id) webLog - - /// Update RSS options for a web log - let updateRssOptions (webLog : WebLog) = backgroundTask { - log.LogTrace "WebLog.updateRssOptions" + /// Update redirect rules for a web log + let updateRedirectRules (webLog: WebLog) = backgroundTask { + log.LogTrace "WebLog.updateRedirectRules" match! findById webLog.Id with - | Some _ -> do! Update.partialById Table.WebLog (WebLogId.toString webLog.Id) {| Rss = webLog.Rss |} + | Some _ -> do! Patch.byId Table.WebLog webLog.Id {| RedirectRules = webLog.RedirectRules |} | None -> () } + /// Update RSS options for a web log + let updateRssOptions (webLog: WebLog) = backgroundTask { + log.LogTrace "WebLog.updateRssOptions" + match! findById webLog.Id with + | Some _ -> do! 
Patch.byId Table.WebLog webLog.Id {| Rss = webLog.Rss |} + | None -> () + } + + /// Update settings for a web log + let updateSettings (webLog: WebLog) = + log.LogTrace "WebLog.updateSettings" + Update.byId Table.WebLog webLog.Id webLog + interface IWebLogData with member _.Add webLog = add webLog - member _.All () = all () + member _.All() = all () member _.Delete webLogId = delete webLogId member _.FindByHost url = findByHost url member _.FindById webLogId = findById webLogId - member _.UpdateSettings webLog = updateSettings webLog + member _.UpdateRedirectRules webLog = updateRedirectRules webLog member _.UpdateRssOptions webLog = updateRssOptions webLog + member _.UpdateSettings webLog = updateSettings webLog diff --git a/src/MyWebLog.Data/Postgres/PostgresWebLogUserData.fs b/src/MyWebLog.Data/Postgres/PostgresWebLogUserData.fs index 80eeee3..8ccf5be 100644 --- a/src/MyWebLog.Data/Postgres/PostgresWebLogUserData.fs +++ b/src/MyWebLog.Data/Postgres/PostgresWebLogUserData.fs @@ -1,18 +1,24 @@ namespace MyWebLog.Data.Postgres -open BitBadger.Npgsql.FSharp.Documents +open BitBadger.Documents +open BitBadger.Documents.Postgres open Microsoft.Extensions.Logging open MyWebLog open MyWebLog.Data open Npgsql.FSharp -/// PostgreSQL myWebLog user data implementation -type PostgresWebLogUserData (log : ILogger) = +/// PostgreSQL myWebLog user data implementation +type PostgresWebLogUserData(log: ILogger) = + + /// Add a user + let add (user: WebLogUser) = + log.LogTrace "WebLogUser.add" + insert Table.WebLogUser user /// Find a user by their ID for the given web log let findById userId webLogId = log.LogTrace "WebLogUser.findById" - Document.findByIdAndWebLog Table.WebLogUser userId WebLogUserId.toString webLogId + Document.findByIdAndWebLog Table.WebLogUser userId webLogId /// Delete a user if they have no posts or pages let delete userId webLogId = backgroundTask { @@ -22,73 +28,70 @@ type PostgresWebLogUserData (log : ILogger) = let criteria = Query.whereDataContains 
"@criteria" let! isAuthor = Custom.scalar - $" SELECT ( EXISTS (SELECT 1 FROM {Table.Page} WHERE {criteria} + $" SELECT ( EXISTS (SELECT 1 FROM {Table.Page} WHERE {criteria}) OR EXISTS (SELECT 1 FROM {Table.Post} WHERE {criteria}) - ) AS {existsName}" - [ "@criteria", Query.jsonbDocParam {| AuthorId = userId |} ] Map.toExists + ) AS it" + [ jsonParam "@criteria" {| AuthorId = userId |} ] + toExists if isAuthor then return Error "User has pages or posts; cannot delete" else - do! Delete.byId Table.WebLogUser (WebLogUserId.toString userId) + do! Delete.byId Table.WebLogUser userId return Ok true | None -> return Error "User does not exist" } /// Find a user by their e-mail address for the given web log - let findByEmail (email : string) webLogId = + let findByEmail (email: string) webLogId = log.LogTrace "WebLogUser.findByEmail" - Custom.single (selectWithCriteria Table.WebLogUser) - [ "@criteria", Query.jsonbDocParam {| webLogDoc webLogId with Email = email |} ] - fromData + Find.firstByContains Table.WebLogUser {| webLogDoc webLogId with Email = email |} /// Get all users for the given web log let findByWebLog webLogId = log.LogTrace "WebLogUser.findByWebLog" Custom.list - $"{selectWithCriteria Table.WebLogUser} ORDER BY LOWER(data->>'{nameof WebLogUser.empty.PreferredName}')" - [ webLogContains webLogId ] fromData + $"{selectWithCriteria Table.WebLogUser} ORDER BY LOWER(data ->> '{nameof WebLogUser.Empty.PreferredName}')" + [ webLogContains webLogId ] + fromData /// Find the names of users by their IDs for the given web log - let findNames webLogId userIds = backgroundTask { + let findNames webLogId (userIds: WebLogUserId list) = backgroundTask { log.LogTrace "WebLogUser.findNames" - let idSql, idParams = inClause "AND id" "id" WebLogUserId.toString userIds + let idSql, idParams = inClause $"AND data ->> '{nameof WebLogUser.Empty.Id}'" "id" userIds let! 
users = - Custom.list $"{selectWithCriteria Table.WebLogUser} {idSql}" (webLogContains webLogId :: idParams) - fromData - return - users - |> List.map (fun u -> { Name = WebLogUserId.toString u.Id; Value = WebLogUser.displayName u }) + Custom.list + $"{selectWithCriteria Table.WebLogUser} {idSql}" + (webLogContains webLogId :: idParams) + fromData + return users |> List.map (fun u -> { Name = string u.Id; Value = u.DisplayName }) } /// Restore users from a backup - let restore (users : WebLogUser list) = backgroundTask { + let restore (users: WebLogUser list) = backgroundTask { log.LogTrace "WebLogUser.restore" let! _ = Configuration.dataSource () |> Sql.fromDataSource - |> Sql.executeTransactionAsync [ - Query.insert Table.WebLogUser, - users |> List.map (fun user -> Query.docParameters (WebLogUserId.toString user.Id) user) - ] + |> Sql.executeTransactionAsync + [ Query.insert Table.WebLogUser, users |> List.map (fun user -> [ jsonParam "@data" user ]) ] () } /// Set a user's last seen date/time to now - let setLastSeen userId webLogId = backgroundTask { + let setLastSeen (userId: WebLogUserId) webLogId = backgroundTask { log.LogTrace "WebLogUser.setLastSeen" - match! Document.existsByWebLog Table.WebLogUser userId WebLogUserId.toString webLogId with - | true -> - do! Update.partialById Table.WebLogUser (WebLogUserId.toString userId) {| LastSeenOn = Some (Noda.now ()) |} + match! Document.existsByWebLog Table.WebLogUser userId webLogId with + | true -> do! 
Patch.byId Table.WebLogUser userId {| LastSeenOn = Some (Noda.now ()) |} | false -> () } - /// Save a user - let save (user : WebLogUser) = - log.LogTrace "WebLogUser.save" - save Table.WebLogUser (WebLogUserId.toString user.Id) user + /// Update a user + let update (user: WebLogUser) = + log.LogTrace "WebLogUser.update" + Update.byId Table.WebLogUser user.Id user interface IWebLogUserData with - member _.Add user = save user + member _.Add user = add user member _.Delete userId webLogId = delete userId webLogId member _.FindByEmail email webLogId = findByEmail email webLogId member _.FindById userId webLogId = findById userId webLogId @@ -96,5 +99,4 @@ type PostgresWebLogUserData (log : ILogger) = member _.FindNames webLogId userIds = findNames webLogId userIds member _.Restore users = restore users member _.SetLastSeen userId webLogId = setLastSeen userId webLogId - member _.Update user = save user - + member _.Update user = update user diff --git a/src/MyWebLog.Data/PostgresData.fs b/src/MyWebLog.Data/PostgresData.fs index 0650379..43d9bef 100644 --- a/src/MyWebLog.Data/PostgresData.fs +++ b/src/MyWebLog.Data/PostgresData.fs @@ -1,43 +1,34 @@ namespace MyWebLog.Data -open BitBadger.Npgsql.Documents -open BitBadger.Npgsql.FSharp.Documents +open BitBadger.Documents +open BitBadger.Documents.Postgres open Microsoft.Extensions.Logging open MyWebLog open MyWebLog.Data.Postgres open Newtonsoft.Json -open Npgsql open Npgsql.FSharp /// Data implementation for PostgreSQL -type PostgresData (source : NpgsqlDataSource, log : ILogger, ser : JsonSerializer) = +type PostgresData(log: ILogger, ser: JsonSerializer) = /// Create any needed tables let ensureTables () = backgroundTask { // Set up the PostgreSQL document store - Configuration.useDataSource source - Configuration.useSerializer - { new IDocumentSerializer with - member _.Serialize<'T> (it : 'T) : string = Utils.serialize ser it - member _.Deserialize<'T> (it : string) : 'T = Utils.deserialize ser it - } + 
Configuration.useSerializer (Utils.createDocumentSerializer ser) let! tables = - Sql.fromDataSource source - |> Sql.query "SELECT tablename FROM pg_tables WHERE schemaname = 'public'" - |> Sql.executeAsync (fun row -> row.string "tablename") + Custom.list + "SELECT tablename FROM pg_tables WHERE schemaname = 'public'" [] (fun row -> row.string "tablename") let needsTable table = not (List.contains table tables) - // Create a document table - let mutable isNew = false let sql = seq { // Theme tables if needsTable Table.Theme then - isNew <- true - Definition.createTable Table.Theme + Query.Definition.ensureTable Table.Theme + Query.Definition.ensureKey Table.Theme if needsTable Table.ThemeAsset then $"CREATE TABLE {Table.ThemeAsset} ( - theme_id TEXT NOT NULL REFERENCES {Table.Theme} (id) ON DELETE CASCADE, + theme_id TEXT NOT NULL, path TEXT NOT NULL, updated_on TIMESTAMPTZ NOT NULL, data BYTEA NOT NULL, @@ -45,84 +36,90 @@ type PostgresData (source : NpgsqlDataSource, log : ILogger, ser : // Web log table if needsTable Table.WebLog then - Definition.createTable Table.WebLog - Definition.createIndex Table.WebLog Optimized + Query.Definition.ensureTable Table.WebLog + Query.Definition.ensureKey Table.WebLog + Query.Definition.ensureDocumentIndex Table.WebLog Optimized // Category table if needsTable Table.Category then - Definition.createTable Table.Category - Definition.createIndex Table.Category Optimized + Query.Definition.ensureTable Table.Category + Query.Definition.ensureKey Table.Category + Query.Definition.ensureDocumentIndex Table.Category Optimized // Web log user table if needsTable Table.WebLogUser then - Definition.createTable Table.WebLogUser - Definition.createIndex Table.WebLogUser Optimized + Query.Definition.ensureTable Table.WebLogUser + Query.Definition.ensureKey Table.WebLogUser + Query.Definition.ensureDocumentIndex Table.WebLogUser Optimized // Page tables if needsTable Table.Page then - Definition.createTable Table.Page - $"CREATE INDEX 
page_web_log_idx ON {Table.Page} ((data ->> '{nameof Page.empty.WebLogId}'))" - $"CREATE INDEX page_author_idx ON {Table.Page} ((data ->> '{nameof Page.empty.AuthorId}'))" - $"CREATE INDEX page_permalink_idx ON {Table.Page} - ((data ->> '{nameof Page.empty.WebLogId}'), (data ->> '{nameof Page.empty.Permalink}'))" + Query.Definition.ensureTable Table.Page + Query.Definition.ensureKey Table.Page + Query.Definition.ensureIndexOn Table.Page "author" [ nameof Page.Empty.AuthorId ] + Query.Definition.ensureIndexOn + Table.Page "permalink" [ nameof Page.Empty.WebLogId; nameof Page.Empty.Permalink ] if needsTable Table.PageRevision then $"CREATE TABLE {Table.PageRevision} ( - page_id TEXT NOT NULL REFERENCES {Table.Page} (id) ON DELETE CASCADE, + page_id TEXT NOT NULL, as_of TIMESTAMPTZ NOT NULL, revision_text TEXT NOT NULL, PRIMARY KEY (page_id, as_of))" // Post tables if needsTable Table.Post then - Definition.createTable Table.Post - $"CREATE INDEX post_web_log_idx ON {Table.Post} ((data ->> '{nameof Post.empty.WebLogId}'))" - $"CREATE INDEX post_author_idx ON {Table.Post} ((data ->> '{nameof Post.empty.AuthorId}'))" - $"CREATE INDEX post_status_idx ON {Table.Post} - ((data ->> '{nameof Post.empty.WebLogId}'), (data ->> '{nameof Post.empty.Status}'), - (data ->> '{nameof Post.empty.UpdatedOn}'))" - $"CREATE INDEX post_permalink_idx ON {Table.Post} - ((data ->> '{nameof Post.empty.WebLogId}'), (data ->> '{nameof Post.empty.Permalink}'))" - $"CREATE INDEX post_category_idx ON {Table.Post} USING GIN ((data['{nameof Post.empty.CategoryIds}']))" - $"CREATE INDEX post_tag_idx ON {Table.Post} USING GIN ((data['{nameof Post.empty.Tags}']))" + Query.Definition.ensureTable Table.Post + Query.Definition.ensureKey Table.Post + Query.Definition.ensureIndexOn Table.Post "author" [ nameof Post.Empty.AuthorId ] + Query.Definition.ensureIndexOn + Table.Post "permalink" [ nameof Post.Empty.WebLogId; nameof Post.Empty.Permalink ] + Query.Definition.ensureIndexOn + Table.Post + "status" + 
[ nameof Post.Empty.WebLogId; nameof Post.Empty.Status; nameof Post.Empty.UpdatedOn ] + $"CREATE INDEX idx_post_category ON {Table.Post} USING GIN ((data['{nameof Post.Empty.CategoryIds}']))" + $"CREATE INDEX idx_post_tag ON {Table.Post} USING GIN ((data['{nameof Post.Empty.Tags}']))" if needsTable Table.PostRevision then $"CREATE TABLE {Table.PostRevision} ( - post_id TEXT NOT NULL REFERENCES {Table.Post} (id) ON DELETE CASCADE, + post_id TEXT NOT NULL, as_of TIMESTAMPTZ NOT NULL, revision_text TEXT NOT NULL, PRIMARY KEY (post_id, as_of))" if needsTable Table.PostComment then - Definition.createTable Table.PostComment - $"CREATE INDEX post_comment_post_idx ON {Table.PostComment} - ((data ->> '{nameof Comment.empty.PostId}'))" + Query.Definition.ensureTable Table.PostComment + Query.Definition.ensureKey Table.PostComment + Query.Definition.ensureIndexOn Table.PostComment "post" [ nameof Comment.Empty.PostId ] // Tag map table if needsTable Table.TagMap then - Definition.createTable Table.TagMap - Definition.createIndex Table.TagMap Optimized + Query.Definition.ensureTable Table.TagMap + Query.Definition.ensureKey Table.TagMap + Query.Definition.ensureDocumentIndex Table.TagMap Optimized // Uploaded file table if needsTable Table.Upload then $"CREATE TABLE {Table.Upload} ( id TEXT NOT NULL PRIMARY KEY, - web_log_id TEXT NOT NULL REFERENCES {Table.WebLog} (id), + web_log_id TEXT NOT NULL, path TEXT NOT NULL, updated_on TIMESTAMPTZ NOT NULL, data BYTEA NOT NULL)" - $"CREATE INDEX upload_web_log_idx ON {Table.Upload} (web_log_id)" - $"CREATE INDEX upload_path_idx ON {Table.Upload} (web_log_id, path)" + $"CREATE INDEX idx_upload_web_log ON {Table.Upload} (web_log_id)" + $"CREATE INDEX idx_upload_path ON {Table.Upload} (web_log_id, path)" // Database version table if needsTable Table.DbVersion then $"CREATE TABLE {Table.DbVersion} (id TEXT NOT NULL PRIMARY KEY)" - $"INSERT INTO {Table.DbVersion} VALUES ('{Utils.currentDbVersion}')" + $"INSERT INTO {Table.DbVersion} 
VALUES ('{Utils.Migration.currentDbVersion}')" } - Sql.fromDataSource source + Configuration.dataSource () + |> Sql.fromDataSource |> Sql.executeTransactionAsync (sql |> Seq.map (fun s -> let parts = s.Replace(" IF NOT EXISTS", "", System.StringComparison.OrdinalIgnoreCase).Split ' ' - if parts[1].ToLowerInvariant () = "table" then + if parts[1].ToLowerInvariant() = "table" then log.LogInformation $"Creating {parts[2]} table..." s, [ [] ]) |> List.ofSeq) @@ -137,40 +134,72 @@ type PostgresData (source : NpgsqlDataSource, log : ILogger, ser : /// Migrate from v2-rc2 to v2 (manual migration required) let migrateV2Rc2ToV2 () = backgroundTask { - Utils.logMigrationStep log "v2-rc2 to v2" "Requires user action" - let! webLogs = - Configuration.dataSource () - |> Sql.fromDataSource - |> Sql.query $"SELECT url_base, slug FROM {Table.WebLog}" - |> Sql.executeAsync (fun row -> row.string "url_base", row.string "slug") + Custom.list + $"SELECT url_base, slug FROM {Table.WebLog}" [] (fun row -> row.string "url_base", row.string "slug") + Utils.Migration.backupAndRestoreRequired log "v2-rc2" "v2" webLogs + } + + /// Migrate from v2 to v2.1 + let migrateV2ToV2point1 () = backgroundTask { + let migration = "v2 to v2.1" + Utils.Migration.logStep log migration "Adding empty redirect rule set to all weblogs" + do! Custom.nonQuery $"""UPDATE {Table.WebLog} SET data = data + '{{ "RedirectRules": [] }}'::json""" [] + + let tables = + [ Table.Category; Table.Page; Table.Post; Table.PostComment; Table.TagMap; Table.Theme; Table.WebLog + Table.WebLogUser ] - [ "** MANUAL DATABASE UPGRADE REQUIRED **"; "" - "The data structure for PostgreSQL changed significantly between v2-rc2 and v2." - "To migrate your data:" - " - Use a v2-rc2 executable to back up each web log" - " - Drop all tables from the database" - " - Use this executable to restore each backup"; "" - "Commands to back up all web logs:" - yield! 
webLogs |> List.map (fun (url, slug) -> sprintf "./myWebLog backup %s v2-rc2.%s.json" url slug) - ] - |> String.concat "\n" - |> log.LogWarning + Utils.Migration.logStep log migration "Adding unique indexes on ID fields" + do! Custom.nonQuery (tables |> List.map Query.Definition.ensureKey |> String.concat "; ") [] - log.LogCritical "myWebLog will now exit" - exit 1 + Utils.Migration.logStep log migration "Dropping old ID columns" + do! Custom.nonQuery (tables |> List.map (sprintf "ALTER TABLE %s DROP COLUMN id") |> String.concat "; ") [] + + Utils.Migration.logStep log migration "Adjusting indexes" + let toDrop = [ "page_web_log_idx"; "post_web_log_idx" ] + do! Custom.nonQuery (toDrop |> List.map (sprintf "DROP INDEX %s") |> String.concat "; ") [] + + let toRename = + [ "idx_category", "idx_category_document" + "idx_tag_map", "idx_tag_map_document" + "idx_web_log", "idx_web_log_document" + "idx_web_log_user", "idx_web_log_user_document" + "page_author_idx", "idx_page_author" + "page_permalink_idx", "idx_page_permalink" + "post_author_idx", "idx_post_author" + "post_status_idx", "idx_post_status" + "post_permalink_idx", "idx_post_permalink" + "post_category_idx", "idx_post_category" + "post_tag_idx", "idx_post_tag" + "post_comment_post_idx", "idx_post_comment_post" + "upload_web_log_idx", "idx_upload_web_log" + "upload_path_idx", "idx_upload_path" ] + do! Custom.nonQuery + (toRename + |> List.map (fun (oldName, newName) -> $"ALTER INDEX {oldName} RENAME TO {newName}") + |> String.concat "; ") + [] + + Utils.Migration.logStep log migration "Setting database to version 2.1" + do! setDbVersion "v2.1" } /// Do required data migration between versions let migrate version = backgroundTask { - match version with - | Some "v2" -> () - | Some "v2-rc2" -> do! migrateV2Rc2ToV2 () - // Future versions will be inserted here - | Some _ - | None -> - log.LogWarning $"Unknown database version; assuming {Utils.currentDbVersion}" - do! 
setDbVersion Utils.currentDbVersion + let mutable v = defaultArg version "" + + if v = "v2-rc2" then + do! migrateV2Rc2ToV2 () + v <- "v2" + + if v = "v2" then + do! migrateV2ToV2point1 () + v <- "v2.1" + + if v <> Utils.Migration.currentDbVersion then + log.LogWarning $"Unknown database version; assuming {Utils.Migration.currentDbVersion}" + do! setDbVersion Utils.Migration.currentDbVersion } interface IData with @@ -192,8 +221,5 @@ type PostgresData (source : NpgsqlDataSource, log : ILogger, ser : do! ensureTables () let! version = Custom.single "SELECT id FROM db_version" [] (fun row -> row.string "id") - match version with - | Some v when v = Utils.currentDbVersion -> () - | Some _ - | None -> do! migrate version + do! migrate version } diff --git a/src/MyWebLog.Data/RethinkDbData.fs b/src/MyWebLog.Data/RethinkDbData.fs index 92ace6e..1c035e3 100644 --- a/src/MyWebLog.Data/RethinkDbData.fs +++ b/src/MyWebLog.Data/RethinkDbData.fs @@ -69,20 +69,20 @@ module private RethinkHelpers = let r = RethinkDB.R /// Verify that the web log ID matches before returning an item - let verifyWebLog<'T> webLogId (prop : 'T -> WebLogId) (f : Net.IConnection -> Task<'T option>) = + let verifyWebLog<'T> webLogId (prop: 'T -> WebLogId) (f: Net.IConnection -> Task<'T option>) = fun conn -> backgroundTask { match! f conn with Some it when (prop it) = webLogId -> return Some it | _ -> return None } /// Get the first item from a list, or None if the list is empty - let tryFirst<'T> (f : Net.IConnection -> Task<'T list>) = + let tryFirst<'T> (f: Net.IConnection -> Task<'T list>) = fun conn -> backgroundTask { let! 
results = f conn return results |> List.tryHead } /// Cast a strongly-typed list to an object list - let objList<'T> (objects : 'T list) = objects |> List.map (fun it -> it :> obj) + let objList<'T> (objects: 'T list) = objects |> List.map (fun it -> it :> obj) open System @@ -92,16 +92,16 @@ open RethinkDb.Driver.FSharp open RethinkHelpers /// RethinkDB implementation of data functions for myWebLog -type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger) = +type RethinkDbData(conn: Net.IConnection, config: DataConfig, log: ILogger) = /// Match theme asset IDs by their prefix (the theme ID) let matchAssetByThemeId themeId = - let keyPrefix = $"^{ThemeId.toString themeId}/" - fun (row : Ast.ReqlExpr) -> row[nameof ThemeAsset.empty.Id].Match keyPrefix :> obj + let keyPrefix = $"^{themeId}/" + fun (row: Ast.ReqlExpr) -> row[nameof ThemeAsset.Empty.Id].Match keyPrefix :> obj /// Function to exclude template text from themes - let withoutTemplateText (row : Ast.ReqlExpr) : obj = - {| Templates = row[nameof Theme.empty.Templates].Without [| nameof ThemeTemplate.empty.Text |] |} + let withoutTemplateText (row: Ast.ReqlExpr) : obj = + {| Templates = row[nameof Theme.Empty.Templates].Merge(r.HashMap(nameof ThemeTemplate.Empty.Text, "")) |} /// Ensure field indexes exist, as well as special indexes for selected tables let ensureIndexes table fields = backgroundTask { @@ -112,27 +112,27 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger List.contains table then - let permalinkIdx = nameof Page.empty.Permalink + let permalinkIdx = nameof Page.Empty.Permalink if not (indexes |> List.contains permalinkIdx) then log.LogInformation $"Creating index {table}.{permalinkIdx}..." do! 
rethink { withTable table indexCreate permalinkIdx - (fun row -> r.Array (row[nameof Page.empty.WebLogId], row[permalinkIdx].Downcase ()) :> obj) + (fun row -> r.Array(row[nameof Page.Empty.WebLogId], row[permalinkIdx].Downcase()) :> obj) write; withRetryOnce; ignoreResult conn } // Prior permalinks are searched when a post or page permalink do not match the current URL - let priorIdx = nameof Post.empty.PriorPermalinks + let priorIdx = nameof Post.Empty.PriorPermalinks if not (indexes |> List.contains priorIdx) then log.LogInformation $"Creating index {table}.{priorIdx}..." do! rethink { withTable table - indexCreate priorIdx (fun row -> row[priorIdx].Downcase () :> obj) [ Multi ] + indexCreate priorIdx [ Multi ] write; withRetryOnce; ignoreResult conn } // Post needs indexes by category and tag (used for counting and retrieving posts) if Table.Post = table then - for idx in [ nameof Post.empty.CategoryIds; nameof Post.empty.Tags ] do + for idx in [ nameof Post.Empty.CategoryIds; nameof Post.Empty.Tags ] do if not (List.contains idx indexes) then log.LogInformation $"Creating index {table}.{idx}..." do! 
rethink { @@ -147,7 +147,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger - [| row[nameof TagMap.empty.WebLogId]; row[nameof TagMap.empty.Tag] |] :> obj) + [| row[nameof TagMap.Empty.WebLogId]; row[nameof TagMap.Empty.Tag] |] :> obj) write; withRetryOnce; ignoreResult conn } if not (indexes |> List.contains Index.WebLogAndUrl) then @@ -155,7 +155,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger - [| row[nameof TagMap.empty.WebLogId]; row[nameof TagMap.empty.UrlValue] |] :> obj) + [| row[nameof TagMap.Empty.WebLogId]; row[nameof TagMap.Empty.UrlValue] |] :> obj) write; withRetryOnce; ignoreResult conn } // Uploaded files need an index by web log ID and path, as that is how they are retrieved @@ -165,7 +165,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger - [| row[nameof Upload.empty.WebLogId]; row[nameof Upload.empty.Path] |] :> obj) + [| row[nameof Upload.Empty.WebLogId]; row[nameof Upload.Empty.Path] |] :> obj) write; withRetryOnce; ignoreResult conn } // Users log on with e-mail @@ -175,14 +175,18 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger - [| row[nameof WebLogUser.empty.WebLogId]; row[nameof WebLogUser.empty.Email] |] :> obj) + [| row[nameof WebLogUser.Empty.WebLogId]; row[nameof WebLogUser.Empty.Email] |] :> obj) write; withRetryOnce; ignoreResult conn } + do! 
rethink { withTable table; indexWait; result; withRetryDefault; ignoreResult conn } } /// The batch size for restoration methods let restoreBatchSize = 100 + /// A value to use when files need to be retrieved without their data + let emptyFile = r.Binary(Array.Empty()) + /// Delete assets for the given theme ID let deleteAssetsByTheme themeId = rethink { withTable Table.ThemeAsset @@ -192,7 +196,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger obj ] + write; withRetryOnce; ignoreResult conn + } + + Utils.Migration.logStep log "v2 to v2.1" "Setting database version to v2.1" + do! setDbVersion "v2.1" + } /// Migrate data between versions let migrate version = backgroundTask { - match version with - | Some v when v = "v2" -> () - | Some v when v = "v2-rc2" -> do! migrateV2Rc2ToV2 () - | Some v when v = "v2-rc1" -> do! migrateV2Rc1ToV2Rc2 () - | Some _ - | None -> - log.LogWarning $"Unknown database version; assuming {Utils.currentDbVersion}" - do! setDbVersion Utils.currentDbVersion + let mutable v = defaultArg version "" + + if v = "v2-rc1" then + do! migrateV2Rc1ToV2Rc2 () + v <- "v2-rc2" + + if v = "v2-rc2" then + do! migrateV2Rc2ToV2 () + v <- "v2" + + if v = "v2" then + do! migrateV2ToV2point1 () + v <- "v2.1" + + if v <> Utils.Migration.currentDbVersion then + log.LogWarning $"Unknown database version; assuming {Utils.Migration.currentDbVersion}" + do! 
setDbVersion Utils.Migration.currentDbVersion } /// The connection for this instance @@ -249,15 +275,15 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.Category - getAll [ webLogId ] (nameof Category.empty.WebLogId) + getAll [ webLogId ] (nameof Category.Empty.WebLogId) count result; withRetryDefault conn } member _.CountTopLevel webLogId = rethink { withTable Table.Category - getAll [ webLogId ] (nameof Category.empty.WebLogId) - filter (nameof Category.empty.ParentId) None + getAll [ webLogId ] (nameof Category.Empty.WebLogId) + filter (nameof Category.Empty.ParentId) None (Default FilterDefaultHandling.Return) count result; withRetryDefault conn } @@ -265,8 +291,8 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.Category - getAll [ webLogId ] (nameof Category.empty.WebLogId) - orderByFunc (fun it -> it[nameof Category.empty.Name].Downcase () :> obj) + getAll [ webLogId ] (nameof Category.Empty.WebLogId) + orderByFunc (fun it -> it[nameof Category.Empty.Name].Downcase() :> obj) result; withRetryDefault conn } let ordered = Utils.orderByHierarchy cats None None [] @@ -282,8 +308,8 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger List.ofSeq let! 
count = rethink { withTable Table.Post - getAll catIds (nameof Post.empty.CategoryIds) - filter (nameof Post.empty.Status) Published + getAll catIds (nameof Post.Empty.CategoryIds) + filter (nameof Post.Empty.Status) Published distinct count result; withRetryDefault conn @@ -298,8 +324,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger Array.tryFind (fun c -> fst c = cat.Id) |> Option.map snd - |> Option.defaultValue 0 - }) + |> Option.defaultValue 0 }) |> Array.ofSeq } @@ -309,11 +334,11 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger verifyWebLog webLogId (fun c -> c.WebLogId) <| conn + |> verifyWebLog webLogId _.WebLogId <| conn member _.FindByWebLog webLogId = rethink { withTable Table.Category - getAll [ webLogId ] (nameof Category.empty.WebLogId) + getAll [ webLogId ] (nameof Category.Empty.WebLogId) result; withRetryDefault conn } @@ -323,24 +348,26 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.Category - filter (nameof Category.empty.ParentId) catId + filter (nameof Category.Empty.ParentId) catId count result; withRetryDefault conn } if children > 0 then do! rethink { withTable Table.Category - filter (nameof Category.empty.ParentId) catId - update [ nameof Category.empty.ParentId, cat.ParentId :> obj ] + filter (nameof Category.Empty.ParentId) catId + update [ nameof Category.Empty.ParentId, cat.ParentId :> obj ] write; withRetryDefault; ignoreResult conn } // Delete the category off all posts where it is assigned do! 
rethink { withTable Table.Post - getAll [ webLogId ] (nameof Post.empty.WebLogId) - filter (fun row -> row[nameof Post.empty.CategoryIds].Contains catId :> obj) + getAll [ webLogId ] (nameof Post.Empty.WebLogId) + filter (fun row -> row[nameof Post.Empty.CategoryIds].Contains catId :> obj) update (fun row -> - {| CategoryIds = r.Array(row[nameof Post.empty.CategoryIds]).Remove catId |} :> obj) + {| CategoryIds = + row[nameof Post.Empty.CategoryIds].CoerceTo("array") + .SetDifference(r.Array(catId)) |} :> obj) write; withRetryDefault; ignoreResult conn } // Delete the category itself @@ -386,26 +413,26 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.Page - getAll [ webLogId ] (nameof Page.empty.WebLogId) - without [ nameof Page.empty.Text - nameof Page.empty.Metadata - nameof Page.empty.Revisions - nameof Page.empty.PriorPermalinks ] - orderByFunc (fun row -> row[nameof Page.empty.Title].Downcase () :> obj) + getAll [ webLogId ] (nameof Page.Empty.WebLogId) + merge (r.HashMap(nameof Page.Empty.Text, "") + .With(nameof Page.Empty.Metadata, [||]) + .With(nameof Page.Empty.Revisions, [||]) + .With(nameof Page.Empty.PriorPermalinks, [||])) + orderByFunc (fun row -> row[nameof Page.Empty.Title].Downcase() :> obj) result; withRetryDefault conn } member _.CountAll webLogId = rethink { withTable Table.Page - getAll [ webLogId ] (nameof Page.empty.WebLogId) + getAll [ webLogId ] (nameof Page.Empty.WebLogId) count result; withRetryDefault conn } member _.CountListed webLogId = rethink { withTable Table.Page - getAll [ webLogId ] (nameof Page.empty.WebLogId) - filter (nameof Page.empty.IsInPageList) true + getAll [ webLogId ] (nameof Page.Empty.WebLogId) + filter (nameof Page.Empty.IsInPageList) true count result; withRetryDefault conn } @@ -414,7 +441,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.Page getAll [ pageId ] - filter (fun row -> row[nameof 
Page.empty.WebLogId].Eq webLogId :> obj) + filter (fun row -> row[nameof Page.Empty.WebLogId].Eq webLogId :> obj) delete write; withRetryDefault conn } @@ -422,19 +449,22 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { + rethink { withTable Table.Page - get pageId - without [ nameof Page.empty.PriorPermalinks; nameof Page.empty.Revisions ] - resultOption; withRetryOptionDefault + getAll [ pageId ] + filter (nameof Page.Empty.WebLogId) webLogId + merge (r.HashMap(nameof Page.Empty.PriorPermalinks, [||]) + .With(nameof Page.Empty.Revisions, [||])) + result; withRetryDefault } - |> verifyWebLog webLogId (fun it -> it.WebLogId) <| conn + |> tryFirst <| conn member _.FindByPermalink permalink webLogId = rethink { withTable Table.Page - getAll [ [| webLogId :> obj; permalink |] ] (nameof Page.empty.Permalink) - without [ nameof Page.empty.PriorPermalinks; nameof Page.empty.Revisions ] + getAll [ [| webLogId :> obj; permalink |] ] (nameof Page.Empty.Permalink) + merge (r.HashMap(nameof Page.Empty.PriorPermalinks, [||]) + .With(nameof Page.Empty.Revisions, [||])) limit 1 result; withRetryDefault } @@ -444,14 +474,14 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.Page - getAll (objList permalinks) (nameof Page.empty.PriorPermalinks) - filter (nameof Page.empty.WebLogId) webLogId - without [ nameof Page.empty.Revisions; nameof Page.empty.Text ] + getAll (objList permalinks) (nameof Page.Empty.PriorPermalinks) + filter (nameof Page.Empty.WebLogId) webLogId + without [ nameof Page.Empty.Revisions; nameof Page.Empty.Text ] limit 1 result; withRetryDefault } |> tryFirst) conn - return result |> Option.map (fun pg -> pg.Permalink) + return result |> Option.map _.Permalink } member _.FindFullById pageId webLogId = @@ -460,30 +490,32 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger verifyWebLog webLogId (fun it -> it.WebLogId) <| conn + |> verifyWebLog webLogId 
_.WebLogId <| conn member _.FindFullByWebLog webLogId = rethink { withTable Table.Page - getAll [ webLogId ] (nameof Page.empty.WebLogId) + getAll [ webLogId ] (nameof Page.Empty.WebLogId) resultCursor; withRetryCursorDefault; toList conn } member _.FindListed webLogId = rethink { withTable Table.Page - getAll [ webLogId ] (nameof Page.empty.WebLogId) - filter [ nameof Page.empty.IsInPageList, true :> obj ] - without [ nameof Page.empty.Text; nameof Page.empty.PriorPermalinks; nameof Page.empty.Revisions ] - orderBy (nameof Page.empty.Title) + getAll [ webLogId ] (nameof Page.Empty.WebLogId) + filter [ nameof Page.Empty.IsInPageList, true :> obj ] + merge (r.HashMap(nameof Page.Empty.Text, "") + .With(nameof Page.Empty.PriorPermalinks, [||]) + .With(nameof Page.Empty.Revisions, [||])) + orderBy (nameof Page.Empty.Title) result; withRetryDefault conn } member _.FindPageOfPages webLogId pageNbr = rethink { withTable Table.Page - getAll [ webLogId ] (nameof Page.empty.WebLogId) - without [ nameof Page.empty.Metadata - nameof Page.empty.PriorPermalinks - nameof Page.empty.Revisions ] - orderByFunc (fun row -> row[nameof Page.empty.Title].Downcase ()) + getAll [ webLogId ] (nameof Page.Empty.WebLogId) + merge (r.HashMap(nameof Page.Empty.Metadata, [||]) + .With(nameof Page.Empty.PriorPermalinks, [||]) + .With(nameof Page.Empty.Revisions, [||])) + orderByFunc (fun row -> row[nameof Page.Empty.Title].Downcase()) skip ((pageNbr - 1) * 25) limit 25 result; withRetryDefault conn @@ -511,7 +543,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger obj ] + update [ nameof Page.Empty.PriorPermalinks, permalinks :> obj ] write; withRetryDefault; ignoreResult conn } return true @@ -540,8 +572,8 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.Post - getAll [ webLogId ] (nameof Post.empty.WebLogId) - filter (nameof Post.empty.Status) status + getAll [ webLogId ] (nameof Post.Empty.WebLogId) + filter 
(nameof Post.Empty.Status) status count result; withRetryDefault conn } @@ -550,7 +582,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.Post getAll [ postId ] - filter (fun row -> row[nameof Post.empty.WebLogId].Eq webLogId :> obj) + filter (fun row -> row[nameof Post.Empty.WebLogId].Eq webLogId :> obj) delete write; withRetryDefault conn } @@ -558,19 +590,22 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { + rethink { withTable Table.Post - get postId - without [ nameof Post.empty.PriorPermalinks; nameof Post.empty.Revisions ] - resultOption; withRetryOptionDefault + getAll [ postId ] + filter (nameof Post.Empty.WebLogId) webLogId + merge (r.HashMap(nameof Post.Empty.PriorPermalinks, [||]) + .With(nameof Post.Empty.Revisions, [||])) + result; withRetryDefault } - |> verifyWebLog webLogId (fun p -> p.WebLogId) <| conn + |> tryFirst <| conn member _.FindByPermalink permalink webLogId = rethink { withTable Table.Post - getAll [ [| webLogId :> obj; permalink |] ] (nameof Post.empty.Permalink) - without [ nameof Post.empty.PriorPermalinks; nameof Post.empty.Revisions ] + getAll [ [| webLogId :> obj; permalink |] ] (nameof Post.Empty.Permalink) + merge (r.HashMap(nameof Post.Empty.PriorPermalinks, [||]) + .With(nameof Post.Empty.Revisions, [||])) limit 1 result; withRetryDefault } @@ -582,36 +617,37 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger verifyWebLog webLogId (fun p -> p.WebLogId) <| conn + |> verifyWebLog webLogId _.WebLogId <| conn member _.FindCurrentPermalink permalinks webLogId = backgroundTask { let! 
result = (rethink { withTable Table.Post - getAll (objList permalinks) (nameof Post.empty.PriorPermalinks) - filter (nameof Post.empty.WebLogId) webLogId - without [ nameof Post.empty.Revisions; nameof Post.empty.Text ] + getAll (objList permalinks) (nameof Post.Empty.PriorPermalinks) + filter (nameof Post.Empty.WebLogId) webLogId + without [ nameof Post.Empty.Revisions; nameof Post.Empty.Text ] limit 1 result; withRetryDefault } |> tryFirst) conn - return result |> Option.map (fun post -> post.Permalink) + return result |> Option.map _.Permalink } member _.FindFullByWebLog webLogId = rethink { withTable Table.Post - getAll [ webLogId ] (nameof Post.empty.WebLogId) + getAll [ webLogId ] (nameof Post.Empty.WebLogId) resultCursor; withRetryCursorDefault; toList conn } member _.FindPageOfCategorizedPosts webLogId categoryIds pageNbr postsPerPage = rethink { withTable Table.Post - getAll (objList categoryIds) (nameof Post.empty.CategoryIds) - filter [ nameof Post.empty.WebLogId, webLogId :> obj - nameof Post.empty.Status, Published ] - without [ nameof Post.empty.PriorPermalinks; nameof Post.empty.Revisions ] + getAll (objList categoryIds) (nameof Post.Empty.CategoryIds) + filter [ nameof Post.Empty.WebLogId, webLogId :> obj + nameof Post.Empty.Status, Published ] + merge (r.HashMap(nameof Post.Empty.PriorPermalinks, [||]) + .With(nameof Post.Empty.Revisions, [||])) distinct - orderByDescending (nameof Post.empty.PublishedOn) + orderByDescending (nameof Post.Empty.PublishedOn) skip ((pageNbr - 1) * postsPerPage) limit (postsPerPage + 1) result; withRetryDefault conn @@ -619,10 +655,12 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.Post - getAll [ webLogId ] (nameof Post.empty.WebLogId) - without [ nameof Post.empty.PriorPermalinks; nameof Post.empty.Revisions ] + getAll [ webLogId ] (nameof Post.Empty.WebLogId) + merge (r.HashMap(nameof Post.Empty.Text, "") + .With(nameof Post.Empty.PriorPermalinks, [||]) + 
.With(nameof Post.Empty.Revisions, [||])) orderByFuncDescending (fun row -> - row[nameof Post.empty.PublishedOn].Default_ (nameof Post.empty.UpdatedOn) :> obj) + row[nameof Post.Empty.PublishedOn].Default_(nameof Post.Empty.UpdatedOn) :> obj) skip ((pageNbr - 1) * postsPerPage) limit (postsPerPage + 1) result; withRetryDefault conn @@ -630,10 +668,11 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.Post - getAll [ webLogId ] (nameof Post.empty.WebLogId) - filter (nameof Post.empty.Status) Published - without [ nameof Post.empty.PriorPermalinks; nameof Post.empty.Revisions ] - orderByDescending (nameof Post.empty.PublishedOn) + getAll [ webLogId ] (nameof Post.Empty.WebLogId) + filter (nameof Post.Empty.Status) Published + merge (r.HashMap(nameof Post.Empty.PriorPermalinks, [||]) + .With(nameof Post.Empty.Revisions, [||])) + orderByDescending (nameof Post.Empty.PublishedOn) skip ((pageNbr - 1) * postsPerPage) limit (postsPerPage + 1) result; withRetryDefault conn @@ -641,11 +680,12 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.Post - getAll [ tag ] (nameof Post.empty.Tags) - filter [ nameof Post.empty.WebLogId, webLogId :> obj - nameof Post.empty.Status, Published ] - without [ nameof Post.empty.PriorPermalinks; nameof Post.empty.Revisions ] - orderByDescending (nameof Post.empty.PublishedOn) + getAll [ tag ] (nameof Post.Empty.Tags) + filter [ nameof Post.Empty.WebLogId, webLogId :> obj + nameof Post.Empty.Status, Published ] + merge (r.HashMap(nameof Post.Empty.PriorPermalinks, [||]) + .With(nameof Post.Empty.Revisions, [||])) + orderByDescending (nameof Post.Empty.PublishedOn) skip ((pageNbr - 1) * postsPerPage) limit (postsPerPage + 1) result; withRetryDefault conn @@ -655,10 +695,11 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.Post - getAll [ webLogId ] (nameof Post.empty.WebLogId) - filter (fun row -> 
row[nameof Post.empty.PublishedOn].Lt publishedOn :> obj) - without [ nameof Post.empty.PriorPermalinks; nameof Post.empty.Revisions ] - orderByDescending (nameof Post.empty.PublishedOn) + getAll [ webLogId ] (nameof Post.Empty.WebLogId) + filter (fun row -> row[nameof Post.Empty.PublishedOn].Lt publishedOn :> obj) + merge (r.HashMap(nameof Post.Empty.PriorPermalinks, [||]) + .With(nameof Post.Empty.Revisions, [||])) + orderByDescending (nameof Post.Empty.PublishedOn) limit 1 result; withRetryDefault } @@ -666,10 +707,11 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.Post - getAll [ webLogId ] (nameof Post.empty.WebLogId) - filter (fun row -> row[nameof Post.empty.PublishedOn].Gt publishedOn :> obj) - without [ nameof Post.empty.PriorPermalinks; nameof Post.empty.Revisions ] - orderBy (nameof Post.empty.PublishedOn) + getAll [ webLogId ] (nameof Post.Empty.WebLogId) + filter (fun row -> row[nameof Post.Empty.PublishedOn].Gt publishedOn :> obj) + merge (r.HashMap(nameof Post.Empty.PriorPermalinks, [||]) + .With(nameof Post.Empty.Revisions, [||])) + orderBy (nameof Post.Empty.PublishedOn) limit 1 result; withRetryDefault } @@ -686,27 +728,25 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger + do! rethink { + withTable Table.Post + get post.Id + replace post + write; withRetryDefault; ignoreResult conn + } + | None -> () } - member _.UpdatePriorPermalinks postId webLogId permalinks = backgroundTask { - match! ( - rethink { - withTable Table.Post - get postId - without [ nameof Post.empty.Revisions; nameof Post.empty.PriorPermalinks ] - resultOption; withRetryOptionDefault - } - |> verifyWebLog webLogId (fun p -> p.WebLogId)) conn with + member this.UpdatePriorPermalinks postId webLogId permalinks = backgroundTask { + match! this.FindById postId webLogId with | Some _ -> do! 
rethink { withTable Table.Post get postId - update [ nameof Post.empty.PriorPermalinks, permalinks :> obj ] + update [ nameof Post.Empty.PriorPermalinks, permalinks :> obj ] write; withRetryDefault; ignoreResult conn } return true @@ -721,7 +761,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.TagMap getAll [ tagMapId ] - filter (fun row -> row[nameof TagMap.empty.WebLogId].Eq webLogId :> obj) + filter (fun row -> row[nameof TagMap.Empty.WebLogId].Eq webLogId :> obj) delete write; withRetryDefault conn } @@ -734,7 +774,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger verifyWebLog webLogId (fun tm -> tm.WebLogId) <| conn + |> verifyWebLog webLogId _.WebLogId <| conn member _.FindByUrlValue urlValue webLogId = rethink { @@ -747,9 +787,9 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.TagMap - between [| webLogId :> obj; r.Minval () |] [| webLogId :> obj; r.Maxval () |] + between [| webLogId :> obj; r.Minval() |] [| webLogId :> obj; r.Maxval() |] [ Index Index.WebLogAndTag ] - orderBy (nameof TagMap.empty.Tag) + orderBy (nameof TagMap.Empty.Tag) result; withRetryDefault conn } @@ -781,16 +821,16 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.Theme - filter (fun row -> row[nameof Theme.empty.Id].Ne "admin" :> obj) + filter (fun row -> row[nameof Theme.Empty.Id].Ne "admin" :> obj) merge withoutTemplateText - orderBy (nameof Theme.empty.Id) + orderBy (nameof Theme.Empty.Id) result; withRetryDefault conn } member _.Exists themeId = backgroundTask { let! 
count = rethink { withTable Table.Theme - filter (nameof Theme.empty.Id) themeId + filter (nameof Theme.Empty.Id) themeId count result; withRetryDefault conn } @@ -803,12 +843,14 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { - withTable Table.Theme - get themeId - merge withoutTemplateText - resultOption; withRetryOptionDefault conn - } + member _.FindByIdWithoutText themeId = + rethink { + withTable Table.Theme + getAll [ themeId ] + merge withoutTemplateText + result; withRetryDefault + } + |> tryFirst <| conn member this.Delete themeId = backgroundTask { match! this.FindByIdWithoutText themeId with @@ -837,7 +879,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.ThemeAsset - without [ nameof ThemeAsset.empty.Data ] + merge (r.HashMap(nameof ThemeAsset.Empty.Data, emptyFile)) result; withRetryDefault conn } @@ -852,7 +894,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.ThemeAsset filter (matchAssetByThemeId themeId) - without [ nameof ThemeAsset.empty.Data ] + merge (r.HashMap(nameof ThemeAsset.Empty.Data, emptyFile)) result; withRetryDefault conn } @@ -886,7 +928,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger verifyWebLog webLogId (fun u -> u.WebLogId) <| conn + |> verifyWebLog webLogId _.WebLogId <| conn match upload with | Some up -> do! 
rethink { @@ -895,8 +937,8 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger return Result.Error $"Upload ID {UploadId.toString uploadId} not found" + return Ok (string up.Path) + | None -> return Result.Error $"Upload ID {uploadId} not found" } member _.FindByPath path webLogId = @@ -909,15 +951,15 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.Upload - between [| webLogId :> obj; r.Minval () |] [| webLogId :> obj; r.Maxval () |] + between [| webLogId :> obj; r.Minval() |] [| webLogId :> obj; r.Maxval() |] [ Index Index.WebLogAndPath ] - without [ nameof Upload.empty.Data ] + merge (r.HashMap(nameof Upload.Empty.Data, emptyFile)) resultCursor; withRetryCursorDefault; toList conn } member _.FindByWebLogWithData webLogId = rethink { withTable Table.Upload - between [| webLogId :> obj; r.Minval () |] [| webLogId :> obj; r.Maxval () |] + between [| webLogId :> obj; r.Minval() |] [| webLogId :> obj; r.Maxval() |] [ Index Index.WebLogAndPath ] resultCursor; withRetryCursorDefault; toList conn } @@ -926,7 +968,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger List.chunkBySize 5 do do! rethink { - withTable Table.TagMap + withTable Table.Upload insert batch write; withRetryOnce; ignoreResult conn } @@ -949,24 +991,24 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { + let! thePostIds = rethink<{| Id: string |} list> { withTable Table.Post - getAll [ webLogId ] (nameof Post.empty.WebLogId) - pluck [ nameof Post.empty.Id ] + getAll [ webLogId ] (nameof Post.Empty.WebLogId) + pluck [ nameof Post.Empty.Id ] result; withRetryOnce conn } if not (List.isEmpty thePostIds) then let postIds = thePostIds |> List.map (fun it -> it.Id :> obj) do! 
rethink { withTable Table.Comment - getAll postIds (nameof Comment.empty.PostId) + getAll postIds (nameof Comment.Empty.PostId) delete write; withRetryOnce; ignoreResult conn } // Tag mappings do not have a straightforward webLogId index do! rethink { withTable Table.TagMap - between [| webLogId :> obj; r.Minval () |] [| webLogId :> obj; r.Maxval () |] + between [| webLogId :> obj; r.Minval() |] [| webLogId :> obj; r.Maxval() |] [ Index Index.WebLogAndTag ] delete write; withRetryOnce; ignoreResult conn @@ -974,7 +1016,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger obj; r.Minval () |] [| webLogId :> obj; r.Maxval () |] + between [| webLogId :> obj; r.Minval() |] [| webLogId :> obj; r.Maxval() |] [ Index Index.WebLogAndPath ] delete write; withRetryOnce; ignoreResult conn @@ -982,7 +1024,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.WebLog - getAll [ url ] (nameof WebLog.empty.UrlBase) + getAll [ url ] (nameof WebLog.Empty.UrlBase) limit 1 result; withRetryDefault } @@ -1009,10 +1051,17 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger obj ] + write; withRetryDefault; ignoreResult conn + } + member _.UpdateRssOptions webLog = rethink { withTable Table.WebLog get webLog.Id - update [ nameof WebLog.empty.Rss, webLog.Rss :> obj ] + update [ nameof WebLog.Empty.Rss, webLog.Rss :> obj ] write; withRetryDefault; ignoreResult conn } @@ -1049,22 +1098,22 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger verifyWebLog webLogId (fun u -> u.WebLogId) <| conn + |> verifyWebLog webLogId _.WebLogId <| conn member this.Delete userId webLogId = backgroundTask { match! this.FindById userId webLogId with | Some _ -> let! 
pageCount = rethink { withTable Table.Page - getAll [ webLogId ] (nameof Page.empty.WebLogId) - filter (nameof Page.empty.AuthorId) userId + getAll [ webLogId ] (nameof Page.Empty.WebLogId) + filter (nameof Page.Empty.AuthorId) userId count result; withRetryDefault conn } let! postCount = rethink { withTable Table.Post - getAll [ webLogId ] (nameof Post.empty.WebLogId) - filter (nameof Post.empty.AuthorId) userId + getAll [ webLogId ] (nameof Post.Empty.WebLogId) + filter (nameof Post.Empty.AuthorId) userId count result; withRetryDefault conn } @@ -1092,8 +1141,8 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.WebLogUser - getAll [ webLogId ] (nameof WebLogUser.empty.WebLogId) - orderByFunc (fun row -> row[nameof WebLogUser.empty.PreferredName].Downcase ()) + getAll [ webLogId ] (nameof WebLogUser.Empty.WebLogId) + orderByFunc (fun row -> row[nameof WebLogUser.Empty.PreferredName].Downcase()) result; withRetryDefault conn } @@ -1101,12 +1150,10 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { withTable Table.WebLogUser getAll (objList userIds) - filter (nameof WebLogUser.empty.WebLogId) webLogId + filter (nameof WebLogUser.Empty.WebLogId) webLogId result; withRetryDefault conn } - return - users - |> List.map (fun u -> { Name = WebLogUserId.toString u.Id; Value = WebLogUser.displayName u }) + return users |> List.map (fun u -> { Name = string u.Id; Value = u.DisplayName }) } member _.Restore users = backgroundTask { @@ -1124,7 +1171,7 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger obj ] + update [ nameof WebLogUser.Empty.LastSeenOn, Noda.now () :> obj ] write; withRetryOnce; ignoreResult conn } | None -> () @@ -1169,21 +1216,19 @@ type RethinkDbData (conn : Net.IConnection, config : DataConfig, log : ILogger { + let! 
version = rethink<{| Id: string |} list> { withTable Table.DbVersion limit 1 result; withRetryOnce conn } - match List.tryHead version with - | Some v when v.Id = "v2-rc2" -> () - | it -> do! migrate (it |> Option.map (fun x -> x.Id)) + do! migrate (List.tryHead version |> Option.map _.Id) } diff --git a/src/MyWebLog.Data/SQLite/Helpers.fs b/src/MyWebLog.Data/SQLite/Helpers.fs deleted file mode 100644 index 150085f..0000000 --- a/src/MyWebLog.Data/SQLite/Helpers.fs +++ /dev/null @@ -1,314 +0,0 @@ -/// Helper functions for the SQLite data implementation -[] -module MyWebLog.Data.SQLite.Helpers - -open System -open Microsoft.Data.Sqlite -open MyWebLog -open MyWebLog.Data -open NodaTime.Text - -/// Run a command that returns a count -let count (cmd : SqliteCommand) = backgroundTask { - let! it = cmd.ExecuteScalarAsync () - return int (it :?> int64) -} - -/// Create a list of items from the given data reader -let toList<'T> (it : SqliteDataReader -> 'T) (rdr : SqliteDataReader) = - seq { while rdr.Read () do it rdr } - |> List.ofSeq - -/// Verify that the web log ID matches before returning an item -let verifyWebLog<'T> webLogId (prop : 'T -> WebLogId) (it : SqliteDataReader -> 'T) (rdr : SqliteDataReader) = - if rdr.Read () then - let item = it rdr - if prop item = webLogId then Some item else None - else None - -/// Execute a command that returns no data -let write (cmd : SqliteCommand) = backgroundTask { - let! 
_ = cmd.ExecuteNonQueryAsync () - () -} - -/// Add a possibly-missing parameter, substituting null for None -let maybe<'T> (it : 'T option) : obj = match it with Some x -> x :> obj | None -> DBNull.Value - -/// Create a value for a Duration -let durationParam = - DurationPattern.Roundtrip.Format - -/// Create a value for an Instant -let instantParam = - InstantPattern.General.Format - -/// Create an optional value for a Duration -let maybeDuration = - Option.map durationParam >> maybe - -/// Create an optional value for an Instant -let maybeInstant = - Option.map instantParam >> maybe - -/// Create the SQL and parameters for an IN clause -let inClause<'T> colNameAndPrefix paramName (valueFunc: 'T -> string) (items : 'T list) = - if List.isEmpty items then "", [] - else - let mutable idx = 0 - items - |> List.skip 1 - |> List.fold (fun (itemS, itemP) it -> - idx <- idx + 1 - $"{itemS}, @%s{paramName}{idx}", (SqliteParameter ($"@%s{paramName}{idx}", valueFunc it) :: itemP)) - (Seq.ofList items - |> Seq.map (fun it -> - $"%s{colNameAndPrefix} IN (@%s{paramName}0", [ SqliteParameter ($"@%s{paramName}0", valueFunc it) ]) - |> Seq.head) - |> function sql, ps -> $"{sql})", ps - - -/// Functions to map domain items from a data reader -module Map = - - open System.IO - - /// Get a boolean value from a data reader - let getBoolean col (rdr : SqliteDataReader) = rdr.GetBoolean (rdr.GetOrdinal col) - - /// Get a date/time value from a data reader - let getDateTime col (rdr : SqliteDataReader) = rdr.GetDateTime (rdr.GetOrdinal col) - - /// Get a Guid value from a data reader - let getGuid col (rdr : SqliteDataReader) = rdr.GetGuid (rdr.GetOrdinal col) - - /// Get an int value from a data reader - let getInt col (rdr : SqliteDataReader) = rdr.GetInt32 (rdr.GetOrdinal col) - - /// Get a long (64-bit int) value from a data reader - let getLong col (rdr : SqliteDataReader) = rdr.GetInt64 (rdr.GetOrdinal col) - - /// Get a BLOB stream value from a data reader - let getStream col 
(rdr : SqliteDataReader) = rdr.GetStream (rdr.GetOrdinal col) - - /// Get a string value from a data reader - let getString col (rdr : SqliteDataReader) = rdr.GetString (rdr.GetOrdinal col) - - /// Parse a Duration from the given value - let parseDuration value = - match DurationPattern.Roundtrip.Parse value with - | it when it.Success -> it.Value - | it -> raise it.Exception - - /// Get a Duration value from a data reader - let getDuration col rdr = - getString col rdr |> parseDuration - - /// Parse an Instant from the given value - let parseInstant value = - match InstantPattern.General.Parse value with - | it when it.Success -> it.Value - | it -> raise it.Exception - - /// Get an Instant value from a data reader - let getInstant col rdr = - getString col rdr |> parseInstant - - /// Get a timespan value from a data reader - let getTimeSpan col (rdr : SqliteDataReader) = rdr.GetTimeSpan (rdr.GetOrdinal col) - - /// Get a possibly null boolean value from a data reader - let tryBoolean col (rdr : SqliteDataReader) = - if rdr.IsDBNull (rdr.GetOrdinal col) then None else Some (getBoolean col rdr) - - /// Get a possibly null date/time value from a data reader - let tryDateTime col (rdr : SqliteDataReader) = - if rdr.IsDBNull (rdr.GetOrdinal col) then None else Some (getDateTime col rdr) - - /// Get a possibly null Guid value from a data reader - let tryGuid col (rdr : SqliteDataReader) = - if rdr.IsDBNull (rdr.GetOrdinal col) then None else Some (getGuid col rdr) - - /// Get a possibly null int value from a data reader - let tryInt col (rdr : SqliteDataReader) = - if rdr.IsDBNull (rdr.GetOrdinal col) then None else Some (getInt col rdr) - - /// Get a possibly null string value from a data reader - let tryString col (rdr : SqliteDataReader) = - if rdr.IsDBNull (rdr.GetOrdinal col) then None else Some (getString col rdr) - - /// Get a possibly null Duration value from a data reader - let tryDuration col rdr = - tryString col rdr |> Option.map parseDuration - - /// Get a 
possibly null Instant value from a data reader - let tryInstant col rdr = - tryString col rdr |> Option.map parseInstant - - /// Get a possibly null timespan value from a data reader - let tryTimeSpan col (rdr : SqliteDataReader) = - if rdr.IsDBNull (rdr.GetOrdinal col) then None else Some (getTimeSpan col rdr) - - /// Map an id field to a category ID - let toCategoryId rdr = getString "id" rdr |> CategoryId - - /// Create a category from the current row in the given data reader - let toCategory rdr : Category = - { Id = toCategoryId rdr - WebLogId = getString "web_log_id" rdr |> WebLogId - Name = getString "name" rdr - Slug = getString "slug" rdr - Description = tryString "description" rdr - ParentId = tryString "parent_id" rdr |> Option.map CategoryId - } - - /// Create a custom feed from the current row in the given data reader - let toCustomFeed ser rdr : CustomFeed = - { Id = getString "id" rdr |> CustomFeedId - Source = getString "source" rdr |> CustomFeedSource.parse - Path = getString "path" rdr |> Permalink - Podcast = tryString "podcast" rdr |> Option.map (Utils.deserialize ser) - } - - /// Create a permalink from the current row in the given data reader - let toPermalink rdr = getString "permalink" rdr |> Permalink - - /// Create a page from the current row in the given data reader - let toPage ser rdr : Page = - { Page.empty with - Id = getString "id" rdr |> PageId - WebLogId = getString "web_log_id" rdr |> WebLogId - AuthorId = getString "author_id" rdr |> WebLogUserId - Title = getString "title" rdr - Permalink = toPermalink rdr - PublishedOn = getInstant "published_on" rdr - UpdatedOn = getInstant "updated_on" rdr - IsInPageList = getBoolean "is_in_page_list" rdr - Template = tryString "template" rdr - Text = getString "page_text" rdr - Metadata = tryString "meta_items" rdr - |> Option.map (Utils.deserialize ser) - |> Option.defaultValue [] - } - - /// Create a post from the current row in the given data reader - let toPost ser rdr : Post = - { 
Post.empty with - Id = getString "id" rdr |> PostId - WebLogId = getString "web_log_id" rdr |> WebLogId - AuthorId = getString "author_id" rdr |> WebLogUserId - Status = getString "status" rdr |> PostStatus.parse - Title = getString "title" rdr - Permalink = toPermalink rdr - PublishedOn = tryInstant "published_on" rdr - UpdatedOn = getInstant "updated_on" rdr - Template = tryString "template" rdr - Text = getString "post_text" rdr - Episode = tryString "episode" rdr |> Option.map (Utils.deserialize ser) - Metadata = tryString "meta_items" rdr - |> Option.map (Utils.deserialize ser) - |> Option.defaultValue [] - } - - /// Create a revision from the current row in the given data reader - let toRevision rdr : Revision = - { AsOf = getInstant "as_of" rdr - Text = getString "revision_text" rdr |> MarkupText.parse - } - - /// Create a tag mapping from the current row in the given data reader - let toTagMap rdr : TagMap = - { Id = getString "id" rdr |> TagMapId - WebLogId = getString "web_log_id" rdr |> WebLogId - Tag = getString "tag" rdr - UrlValue = getString "url_value" rdr - } - - /// Create a theme from the current row in the given data reader (excludes templates) - let toTheme rdr : Theme = - { Theme.empty with - Id = getString "id" rdr |> ThemeId - Name = getString "name" rdr - Version = getString "version" rdr - } - - /// Create a theme asset from the current row in the given data reader - let toThemeAsset includeData rdr : ThemeAsset = - let assetData = - if includeData then - use dataStream = new MemoryStream () - use blobStream = getStream "data" rdr - blobStream.CopyTo dataStream - dataStream.ToArray () - else - [||] - { Id = ThemeAssetId (ThemeId (getString "theme_id" rdr), getString "path" rdr) - UpdatedOn = getInstant "updated_on" rdr - Data = assetData - } - - /// Create a theme template from the current row in the given data reader - let toThemeTemplate includeText rdr : ThemeTemplate = - { Name = getString "name" rdr - Text = if includeText then 
getString "template" rdr else "" - } - - /// Create an uploaded file from the current row in the given data reader - let toUpload includeData rdr : Upload = - let data = - if includeData then - use dataStream = new MemoryStream () - use blobStream = getStream "data" rdr - blobStream.CopyTo dataStream - dataStream.ToArray () - else - [||] - { Id = getString "id" rdr |> UploadId - WebLogId = getString "web_log_id" rdr |> WebLogId - Path = getString "path" rdr |> Permalink - UpdatedOn = getInstant "updated_on" rdr - Data = data - } - - /// Create a web log from the current row in the given data reader - let toWebLog rdr : WebLog = - { Id = getString "id" rdr |> WebLogId - Name = getString "name" rdr - Slug = getString "slug" rdr - Subtitle = tryString "subtitle" rdr - DefaultPage = getString "default_page" rdr - PostsPerPage = getInt "posts_per_page" rdr - ThemeId = getString "theme_id" rdr |> ThemeId - UrlBase = getString "url_base" rdr - TimeZone = getString "time_zone" rdr - AutoHtmx = getBoolean "auto_htmx" rdr - Uploads = getString "uploads" rdr |> UploadDestination.parse - Rss = { - IsFeedEnabled = getBoolean "is_feed_enabled" rdr - FeedName = getString "feed_name" rdr - ItemsInFeed = tryInt "items_in_feed" rdr - IsCategoryEnabled = getBoolean "is_category_enabled" rdr - IsTagEnabled = getBoolean "is_tag_enabled" rdr - Copyright = tryString "copyright" rdr - CustomFeeds = [] - } - } - - /// Create a web log user from the current row in the given data reader - let toWebLogUser rdr : WebLogUser = - { Id = getString "id" rdr |> WebLogUserId - WebLogId = getString "web_log_id" rdr |> WebLogId - Email = getString "email" rdr - FirstName = getString "first_name" rdr - LastName = getString "last_name" rdr - PreferredName = getString "preferred_name" rdr - PasswordHash = getString "password_hash" rdr - Url = tryString "url" rdr - AccessLevel = getString "access_level" rdr |> AccessLevel.parse - CreatedOn = getInstant "created_on" rdr - LastSeenOn = tryInstant 
"last_seen_on" rdr - } - -/// Add a web log ID parameter -let addWebLogId (cmd : SqliteCommand) webLogId = - cmd.Parameters.AddWithValue ("@webLogId", WebLogId.toString webLogId) |> ignore diff --git a/src/MyWebLog.Data/SQLite/SQLiteCategoryData.fs b/src/MyWebLog.Data/SQLite/SQLiteCategoryData.fs index 75728b8..4cf6104 100644 --- a/src/MyWebLog.Data/SQLite/SQLiteCategoryData.fs +++ b/src/MyWebLog.Data/SQLite/SQLiteCategoryData.fs @@ -1,69 +1,43 @@ namespace MyWebLog.Data.SQLite open System.Threading.Tasks +open BitBadger.Documents +open BitBadger.Documents.Sqlite open Microsoft.Data.Sqlite +open Microsoft.Extensions.Logging open MyWebLog open MyWebLog.Data +open Newtonsoft.Json -/// SQLite myWebLog category data implementation -type SQLiteCategoryData (conn : SqliteConnection) = +/// SQLite myWebLog category data implementation +type SQLiteCategoryData(conn: SqliteConnection, ser: JsonSerializer, log: ILogger) = - /// Add parameters for category INSERT or UPDATE statements - let addCategoryParameters (cmd : SqliteCommand) (cat : Category) = - [ cmd.Parameters.AddWithValue ("@id", CategoryId.toString cat.Id) - cmd.Parameters.AddWithValue ("@webLogId", WebLogId.toString cat.WebLogId) - cmd.Parameters.AddWithValue ("@name", cat.Name) - cmd.Parameters.AddWithValue ("@slug", cat.Slug) - cmd.Parameters.AddWithValue ("@description", maybe cat.Description) - cmd.Parameters.AddWithValue ("@parentId", maybe (cat.ParentId |> Option.map CategoryId.toString)) - ] |> ignore - - /// Add a category - let add cat = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- - "INSERT INTO category ( - id, web_log_id, name, slug, description, parent_id - ) VALUES ( - @id, @webLogId, @name, @slug, @description, @parentId - )" - addCategoryParameters cmd cat - let! 
_ = cmd.ExecuteNonQueryAsync () - () - } + /// The name of the parent ID field + let parentIdField = nameof Category.Empty.ParentId /// Count all categories for the given web log - let countAll webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT COUNT(id) FROM category WHERE web_log_id = @webLogId" - addWebLogId cmd webLogId - return! count cmd - } + let countAll webLogId = + log.LogTrace "Category.countAll" + Document.countByWebLog Table.Category webLogId conn /// Count all top-level categories for the given web log - let countTopLevel webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- - "SELECT COUNT(id) FROM category WHERE web_log_id = @webLogId AND parent_id IS NULL" - addWebLogId cmd webLogId - return! count cmd - } + let countTopLevel webLogId = + log.LogTrace "Category.countTopLevel" + conn.customScalar + $"{Document.Query.countByWebLog Table.Category} AND data ->> '{parentIdField}' IS NULL" + [ webLogParam webLogId ] + (toCount >> int) + + /// Find all categories for the given web log + let findByWebLog webLogId = + log.LogTrace "Category.findByWebLog" + Document.findByWebLog Table.Category webLogId conn /// Retrieve all categories for the given web log in a DotLiquid-friendly format let findAllForView webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT * FROM category WHERE web_log_id = @webLogId" - addWebLogId cmd webLogId - use! rdr = cmd.ExecuteReaderAsync () - let cats = - seq { - while rdr.Read () do - Map.toCategory rdr - } - |> Seq.sortBy (fun cat -> cat.Name.ToLowerInvariant ()) - |> List.ofSeq - do! rdr.CloseAsync () - let ordered = Utils.orderByHierarchy cats None None [] + log.LogTrace "Category.findAllForView" + let! cats = findByWebLog webLogId + let ordered = Utils.orderByHierarchy (cats |> List.sortBy _.Name.ToLowerInvariant()) None None [] let! 
counts = ordered |> Seq.map (fun it -> backgroundTask { @@ -71,104 +45,80 @@ type SQLiteCategoryData (conn : SqliteConnection) = let catSql, catParams = ordered |> Seq.filter (fun cat -> cat.ParentNames |> Array.contains it.Name) - |> Seq.map (fun cat -> cat.Id) + |> Seq.map _.Id |> Seq.append (Seq.singleton it.Id) |> List.ofSeq - |> inClause "AND pc.category_id" "catId" id - cmd.Parameters.Clear () - addWebLogId cmd webLogId - cmd.Parameters.AddRange catParams - cmd.CommandText <- $" - SELECT COUNT(DISTINCT p.id) - FROM post p - INNER JOIN post_category pc ON pc.post_id = p.id - WHERE p.web_log_id = @webLogId - AND p.status = 'Published' - {catSql}" - let! postCount = count cmd - return it.Id, postCount - }) + |> inJsonArray Table.Post (nameof Post.Empty.CategoryIds) "catId" + let query = $""" + SELECT COUNT(DISTINCT data ->> '{nameof Post.Empty.Id}') + FROM {Table.Post} + WHERE {Document.Query.whereByWebLog} + AND {Query.whereByField (Field.EQ (nameof Post.Empty.Status) "") $"'{string Published}'"} + AND {catSql}""" + let! postCount = conn.customScalar query (webLogParam webLogId :: catParams) toCount + return it.Id, int postCount + }) |> Task.WhenAll return ordered |> Seq.map (fun cat -> { cat with - PostCount = counts - |> Array.tryFind (fun c -> fst c = cat.Id) - |> Option.map snd - |> Option.defaultValue 0 + PostCount = defaultArg (counts |> Array.tryFind (fun c -> fst c = cat.Id) |> Option.map snd) 0 }) |> Array.ofSeq } - /// Find a category by its ID for the given web log - let findById catId webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT * FROM category WHERE id = @id" - cmd.Parameters.AddWithValue ("@id", CategoryId.toString catId) |> ignore - use! 
rdr = cmd.ExecuteReaderAsync () - return Helpers.verifyWebLog webLogId (fun c -> c.WebLogId) Map.toCategory rdr - } - /// Find all categories for the given web log - let findByWebLog webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT * FROM category WHERE web_log_id = @webLogId" - cmd.Parameters.AddWithValue ("@webLogId", WebLogId.toString webLogId) |> ignore - use! rdr = cmd.ExecuteReaderAsync () - return toList Map.toCategory rdr - } + /// Find a category by its ID for the given web log + let findById catId webLogId = + log.LogTrace "Category.findById" + Document.findByIdAndWebLog Table.Category catId webLogId conn /// Delete a category let delete catId webLogId = backgroundTask { + log.LogTrace "Category.delete" match! findById catId webLogId with | Some cat -> - use cmd = conn.CreateCommand () // Reassign any children to the category's parent category - cmd.CommandText <- "SELECT COUNT(id) FROM category WHERE parent_id = @parentId" - cmd.Parameters.AddWithValue ("@parentId", CategoryId.toString catId) |> ignore - let! children = count cmd - if children > 0 then - cmd.CommandText <- "UPDATE category SET parent_id = @newParentId WHERE parent_id = @parentId" - cmd.Parameters.AddWithValue ("@newParentId", maybe (cat.ParentId |> Option.map CategoryId.toString)) - |> ignore - do! write cmd + let! children = conn.countByField Table.Category (Field.EQ parentIdField (string catId)) + if children > 0L then + let parent = Field.EQ parentIdField (string catId) + match cat.ParentId with + | Some _ -> do! conn.patchByField Table.Category parent {| ParentId = cat.ParentId |} + | None -> do! 
conn.removeFieldsByField Table.Category parent [ parentIdField ] // Delete the category off all posts where it is assigned, and the category itself - cmd.CommandText <- - "DELETE FROM post_category - WHERE category_id = @id - AND post_id IN (SELECT id FROM post WHERE web_log_id = @webLogId); - DELETE FROM category WHERE id = @id" - cmd.Parameters.Clear () - let _ = cmd.Parameters.AddWithValue ("@id", CategoryId.toString catId) - addWebLogId cmd webLogId - do! write cmd - return if children = 0 then CategoryDeleted else ReassignedChildCategories + let catIdField = nameof Post.Empty.CategoryIds + let! posts = + conn.customList + $"SELECT data ->> '{nameof Post.Empty.Id}', data -> '{catIdField}' + FROM {Table.Post} + WHERE {Document.Query.whereByWebLog} + AND EXISTS + (SELECT 1 + FROM json_each({Table.Post}.data -> '{catIdField}') + WHERE json_each.value = @id)" + [ idParam catId; webLogParam webLogId ] + (fun rdr -> rdr.GetString 0, Utils.deserialize ser (rdr.GetString 1)) + for postId, cats in posts do + do! conn.patchById + Table.Post postId {| CategoryIds = cats |> List.filter (fun it -> it <> string catId) |} + do! conn.deleteById Table.Category catId + return if children = 0L then CategoryDeleted else ReassignedChildCategories | None -> return CategoryNotFound } + /// Save a category + let save cat = + log.LogTrace "Category.save" + conn.save Table.Category cat + /// Restore categories from a backup let restore cats = backgroundTask { - for cat in cats do - do! add cat - } - - /// Update a category - let update cat = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- - "UPDATE category - SET name = @name, - slug = @slug, - description = @description, - parent_id = @parentId - WHERE id = @id - AND web_log_id = @webLogId" - addCategoryParameters cmd cat - do! write cmd + log.LogTrace "Category.restore" + for cat in cats do do! 
save cat } interface ICategoryData with - member _.Add cat = add cat + member _.Add cat = save cat member _.CountAll webLogId = countAll webLogId member _.CountTopLevel webLogId = countTopLevel webLogId member _.FindAllForView webLogId = findAllForView webLogId @@ -176,4 +126,4 @@ type SQLiteCategoryData (conn : SqliteConnection) = member _.FindByWebLog webLogId = findByWebLog webLogId member _.Delete catId webLogId = delete catId webLogId member _.Restore cats = restore cats - member _.Update cat = update cat + member _.Update cat = save cat diff --git a/src/MyWebLog.Data/SQLite/SQLiteHelpers.fs b/src/MyWebLog.Data/SQLite/SQLiteHelpers.fs new file mode 100644 index 0000000..bfb8952 --- /dev/null +++ b/src/MyWebLog.Data/SQLite/SQLiteHelpers.fs @@ -0,0 +1,307 @@ +/// Helper functions for the SQLite data implementation +[] +module MyWebLog.Data.SQLite.SQLiteHelpers + +/// The table names used in the SQLite implementation +[] +module Table = + + /// Categories + [] + let Category = "category" + + /// Database Version + [] + let DbVersion = "db_version" + + /// Pages + [] + let Page = "page" + + /// Page Revisions + [] + let PageRevision = "page_revision" + + /// Posts + [] + let Post = "post" + + /// Post Comments + [] + let PostComment = "post_comment" + + /// Post Revisions + [] + let PostRevision = "post_revision" + + /// Tag/URL Mappings + [] + let TagMap = "tag_map" + + /// Themes + [] + let Theme = "theme" + + /// Theme Assets + [] + let ThemeAsset = "theme_asset" + + /// Uploads + [] + let Upload = "upload" + + /// Web Logs + [] + let WebLog = "web_log" + + /// Users + [] + let WebLogUser = "web_log_user" + + +open System +open Microsoft.Data.Sqlite +open MyWebLog +open MyWebLog.Data +open NodaTime.Text + +/// Execute a command that returns no data +let write (cmd: SqliteCommand) = backgroundTask { + let! 
_ = cmd.ExecuteNonQueryAsync() + () +} + +/// Add a possibly-missing parameter, substituting null for None +let maybe<'T> (it: 'T option) : obj = match it with Some x -> x :> obj | None -> DBNull.Value + +/// Create a value for an Instant +let instantParam = + InstantPattern.General.Format + +/// Create an optional value for an Instant +let maybeInstant = + Option.map instantParam >> maybe + +/// Create the SQL and parameters for an EXISTS applied to a JSON array +let inJsonArray<'T> table jsonField paramName (items: 'T list) = + if List.isEmpty items then "", [] + else + let mutable idx = 0 + items + |> List.skip 1 + |> List.fold (fun (itemS, itemP) it -> + idx <- idx + 1 + $"{itemS}, @%s{paramName}{idx}", (SqliteParameter($"@%s{paramName}{idx}", string it) :: itemP)) + (Seq.ofList items + |> Seq.map (fun it -> $"(@%s{paramName}0", [ SqliteParameter($"@%s{paramName}0", string it) ]) + |> Seq.head) + |> function + sql, ps -> + $"EXISTS (SELECT 1 FROM json_each(%s{table}.data, '$.%s{jsonField}') WHERE value IN {sql}))", ps + +/// Create the SQL and parameters for an IN clause +let inClause<'T> colNameAndPrefix paramName (valueFunc: 'T -> string) (items: 'T list) = + if List.isEmpty items then "", [] + else + let mutable idx = 0 + items + |> List.skip 1 + |> List.fold (fun (itemS, itemP) it -> + idx <- idx + 1 + $"{itemS}, @%s{paramName}{idx}", (SqliteParameter ($"@%s{paramName}{idx}", valueFunc it) :: itemP)) + (Seq.ofList items + |> Seq.map (fun it -> + $"%s{colNameAndPrefix} IN (@%s{paramName}0", [ SqliteParameter ($"@%s{paramName}0", valueFunc it) ]) + |> Seq.head) + |> function sql, ps -> $"{sql})", ps + + +/// Functions to map domain items from a data reader +module Map = + + open System.IO + + /// Get a boolean value from a data reader + let getBoolean col (rdr: SqliteDataReader) = rdr.GetBoolean(rdr.GetOrdinal col) + + /// Get a date/time value from a data reader + let getDateTime col (rdr: SqliteDataReader) = rdr.GetDateTime(rdr.GetOrdinal col) + + /// Get a 
Guid value from a data reader + let getGuid col (rdr: SqliteDataReader) = rdr.GetGuid(rdr.GetOrdinal col) + + /// Get an int value from a data reader + let getInt col (rdr: SqliteDataReader) = rdr.GetInt32(rdr.GetOrdinal col) + + /// Get a long (64-bit int) value from a data reader + let getLong col (rdr: SqliteDataReader) = rdr.GetInt64(rdr.GetOrdinal col) + + /// Get a BLOB stream value from a data reader + let getStream col (rdr: SqliteDataReader) = rdr.GetStream(rdr.GetOrdinal col) + + /// Get a string value from a data reader + let getString col (rdr: SqliteDataReader) = rdr.GetString(rdr.GetOrdinal col) + + /// Parse an Instant from the given value + let parseInstant value = + match InstantPattern.General.Parse value with + | it when it.Success -> it.Value + | it -> raise it.Exception + + /// Get an Instant value from a data reader + let getInstant col rdr = + getString col rdr |> parseInstant + + /// Get a timespan value from a data reader + let getTimeSpan col (rdr: SqliteDataReader) = rdr.GetTimeSpan(rdr.GetOrdinal col) + + /// Get a possibly null boolean value from a data reader + let tryBoolean col (rdr: SqliteDataReader) = + if rdr.IsDBNull(rdr.GetOrdinal col) then None else Some (getBoolean col rdr) + + /// Get a possibly null date/time value from a data reader + let tryDateTime col (rdr: SqliteDataReader) = + if rdr.IsDBNull(rdr.GetOrdinal col) then None else Some (getDateTime col rdr) + + /// Get a possibly null Guid value from a data reader + let tryGuid col (rdr: SqliteDataReader) = + if rdr.IsDBNull(rdr.GetOrdinal col) then None else Some (getGuid col rdr) + + /// Get a possibly null int value from a data reader + let tryInt col (rdr: SqliteDataReader) = + if rdr.IsDBNull(rdr.GetOrdinal col) then None else Some (getInt col rdr) + + /// Get a possibly null string value from a data reader + let tryString col (rdr: SqliteDataReader) = + if rdr.IsDBNull(rdr.GetOrdinal col) then None else Some (getString col rdr) + + /// Get a possibly null timespan 
value from a data reader + let tryTimeSpan col (rdr: SqliteDataReader) = + if rdr.IsDBNull(rdr.GetOrdinal col) then None else Some (getTimeSpan col rdr) + + /// Create a permalink from the current row in the given data reader + let toPermalink rdr = getString "permalink" rdr |> Permalink + + /// Create a revision from the current row in the given data reader + let toRevision rdr : Revision = + { AsOf = getInstant "as_of" rdr + Text = getString "revision_text" rdr |> MarkupText.Parse } + + /// Create a theme asset from the current row in the given data reader + let toThemeAsset includeData rdr : ThemeAsset = + let assetData = + if includeData then + use dataStream = new MemoryStream() + use blobStream = getStream "data" rdr + blobStream.CopyTo dataStream + dataStream.ToArray() + else + [||] + { Id = ThemeAssetId (ThemeId (getString "theme_id" rdr), getString "path" rdr) + UpdatedOn = getInstant "updated_on" rdr + Data = assetData } + + /// Create an uploaded file from the current row in the given data reader + let toUpload includeData rdr : Upload = + let data = + if includeData then + use dataStream = new MemoryStream() + use blobStream = getStream "data" rdr + blobStream.CopyTo dataStream + dataStream.ToArray () + else + [||] + { Id = getString "id" rdr |> UploadId + WebLogId = getString "web_log_id" rdr |> WebLogId + Path = getString "path" rdr |> Permalink + UpdatedOn = getInstant "updated_on" rdr + Data = data } + + +/// Create a named parameter +let sqlParam name (value: obj) = + SqliteParameter(name, value) + +/// Create a web log ID parameter +let webLogParam (webLogId: WebLogId) = + sqlParam "@webLogId" (string webLogId) + + +open BitBadger.Documents +open BitBadger.Documents.Sqlite +open BitBadger.Documents.Sqlite.WithConn + +/// Functions for manipulating documents +module Document = + + /// Queries to assist with document manipulation + module Query = + + /// Fragment to add a web log ID condition to a WHERE clause (parameter @webLogId) + let 
whereByWebLog = + Query.whereByField (Field.EQ "WebLogId" "") "@webLogId" + + /// A SELECT query to count documents for a given web log ID + let countByWebLog table = + $"{Query.Count.all table} WHERE {whereByWebLog}" + + /// A query to select from a table by the document's ID and its web log ID + let selectByIdAndWebLog table = + $"{Query.Find.byId table} AND {whereByWebLog}" + + /// A query to select from a table by its web log ID + let selectByWebLog table = + $"{Query.selectFromTable table} WHERE {whereByWebLog}" + + /// Count documents for the given web log ID + let countByWebLog table (webLogId: WebLogId) conn = backgroundTask { + let! count = Count.byField table (Field.EQ "WebLogId" (string webLogId)) conn + return int count + } + + /// Find a document by its ID and web log ID + let findByIdAndWebLog<'TKey, 'TDoc> table (key: 'TKey) webLogId conn = + Custom.single (Query.selectByIdAndWebLog table) [ idParam key; webLogParam webLogId ] fromData<'TDoc> conn + + /// Find documents for the given web log + let findByWebLog<'TDoc> table (webLogId: WebLogId) conn = + Find.byField<'TDoc> table (Field.EQ "WebLogId" (string webLogId)) conn + + +/// Functions to support revisions +module Revisions = + + /// Find all revisions for the given entity + let findByEntityId<'TKey> revTable entityTable (key: 'TKey) conn = + Custom.list + $"SELECT as_of, revision_text FROM %s{revTable} WHERE %s{entityTable}_id = @id ORDER BY as_of DESC" + [ idParam key ] + Map.toRevision + conn + + /// Find all revisions for all posts for the given web log + let findByWebLog<'TKey> revTable entityTable (keyFunc: string -> 'TKey) webLogId conn = + Custom.list + $"SELECT pr.* + FROM %s{revTable} pr + INNER JOIN %s{entityTable} p ON p.data ->> 'Id' = pr.{entityTable}_id + WHERE p.{Document.Query.whereByWebLog} + ORDER BY as_of DESC" + [ webLogParam webLogId ] + (fun rdr -> keyFunc (Map.getString $"{entityTable}_id" rdr), Map.toRevision rdr) + conn + + /// Update a page or post's revisions + let 
update<'TKey> revTable entityTable (key: 'TKey) oldRevs newRevs conn = backgroundTask { + let toDelete, toAdd = Utils.diffRevisions oldRevs newRevs + for delRev in toDelete do + do! Custom.nonQuery + $"DELETE FROM %s{revTable} WHERE %s{entityTable}_id = @id AND as_of = @asOf" + [ idParam key; sqlParam "@asOf" (instantParam delRev.AsOf) ] + conn + for addRev in toAdd do + do! Custom.nonQuery + $"INSERT INTO {revTable} VALUES (@id, @asOf, @text)" + [ idParam key; sqlParam "asOf" (instantParam addRev.AsOf); sqlParam "@text" (string addRev.Text) ] + conn + } diff --git a/src/MyWebLog.Data/SQLite/SQLitePageData.fs b/src/MyWebLog.Data/SQLite/SQLitePageData.fs index 5562bcc..17af376 100644 --- a/src/MyWebLog.Data/SQLite/SQLitePageData.fs +++ b/src/MyWebLog.Data/SQLite/SQLitePageData.fs @@ -1,300 +1,173 @@ namespace MyWebLog.Data.SQLite open System.Threading.Tasks +open BitBadger.Documents +open BitBadger.Documents.Sqlite open Microsoft.Data.Sqlite +open Microsoft.Extensions.Logging open MyWebLog open MyWebLog.Data -open Newtonsoft.Json -/// SQLite myWebLog page data implementation -type SQLitePageData (conn : SqliteConnection, ser : JsonSerializer) = +/// SQLite myWebLog page data implementation +type SQLitePageData(conn: SqliteConnection, log: ILogger) = + + /// The JSON field name for the permalink + let linkName = nameof Page.Empty.Permalink + + /// The JSON field name for the "is in page list" flag + let pgListName = nameof Page.Empty.IsInPageList + + /// The JSON field for the title of the page + let titleField = $"data ->> '{nameof Page.Empty.Title}'" // SUPPORT FUNCTIONS - /// Add parameters for page INSERT or UPDATE statements - let addPageParameters (cmd : SqliteCommand) (page : Page) = - [ cmd.Parameters.AddWithValue ("@id", PageId.toString page.Id) - cmd.Parameters.AddWithValue ("@webLogId", WebLogId.toString page.WebLogId) - cmd.Parameters.AddWithValue ("@authorId", WebLogUserId.toString page.AuthorId) - cmd.Parameters.AddWithValue ("@title", page.Title) - 
cmd.Parameters.AddWithValue ("@permalink", Permalink.toString page.Permalink) - cmd.Parameters.AddWithValue ("@publishedOn", instantParam page.PublishedOn) - cmd.Parameters.AddWithValue ("@updatedOn", instantParam page.UpdatedOn) - cmd.Parameters.AddWithValue ("@isInPageList", page.IsInPageList) - cmd.Parameters.AddWithValue ("@template", maybe page.Template) - cmd.Parameters.AddWithValue ("@text", page.Text) - cmd.Parameters.AddWithValue ("@metaItems", maybe (if List.isEmpty page.Metadata then None - else Some (Utils.serialize ser page.Metadata))) - ] |> ignore - - /// Append revisions and permalinks to a page - let appendPageRevisionsAndPermalinks (page : Page) = backgroundTask { - use cmd = conn.CreateCommand () - cmd.Parameters.AddWithValue ("@pageId", PageId.toString page.Id) |> ignore - - cmd.CommandText <- "SELECT permalink FROM page_permalink WHERE page_id = @pageId" - use! rdr = cmd.ExecuteReaderAsync () - let page = { page with PriorPermalinks = toList Map.toPermalink rdr } - do! rdr.CloseAsync () - - cmd.CommandText <- "SELECT as_of, revision_text FROM page_revision WHERE page_id = @pageId ORDER BY as_of DESC" - use! rdr = cmd.ExecuteReaderAsync () - return { page with Revisions = toList Map.toRevision rdr } + /// Append revisions to a page + let appendPageRevisions (page : Page) = backgroundTask { + log.LogTrace "Page.appendPageRevisions" + let! 
revisions = Revisions.findByEntityId Table.PageRevision Table.Page page.Id conn + return { page with Revisions = revisions } } - /// Shorthand for mapping a data reader to a page - let toPage = - Map.toPage ser - - /// Return a page with no text (or prior permalinks or revisions) - let pageWithoutText rdr = - { toPage rdr with Text = "" } - - /// Update a page's prior permalinks - let updatePagePermalinks pageId oldLinks newLinks = backgroundTask { - let toDelete, toAdd = Utils.diffPermalinks oldLinks newLinks - if List.isEmpty toDelete && List.isEmpty toAdd then - return () - else - use cmd = conn.CreateCommand () - [ cmd.Parameters.AddWithValue ("@pageId", PageId.toString pageId) - cmd.Parameters.Add ("@link", SqliteType.Text) - ] |> ignore - let runCmd link = backgroundTask { - cmd.Parameters["@link"].Value <- Permalink.toString link - do! write cmd - } - cmd.CommandText <- "DELETE FROM page_permalink WHERE page_id = @pageId AND permalink = @link" - toDelete - |> List.map runCmd - |> Task.WhenAll - |> ignore - cmd.CommandText <- "INSERT INTO page_permalink VALUES (@pageId, @link)" - toAdd - |> List.map runCmd - |> Task.WhenAll - |> ignore - } + /// Create a page with no prior permalinks + let pageWithoutLinks rdr = + { fromData rdr with PriorPermalinks = [] } /// Update a page's revisions - let updatePageRevisions pageId oldRevs newRevs = backgroundTask { - let toDelete, toAdd = Utils.diffRevisions oldRevs newRevs - if List.isEmpty toDelete && List.isEmpty toAdd then - return () - else - use cmd = conn.CreateCommand () - let runCmd withText rev = backgroundTask { - cmd.Parameters.Clear () - [ cmd.Parameters.AddWithValue ("@pageId", PageId.toString pageId) - cmd.Parameters.AddWithValue ("@asOf", instantParam rev.AsOf) - ] |> ignore - if withText then cmd.Parameters.AddWithValue ("@text", MarkupText.toString rev.Text) |> ignore - do! 
write cmd - } - cmd.CommandText <- "DELETE FROM page_revision WHERE page_id = @pageId AND as_of = @asOf" - toDelete - |> List.map (runCmd false) - |> Task.WhenAll - |> ignore - cmd.CommandText <- "INSERT INTO page_revision VALUES (@pageId, @asOf, @text)" - toAdd - |> List.map (runCmd true) - |> Task.WhenAll - |> ignore - } + let updatePageRevisions (pageId: PageId) oldRevs newRevs = + log.LogTrace "Page.updatePageRevisions" + Revisions.update Table.PageRevision Table.Page pageId oldRevs newRevs conn // IMPLEMENTATION FUNCTIONS /// Add a page - let add page = backgroundTask { - use cmd = conn.CreateCommand () - // The page itself - cmd.CommandText <- - "INSERT INTO page ( - id, web_log_id, author_id, title, permalink, published_on, updated_on, is_in_page_list, template, - page_text, meta_items - ) VALUES ( - @id, @webLogId, @authorId, @title, @permalink, @publishedOn, @updatedOn, @isInPageList, @template, - @text, @metaItems - )" - addPageParameters cmd page - do! write cmd - do! updatePagePermalinks page.Id [] page.PriorPermalinks - do! updatePageRevisions page.Id [] page.Revisions + let add (page: Page) = backgroundTask { + log.LogTrace "Page.add" + do! conn.insert Table.Page { page with Revisions = [] } + do! updatePageRevisions page.Id [] page.Revisions } - /// Get all pages for a web log (without text, revisions, prior permalinks, or metadata) - let all webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT * FROM page WHERE web_log_id = @webLogId ORDER BY LOWER(title)" - addWebLogId cmd webLogId - use! 
rdr = cmd.ExecuteReaderAsync () - return toList pageWithoutText rdr - } + /// Get all pages for a web log (without text, metadata, revisions, or prior permalinks) + let all webLogId = + log.LogTrace "Page.all" + conn.customList + $"{Query.selectFromTable Table.Page} WHERE {Document.Query.whereByWebLog} ORDER BY LOWER({titleField})" + [ webLogParam webLogId ] + (fun rdr -> { fromData rdr with Text = ""; Metadata = []; PriorPermalinks = [] }) /// Count all pages for the given web log - let countAll webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT COUNT(id) FROM page WHERE web_log_id = @webLogId" - addWebLogId cmd webLogId - return! count cmd - } + let countAll webLogId = + log.LogTrace "Page.countAll" + Document.countByWebLog Table.Page webLogId conn /// Count all pages shown in the page list for the given web log - let countListed webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- - "SELECT COUNT(id) - FROM page - WHERE web_log_id = @webLogId - AND is_in_page_list = @isInPageList" - addWebLogId cmd webLogId - cmd.Parameters.AddWithValue ("@isInPageList", true) |> ignore - return! count cmd - } + let countListed webLogId = + log.LogTrace "Page.countListed" + conn.customScalar + $"""{Document.Query.countByWebLog Table.Page} AND {Query.whereByField (Field.EQ pgListName "") "true"}""" + [ webLogParam webLogId ] + (toCount >> int) /// Find a page by its ID (without revisions and prior permalinks) let findById pageId webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT * FROM page WHERE id = @id" - cmd.Parameters.AddWithValue ("@id", PageId.toString pageId) |> ignore - use! rdr = cmd.ExecuteReaderAsync () - return Helpers.verifyWebLog webLogId (fun it -> it.WebLogId) (Map.toPage ser) rdr + log.LogTrace "Page.findById" + match! 
Document.findByIdAndWebLog Table.Page pageId webLogId conn with + | Some page -> return Some { page with PriorPermalinks = [] } + | None -> return None } /// Find a complete page by its ID let findFullById pageId webLogId = backgroundTask { - match! findById pageId webLogId with + log.LogTrace "Page.findFullById" + match! Document.findByIdAndWebLog Table.Page pageId webLogId conn with | Some page -> - let! page = appendPageRevisionsAndPermalinks page + let! page = appendPageRevisions page return Some page | None -> return None } + // TODO: need to handle when the page being deleted is the home page + /// Delete a page by its ID let delete pageId webLogId = backgroundTask { + log.LogTrace "Page.delete" match! findById pageId webLogId with | Some _ -> - use cmd = conn.CreateCommand () - cmd.Parameters.AddWithValue ("@id", PageId.toString pageId) |> ignore - cmd.CommandText <- - "DELETE FROM page_revision WHERE page_id = @id; - DELETE FROM page_permalink WHERE page_id = @id; - DELETE FROM page WHERE id = @id" - do! write cmd + do! conn.customNonQuery + $"DELETE FROM {Table.PageRevision} WHERE page_id = @id; {Query.Delete.byId Table.Page}" + [ idParam pageId ] return true | None -> return false } /// Find a page by its permalink for the given web log - let findByPermalink permalink webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT * FROM page WHERE web_log_id = @webLogId AND permalink = @link" - addWebLogId cmd webLogId - cmd.Parameters.AddWithValue ("@link", Permalink.toString permalink) |> ignore - use! 
rdr = cmd.ExecuteReaderAsync () - return if rdr.Read () then Some (toPage rdr) else None - } + let findByPermalink (permalink: Permalink) webLogId = + log.LogTrace "Page.findByPermalink" + let linkParam = Field.EQ linkName (string permalink) + conn.customSingle + $"""{Document.Query.selectByWebLog Table.Page} AND {Query.whereByField linkParam "@link"}""" + (addFieldParam "@link" linkParam [ webLogParam webLogId ]) + pageWithoutLinks /// Find the current permalink within a set of potential prior permalinks for the given web log - let findCurrentPermalink permalinks webLogId = backgroundTask { - use cmd = conn.CreateCommand () - let linkSql, linkParams = inClause "AND pp.permalink" "link" Permalink.toString permalinks - cmd.CommandText <- $" - SELECT p.permalink - FROM page p - INNER JOIN page_permalink pp ON pp.page_id = p.id - WHERE p.web_log_id = @webLogId - {linkSql}" - addWebLogId cmd webLogId - cmd.Parameters.AddRange linkParams - use! rdr = cmd.ExecuteReaderAsync () - return if rdr.Read () then Some (Map.toPermalink rdr) else None - } + let findCurrentPermalink (permalinks: Permalink list) webLogId = + log.LogTrace "Page.findCurrentPermalink" + let linkSql, linkParams = inJsonArray Table.Page (nameof Page.Empty.PriorPermalinks) "link" permalinks + conn.customSingle + $"SELECT data ->> '{linkName}' AS permalink + FROM {Table.Page} + WHERE {Document.Query.whereByWebLog} AND {linkSql}" + (webLogParam webLogId :: linkParams) + Map.toPermalink /// Get all complete pages for the given web log let findFullByWebLog webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT * FROM page WHERE web_log_id = @webLogId" - addWebLogId cmd webLogId - use! rdr = cmd.ExecuteReaderAsync () - let! pages = - toList toPage rdr - |> List.map (fun page -> backgroundTask { return! appendPageRevisionsAndPermalinks page }) - |> Task.WhenAll - return List.ofArray pages + log.LogTrace "Page.findFullByWebLog" + let! 
pages = Document.findByWebLog Table.Page webLogId conn + let! withRevs = pages |> List.map appendPageRevisions |> Task.WhenAll + return List.ofArray withRevs } - /// Get all listed pages for the given web log (without revisions, prior permalinks, or text) - let findListed webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- - "SELECT * - FROM page - WHERE web_log_id = @webLogId - AND is_in_page_list = @isInPageList - ORDER BY LOWER(title)" - addWebLogId cmd webLogId - cmd.Parameters.AddWithValue ("@isInPageList", true) |> ignore - use! rdr = cmd.ExecuteReaderAsync () - return toList pageWithoutText rdr - } + /// Get all listed pages for the given web log (without revisions or text) + let findListed webLogId = + log.LogTrace "Page.findListed" + conn.customList + $"""{Document.Query.selectByWebLog Table.Page} AND {Query.whereByField (Field.EQ pgListName "") "true"} + ORDER BY LOWER({titleField})""" + [ webLogParam webLogId ] + (fun rdr -> { fromData rdr with Text = "" }) - /// Get a page of pages for the given web log (without revisions, prior permalinks, or metadata) - let findPageOfPages webLogId pageNbr = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- - "SELECT * - FROM page - WHERE web_log_id = @webLogId - ORDER BY LOWER(title) - LIMIT @pageSize OFFSET @toSkip" - addWebLogId cmd webLogId - [ cmd.Parameters.AddWithValue ("@pageSize", 26) - cmd.Parameters.AddWithValue ("@toSkip", (pageNbr - 1) * 25) - ] |> ignore - use! 
rdr = cmd.ExecuteReaderAsync () - return toList toPage rdr + /// Get a page of pages for the given web log (without revisions) + let findPageOfPages webLogId pageNbr = + log.LogTrace "Page.findPageOfPages" + conn.customList + $"{Document.Query.selectByWebLog Table.Page} ORDER BY LOWER({titleField}) LIMIT @pageSize OFFSET @toSkip" + [ webLogParam webLogId; SqliteParameter("@pageSize", 26); SqliteParameter("@toSkip", (pageNbr - 1) * 25) ] + (fun rdr -> { pageWithoutLinks rdr with Metadata = [] }) + + /// Update a page + let update (page: Page) = backgroundTask { + log.LogTrace "Page.update" + match! findFullById page.Id page.WebLogId with + | Some oldPage -> + do! conn.updateById Table.Page page.Id { page with Revisions = [] } + do! updatePageRevisions page.Id oldPage.Revisions page.Revisions + | None -> () } /// Restore pages from a backup let restore pages = backgroundTask { - for page in pages do - do! add page - } - - /// Update a page - let update (page : Page) = backgroundTask { - match! findFullById page.Id page.WebLogId with - | Some oldPage -> - use cmd = conn.CreateCommand () - cmd.CommandText <- - "UPDATE page - SET author_id = @authorId, - title = @title, - permalink = @permalink, - published_on = @publishedOn, - updated_on = @updatedOn, - is_in_page_list = @isInPageList, - template = @template, - page_text = @text, - meta_items = @metaItems - WHERE id = @id - AND web_log_id = @webLogId" - addPageParameters cmd page - do! write cmd - do! updatePagePermalinks page.Id oldPage.PriorPermalinks page.PriorPermalinks - do! updatePageRevisions page.Id oldPage.Revisions page.Revisions - return () - | None -> return () + log.LogTrace "Page.restore" + for page in pages do do! add page } /// Update a page's prior permalinks - let updatePriorPermalinks pageId webLogId permalinks = backgroundTask { - match! findFullById pageId webLogId with - | Some page -> - do! 
updatePagePermalinks pageId page.PriorPermalinks permalinks + let updatePriorPermalinks pageId webLogId (permalinks: Permalink list) = backgroundTask { + log.LogTrace "Page.updatePriorPermalinks" + match! findById pageId webLogId with + | Some _ -> + do! conn.patchById Table.Page pageId {| PriorPermalinks = permalinks |} return true - | None -> return false + | None -> return false } interface IPageData with diff --git a/src/MyWebLog.Data/SQLite/SQLitePostData.fs b/src/MyWebLog.Data/SQLite/SQLitePostData.fs index 257bdf7..cc2063e 100644 --- a/src/MyWebLog.Data/SQLite/SQLitePostData.fs +++ b/src/MyWebLog.Data/SQLite/SQLitePostData.fs @@ -1,467 +1,215 @@ namespace MyWebLog.Data.SQLite open System.Threading.Tasks +open BitBadger.Documents +open BitBadger.Documents.Sqlite open Microsoft.Data.Sqlite +open Microsoft.Extensions.Logging open MyWebLog open MyWebLog.Data -open Newtonsoft.Json open NodaTime -/// SQLite myWebLog post data implementation -type SQLitePostData (conn : SqliteConnection, ser : JsonSerializer) = - +/// SQLite myWebLog post data implementation +type SQLitePostData(conn: SqliteConnection, log: ILogger) = + + /// The name of the JSON field for the post's permalink + let linkName = nameof Post.Empty.Permalink + + /// The JSON field for when the post was published + let publishField = $"data ->> '{nameof Post.Empty.PublishedOn}'" + + /// The name of the JSON field for the post's status + let statName = nameof Post.Empty.Status + // SUPPORT FUNCTIONS - /// Add parameters for post INSERT or UPDATE statements - let addPostParameters (cmd : SqliteCommand) (post : Post) = - [ cmd.Parameters.AddWithValue ("@id", PostId.toString post.Id) - cmd.Parameters.AddWithValue ("@webLogId", WebLogId.toString post.WebLogId) - cmd.Parameters.AddWithValue ("@authorId", WebLogUserId.toString post.AuthorId) - cmd.Parameters.AddWithValue ("@status", PostStatus.toString post.Status) - cmd.Parameters.AddWithValue ("@title", post.Title) - cmd.Parameters.AddWithValue 
("@permalink", Permalink.toString post.Permalink) - cmd.Parameters.AddWithValue ("@publishedOn", maybeInstant post.PublishedOn) - cmd.Parameters.AddWithValue ("@updatedOn", instantParam post.UpdatedOn) - cmd.Parameters.AddWithValue ("@template", maybe post.Template) - cmd.Parameters.AddWithValue ("@text", post.Text) - cmd.Parameters.AddWithValue ("@episode", maybe (if Option.isSome post.Episode then - Some (Utils.serialize ser post.Episode) - else None)) - cmd.Parameters.AddWithValue ("@metaItems", maybe (if List.isEmpty post.Metadata then None - else Some (Utils.serialize ser post.Metadata))) - ] |> ignore - - /// Append category IDs and tags to a post - let appendPostCategoryAndTag (post : Post) = backgroundTask { - use cmd = conn.CreateCommand () - cmd.Parameters.AddWithValue ("@id", PostId.toString post.Id) |> ignore - - cmd.CommandText <- "SELECT category_id AS id FROM post_category WHERE post_id = @id" - use! rdr = cmd.ExecuteReaderAsync () - let post = { post with CategoryIds = toList Map.toCategoryId rdr } - do! rdr.CloseAsync () - - cmd.CommandText <- "SELECT tag FROM post_tag WHERE post_id = @id" - use! rdr = cmd.ExecuteReaderAsync () - return { post with Tags = toList (Map.getString "tag") rdr } + /// Append revisions to a post + let appendPostRevisions (post: Post) = backgroundTask { + log.LogTrace "Post.appendPostRevisions" + let! revisions = Revisions.findByEntityId Table.PostRevision Table.Post post.Id conn + return { post with Revisions = revisions } } - /// Append revisions and permalinks to a post - let appendPostRevisionsAndPermalinks (post : Post) = backgroundTask { - use cmd = conn.CreateCommand () - cmd.Parameters.AddWithValue ("@postId", PostId.toString post.Id) |> ignore - - cmd.CommandText <- "SELECT permalink FROM post_permalink WHERE post_id = @postId" - use! rdr = cmd.ExecuteReaderAsync () - let post = { post with PriorPermalinks = toList Map.toPermalink rdr } - do! 
rdr.CloseAsync () - - cmd.CommandText <- "SELECT as_of, revision_text FROM post_revision WHERE post_id = @postId ORDER BY as_of DESC" - use! rdr = cmd.ExecuteReaderAsync () - return { post with Revisions = toList Map.toRevision rdr } - } + /// The SELECT statement to retrieve posts with a web log ID parameter + let postByWebLog = Document.Query.selectByWebLog Table.Post - /// The SELECT statement for a post that will include episode data, if it exists - let selectPost = "SELECT p.* FROM post p" - - /// Shorthand for mapping a data reader to a post - let toPost = - Map.toPost ser - - /// Find just-the-post by its ID for the given web log (excludes category, tag, meta, revisions, and permalinks) - let findPostById postId webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- $"{selectPost} WHERE p.id = @id" - cmd.Parameters.AddWithValue ("@id", PostId.toString postId) |> ignore - use! rdr = cmd.ExecuteReaderAsync () - return Helpers.verifyWebLog webLogId (fun p -> p.WebLogId) toPost rdr - } + /// Return a post with no revisions or prior permalinks + let postWithoutLinks rdr = + { fromData rdr with PriorPermalinks = [] } /// Return a post with no revisions, prior permalinks, or text let postWithoutText rdr = - { toPost rdr with Text = "" } + { postWithoutLinks rdr with Text = "" } - /// Update a post's assigned categories - let updatePostCategories postId oldCats newCats = backgroundTask { - let toDelete, toAdd = Utils.diffLists oldCats newCats CategoryId.toString - if List.isEmpty toDelete && List.isEmpty toAdd then - return () - else - use cmd = conn.CreateCommand () - [ cmd.Parameters.AddWithValue ("@postId", PostId.toString postId) - cmd.Parameters.Add ("@categoryId", SqliteType.Text) - ] |> ignore - let runCmd catId = backgroundTask { - cmd.Parameters["@categoryId"].Value <- CategoryId.toString catId - do! 
write cmd - } - cmd.CommandText <- "DELETE FROM post_category WHERE post_id = @postId AND category_id = @categoryId" - toDelete - |> List.map runCmd - |> Task.WhenAll - |> ignore - cmd.CommandText <- "INSERT INTO post_category VALUES (@postId, @categoryId)" - toAdd - |> List.map runCmd - |> Task.WhenAll - |> ignore - } - - /// Update a post's assigned categories - let updatePostTags postId (oldTags : string list) newTags = backgroundTask { - let toDelete, toAdd = Utils.diffLists oldTags newTags id - if List.isEmpty toDelete && List.isEmpty toAdd then - return () - else - use cmd = conn.CreateCommand () - [ cmd.Parameters.AddWithValue ("@postId", PostId.toString postId) - cmd.Parameters.Add ("@tag", SqliteType.Text) - ] |> ignore - let runCmd (tag : string) = backgroundTask { - cmd.Parameters["@tag"].Value <- tag - do! write cmd - } - cmd.CommandText <- "DELETE FROM post_tag WHERE post_id = @postId AND tag = @tag" - toDelete - |> List.map runCmd - |> Task.WhenAll - |> ignore - cmd.CommandText <- "INSERT INTO post_tag VALUES (@postId, @tag)" - toAdd - |> List.map runCmd - |> Task.WhenAll - |> ignore - } - - /// Update a post's prior permalinks - let updatePostPermalinks postId oldLinks newLinks = backgroundTask { - let toDelete, toAdd = Utils.diffPermalinks oldLinks newLinks - if List.isEmpty toDelete && List.isEmpty toAdd then - return () - else - use cmd = conn.CreateCommand () - [ cmd.Parameters.AddWithValue ("@postId", PostId.toString postId) - cmd.Parameters.Add ("@link", SqliteType.Text) - ] |> ignore - let runCmd link = backgroundTask { - cmd.Parameters["@link"].Value <- Permalink.toString link - do! 
write cmd - } - cmd.CommandText <- "DELETE FROM post_permalink WHERE post_id = @postId AND permalink = @link" - toDelete - |> List.map runCmd - |> Task.WhenAll - |> ignore - cmd.CommandText <- "INSERT INTO post_permalink VALUES (@postId, @link)" - toAdd - |> List.map runCmd - |> Task.WhenAll - |> ignore - } + /// The SELECT statement to retrieve published posts with a web log ID parameter + let publishedPostByWebLog = + $"""{postByWebLog} AND {Query.whereByField (Field.EQ statName "") $"'{string Published}'"}""" /// Update a post's revisions - let updatePostRevisions postId oldRevs newRevs = backgroundTask { - let toDelete, toAdd = Utils.diffRevisions oldRevs newRevs - if List.isEmpty toDelete && List.isEmpty toAdd then - return () - else - use cmd = conn.CreateCommand () - let runCmd withText rev = backgroundTask { - cmd.Parameters.Clear () - [ cmd.Parameters.AddWithValue ("@postId", PostId.toString postId) - cmd.Parameters.AddWithValue ("@asOf", instantParam rev.AsOf) - ] |> ignore - if withText then cmd.Parameters.AddWithValue ("@text", MarkupText.toString rev.Text) |> ignore - do! 
write cmd - } - cmd.CommandText <- "DELETE FROM post_revision WHERE post_id = @postId AND as_of = @asOf" - toDelete - |> List.map (runCmd false) - |> Task.WhenAll - |> ignore - cmd.CommandText <- "INSERT INTO post_revision VALUES (@postId, @asOf, @text)" - toAdd - |> List.map (runCmd true) - |> Task.WhenAll - |> ignore - } + let updatePostRevisions (postId: PostId) oldRevs newRevs = + log.LogTrace "Post.updatePostRevisions" + Revisions.update Table.PostRevision Table.Post postId oldRevs newRevs conn // IMPLEMENTATION FUNCTIONS /// Add a post - let add post = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- - "INSERT INTO post ( - id, web_log_id, author_id, status, title, permalink, published_on, updated_on, template, post_text, - episode, meta_items - ) VALUES ( - @id, @webLogId, @authorId, @status, @title, @permalink, @publishedOn, @updatedOn, @template, @text, - @episode, @metaItems - )" - addPostParameters cmd post - do! write cmd - do! updatePostCategories post.Id [] post.CategoryIds - do! updatePostTags post.Id [] post.Tags - do! updatePostPermalinks post.Id [] post.PriorPermalinks - do! updatePostRevisions post.Id [] post.Revisions + let add (post: Post) = backgroundTask { + log.LogTrace "Post.add" + do! conn.insert Table.Post { post with Revisions = [] } + do! updatePostRevisions post.Id [] post.Revisions } /// Count posts in a status for the given web log - let countByStatus status webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT COUNT(id) FROM post WHERE web_log_id = @webLogId AND status = @status" - addWebLogId cmd webLogId - cmd.Parameters.AddWithValue ("@status", PostStatus.toString status) |> ignore - return! 
count cmd - } + let countByStatus (status: PostStatus) webLogId = + log.LogTrace "Post.countByStatus" + let statParam = Field.EQ statName (string status) + conn.customScalar + $"""{Document.Query.countByWebLog Table.Post} AND {Query.whereByField statParam "@status"}""" + (addFieldParam "@status" statParam [ webLogParam webLogId ]) + (toCount >> int) - /// Find a post by its ID for the given web log (excluding revisions and prior permalinks + /// Find a post by its ID for the given web log (excluding revisions) let findById postId webLogId = backgroundTask { - match! findPostById postId webLogId with - | Some post -> - let! post = appendPostCategoryAndTag post - return Some post + log.LogTrace "Post.findById" + match! Document.findByIdAndWebLog Table.Post postId webLogId conn with + | Some post -> return Some { post with PriorPermalinks = [] } | None -> return None } - /// Find a post by its permalink for the given web log (excluding revisions and prior permalinks) - let findByPermalink permalink webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- $"{selectPost} WHERE p.web_log_id = @webLogId AND p.permalink = @link" - addWebLogId cmd webLogId - cmd.Parameters.AddWithValue ("@link", Permalink.toString permalink) |> ignore - use! rdr = cmd.ExecuteReaderAsync () - if rdr.Read () then - let! post = appendPostCategoryAndTag (toPost rdr) - return Some post - else - return None - } + /// Find a post by its permalink for the given web log (excluding revisions) + let findByPermalink (permalink: Permalink) webLogId = + log.LogTrace "Post.findByPermalink" + let linkParam = Field.EQ linkName (string permalink) + conn.customSingle + $"""{Document.Query.selectByWebLog Table.Post} AND {Query.whereByField linkParam "@link"}""" + (addFieldParam "@link" linkParam [ webLogParam webLogId ]) + postWithoutLinks /// Find a complete post by its ID for the given web log let findFullById postId webLogId = backgroundTask { - match! 
findById postId webLogId with + log.LogTrace "Post.findFullById" + match! Document.findByIdAndWebLog Table.Post postId webLogId conn with | Some post -> - let! post = appendPostRevisionsAndPermalinks post + let! post = appendPostRevisions post return Some post | None -> return None } /// Delete a post by its ID for the given web log let delete postId webLogId = backgroundTask { - match! findFullById postId webLogId with + log.LogTrace "Post.delete" + match! findById postId webLogId with | Some _ -> - use cmd = conn.CreateCommand () - cmd.Parameters.AddWithValue ("@id", PostId.toString postId) |> ignore - cmd.CommandText <- - "DELETE FROM post_revision WHERE post_id = @id; - DELETE FROM post_permalink WHERE post_id = @id; - DELETE FROM post_tag WHERE post_id = @id; - DELETE FROM post_category WHERE post_id = @id; - DELETE FROM post_comment WHERE post_id = @id; - DELETE FROM post WHERE id = @id" - do! write cmd + do! conn.customNonQuery + $"""DELETE FROM {Table.PostRevision} WHERE post_id = @id; + DELETE FROM {Table.PostComment} + WHERE {Query.whereByField (Field.EQ (nameof Comment.Empty.PostId) "") "@id"}; + {Query.Delete.byId Table.Post}""" + [ idParam postId ] return true | None -> return false } /// Find the current permalink from a list of potential prior permalinks for the given web log - let findCurrentPermalink permalinks webLogId = backgroundTask { - use cmd = conn.CreateCommand () - let linkSql, linkParams = inClause "AND pp.permalink" "link" Permalink.toString permalinks - cmd.CommandText <- $" - SELECT p.permalink - FROM post p - INNER JOIN post_permalink pp ON pp.post_id = p.id - WHERE p.web_log_id = @webLogId - {linkSql}" - addWebLogId cmd webLogId - cmd.Parameters.AddRange linkParams - use! 
rdr = cmd.ExecuteReaderAsync () - return if rdr.Read () then Some (Map.toPermalink rdr) else None - } + let findCurrentPermalink (permalinks: Permalink list) webLogId = + log.LogTrace "Post.findCurrentPermalink" + let linkSql, linkParams = inJsonArray Table.Post (nameof Post.Empty.PriorPermalinks) "link" permalinks + conn.customSingle + $"SELECT data ->> '{linkName}' AS permalink + FROM {Table.Post} + WHERE {Document.Query.whereByWebLog} AND {linkSql}" + (webLogParam webLogId :: linkParams) + Map.toPermalink /// Get all complete posts for the given web log let findFullByWebLog webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- $"{selectPost} WHERE p.web_log_id = @webLogId" - addWebLogId cmd webLogId - use! rdr = cmd.ExecuteReaderAsync () - let! posts = - toList toPost rdr - |> List.map (fun post -> backgroundTask { - let! post = appendPostCategoryAndTag post - return! appendPostRevisionsAndPermalinks post - }) - |> Task.WhenAll - return List.ofArray posts + log.LogTrace "Post.findFullByWebLog" + let! posts = Document.findByWebLog Table.Post webLogId conn + let! withRevs = posts |> List.map appendPostRevisions |> Task.WhenAll + return List.ofArray withRevs } - /// Get a page of categorized posts for the given web log (excludes revisions and prior permalinks) - let findPageOfCategorizedPosts webLogId categoryIds pageNbr postsPerPage = backgroundTask { - use cmd = conn.CreateCommand () - let catSql, catParams = inClause "AND pc.category_id" "catId" CategoryId.toString categoryIds - cmd.CommandText <- $" - {selectPost} - INNER JOIN post_category pc ON pc.post_id = p.id - WHERE p.web_log_id = @webLogId - AND p.status = @status - {catSql} - ORDER BY published_on DESC - LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}" - addWebLogId cmd webLogId - cmd.Parameters.AddWithValue ("@status", PostStatus.toString Published) |> ignore - cmd.Parameters.AddRange catParams - use! rdr = cmd.ExecuteReaderAsync () - let! 
posts = - toList toPost rdr - |> List.map (fun post -> backgroundTask { return! appendPostCategoryAndTag post }) - |> Task.WhenAll - return List.ofArray posts - } + /// Get a page of categorized posts for the given web log (excludes revisions) + let findPageOfCategorizedPosts webLogId (categoryIds: CategoryId list) pageNbr postsPerPage = + log.LogTrace "Post.findPageOfCategorizedPosts" + let catSql, catParams = inJsonArray Table.Post (nameof Post.Empty.CategoryIds) "catId" categoryIds + conn.customList + $"{publishedPostByWebLog} AND {catSql} + ORDER BY {publishField} DESC + LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}" + (webLogParam webLogId :: catParams) + postWithoutLinks - /// Get a page of posts for the given web log (excludes text, revisions, and prior permalinks) - let findPageOfPosts webLogId pageNbr postsPerPage = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- $" - {selectPost} - WHERE p.web_log_id = @webLogId - ORDER BY p.published_on DESC NULLS FIRST, p.updated_on - LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}" - addWebLogId cmd webLogId - use! rdr = cmd.ExecuteReaderAsync () - let! posts = - toList postWithoutText rdr - |> List.map (fun post -> backgroundTask { return! 
appendPostCategoryAndTag post }) - |> Task.WhenAll - return List.ofArray posts - } + /// Get a page of posts for the given web log (excludes text and revisions) + let findPageOfPosts webLogId pageNbr postsPerPage = + log.LogTrace "Post.findPageOfPosts" + conn.customList + $"{postByWebLog} + ORDER BY {publishField} DESC NULLS FIRST, data ->> '{nameof Post.Empty.UpdatedOn}' + LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}" + [ webLogParam webLogId ] + postWithoutText - /// Get a page of published posts for the given web log (excludes revisions and prior permalinks) - let findPageOfPublishedPosts webLogId pageNbr postsPerPage = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- $" - {selectPost} - WHERE p.web_log_id = @webLogId - AND p.status = @status - ORDER BY p.published_on DESC - LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}" - addWebLogId cmd webLogId - cmd.Parameters.AddWithValue ("@status", PostStatus.toString Published) |> ignore - use! rdr = cmd.ExecuteReaderAsync () - let! posts = - toList toPost rdr - |> List.map (fun post -> backgroundTask { return! 
appendPostCategoryAndTag post }) - |> Task.WhenAll - return List.ofArray posts - } + /// Get a page of published posts for the given web log (excludes revisions) + let findPageOfPublishedPosts webLogId pageNbr postsPerPage = + log.LogTrace "Post.findPageOfPublishedPosts" + conn.customList + $"{publishedPostByWebLog} + ORDER BY {publishField} DESC + LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}" + [ webLogParam webLogId ] + postWithoutLinks - /// Get a page of tagged posts for the given web log (excludes revisions and prior permalinks) - let findPageOfTaggedPosts webLogId (tag : string) pageNbr postsPerPage = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- $" - {selectPost} - INNER JOIN post_tag pt ON pt.post_id = p.id - WHERE p.web_log_id = @webLogId - AND p.status = @status - AND pt.tag = @tag - ORDER BY p.published_on DESC - LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}" - addWebLogId cmd webLogId - [ cmd.Parameters.AddWithValue ("@status", PostStatus.toString Published) - cmd.Parameters.AddWithValue ("@tag", tag) - ] |> ignore - use! rdr = cmd.ExecuteReaderAsync () - let! posts = - toList toPost rdr - |> List.map (fun post -> backgroundTask { return! 
appendPostCategoryAndTag post }) - |> Task.WhenAll - return List.ofArray posts - } + /// Get a page of tagged posts for the given web log (excludes revisions) + let findPageOfTaggedPosts webLogId (tag : string) pageNbr postsPerPage = + log.LogTrace "Post.findPageOfTaggedPosts" + let tagSql, tagParams = inJsonArray Table.Post (nameof Post.Empty.Tags) "tag" [ tag ] + conn.customList + $"{publishedPostByWebLog} AND {tagSql} + ORDER BY {publishField} DESC + LIMIT {postsPerPage + 1} OFFSET {(pageNbr - 1) * postsPerPage}" + (webLogParam webLogId :: tagParams) + postWithoutLinks /// Find the next newest and oldest post from a publish date for the given web log let findSurroundingPosts webLogId (publishedOn : Instant) = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- $" - {selectPost} - WHERE p.web_log_id = @webLogId - AND p.status = @status - AND p.published_on < @publishedOn - ORDER BY p.published_on DESC - LIMIT 1" - addWebLogId cmd webLogId - [ cmd.Parameters.AddWithValue ("@status", PostStatus.toString Published) - cmd.Parameters.AddWithValue ("@publishedOn", instantParam publishedOn) - ] |> ignore - use! rdr = cmd.ExecuteReaderAsync () - let! older = backgroundTask { - if rdr.Read () then - let! post = appendPostCategoryAndTag (postWithoutText rdr) - return Some post - else - return None - } - do! rdr.CloseAsync () - cmd.CommandText <- $" - {selectPost} - WHERE p.web_log_id = @webLogId - AND p.status = @status - AND p.published_on > @publishedOn - ORDER BY p.published_on - LIMIT 1" - use! rdr = cmd.ExecuteReaderAsync () - let! newer = backgroundTask { - if rdr.Read () then - let! post = appendPostCategoryAndTag (postWithoutText rdr) - return Some post - else - return None - } + log.LogTrace "Post.findSurroundingPosts" + let! 
older = + conn.customSingle + $"{publishedPostByWebLog} AND {publishField} < @publishedOn ORDER BY {publishField} DESC LIMIT 1" + [ webLogParam webLogId; SqliteParameter("@publishedOn", instantParam publishedOn) ] + postWithoutLinks + let! newer = + conn.customSingle + $"{publishedPostByWebLog} AND {publishField} > @publishedOn ORDER BY {publishField} LIMIT 1" + [ webLogParam webLogId; SqliteParameter("@publishedOn", instantParam publishedOn) ] + postWithoutLinks return older, newer } + /// Update a post + let update (post: Post) = backgroundTask { + log.LogTrace "Post.update" + match! findFullById post.Id post.WebLogId with + | Some oldPost -> + do! conn.updateById Table.Post post.Id { post with Revisions = [] } + do! updatePostRevisions post.Id oldPost.Revisions post.Revisions + | None -> () + } + /// Restore posts from a backup let restore posts = backgroundTask { - for post in posts do - do! add post - } - - /// Update a post - let update (post : Post) = backgroundTask { - match! findFullById post.Id post.WebLogId with - | Some oldPost -> - use cmd = conn.CreateCommand () - cmd.CommandText <- - "UPDATE post - SET author_id = @authorId, - status = @status, - title = @title, - permalink = @permalink, - published_on = @publishedOn, - updated_on = @updatedOn, - template = @template, - post_text = @text, - episode = @episode, - meta_items = @metaItems - WHERE id = @id - AND web_log_id = @webLogId" - addPostParameters cmd post - do! write cmd - do! updatePostCategories post.Id oldPost.CategoryIds post.CategoryIds - do! updatePostTags post.Id oldPost.Tags post.Tags - do! updatePostPermalinks post.Id oldPost.PriorPermalinks post.PriorPermalinks - do! updatePostRevisions post.Id oldPost.Revisions post.Revisions - | None -> return () + log.LogTrace "Post.restore" + for post in posts do do! add post } /// Update prior permalinks for a post - let updatePriorPermalinks postId webLogId permalinks = backgroundTask { - match! 
findFullById postId webLogId with - | Some post -> - do! updatePostPermalinks postId post.PriorPermalinks permalinks + let updatePriorPermalinks postId webLogId (permalinks: Permalink list) = backgroundTask { + match! findById postId webLogId with + | Some _ -> + do! conn.patchById Table.Post postId {| PriorPermalinks = permalinks |} return true - | None -> return false + | None -> return false } interface IPostData with diff --git a/src/MyWebLog.Data/SQLite/SQLiteTagMapData.fs b/src/MyWebLog.Data/SQLite/SQLiteTagMapData.fs index 00de07b..f71c61e 100644 --- a/src/MyWebLog.Data/SQLite/SQLiteTagMapData.fs +++ b/src/MyWebLog.Data/SQLite/SQLiteTagMapData.fs @@ -1,97 +1,62 @@ namespace MyWebLog.Data.SQLite +open BitBadger.Documents +open BitBadger.Documents.Sqlite open Microsoft.Data.Sqlite +open Microsoft.Extensions.Logging open MyWebLog open MyWebLog.Data -/// SQLite myWebLog tag mapping data implementation -type SQLiteTagMapData (conn : SqliteConnection) = +/// SQLite myWebLog tag mapping data implementation +type SQLiteTagMapData(conn: SqliteConnection, log: ILogger) = /// Find a tag mapping by its ID for the given web log - let findById tagMapId webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT * FROM tag_map WHERE id = @id" - cmd.Parameters.AddWithValue ("@id", TagMapId.toString tagMapId) |> ignore - use! rdr = cmd.ExecuteReaderAsync () - return Helpers.verifyWebLog webLogId (fun tm -> tm.WebLogId) Map.toTagMap rdr - } + let findById tagMapId webLogId = + log.LogTrace "TagMap.findById" + Document.findByIdAndWebLog Table.TagMap tagMapId webLogId conn /// Delete a tag mapping for the given web log let delete tagMapId webLogId = backgroundTask { + log.LogTrace "TagMap.delete" match! findById tagMapId webLogId with | Some _ -> - use cmd = conn.CreateCommand () - cmd.CommandText <- "DELETE FROM tag_map WHERE id = @id" - cmd.Parameters.AddWithValue ("@id", TagMapId.toString tagMapId) |> ignore - do! write cmd + do! 
conn.deleteById Table.TagMap tagMapId return true | None -> return false } /// Find a tag mapping by its URL value for the given web log - let findByUrlValue (urlValue : string) webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT * FROM tag_map WHERE web_log_id = @webLogId AND url_value = @urlValue" - addWebLogId cmd webLogId - cmd.Parameters.AddWithValue ("@urlValue", urlValue) |> ignore - use! rdr = cmd.ExecuteReaderAsync () - return if rdr.Read () then Some (Map.toTagMap rdr) else None - } + let findByUrlValue (urlValue: string) webLogId = + log.LogTrace "TagMap.findByUrlValue" + let urlParam = Field.EQ (nameof TagMap.Empty.UrlValue) urlValue + conn.customSingle + $"""{Document.Query.selectByWebLog Table.TagMap} AND {Query.whereByField urlParam "@urlValue"}""" + (addFieldParam "@urlValue" urlParam [ webLogParam webLogId ]) + fromData /// Get all tag mappings for the given web log - let findByWebLog webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT * FROM tag_map WHERE web_log_id = @webLogId ORDER BY tag" - addWebLogId cmd webLogId - use! rdr = cmd.ExecuteReaderAsync () - return toList Map.toTagMap rdr - } + let findByWebLog webLogId = + log.LogTrace "TagMap.findByWebLog" + Document.findByWebLog Table.TagMap webLogId conn /// Find any tag mappings in a list of tags for the given web log - let findMappingForTags (tags : string list) webLogId = backgroundTask { - use cmd = conn.CreateCommand () - let mapSql, mapParams = inClause "AND tag" "tag" id tags - cmd.CommandText <- $" - SELECT * - FROM tag_map - WHERE web_log_id = @webLogId - {mapSql}" - addWebLogId cmd webLogId - cmd.Parameters.AddRange mapParams - use! 
rdr = cmd.ExecuteReaderAsync () - return toList Map.toTagMap rdr - } + let findMappingForTags (tags: string list) webLogId = + log.LogTrace "TagMap.findMappingForTags" + let mapSql, mapParams = inClause $"AND data ->> '{nameof TagMap.Empty.Tag}'" "tag" id tags + conn.customList + $"{Document.Query.selectByWebLog Table.TagMap} {mapSql}" + (webLogParam webLogId :: mapParams) + fromData /// Save a tag mapping - let save (tagMap : TagMap) = backgroundTask { - use cmd = conn.CreateCommand () - match! findById tagMap.Id tagMap.WebLogId with - | Some _ -> - cmd.CommandText <- - "UPDATE tag_map - SET tag = @tag, - url_value = @urlValue - WHERE id = @id - AND web_log_id = @webLogId" - | None -> - cmd.CommandText <- - "INSERT INTO tag_map ( - id, web_log_id, tag, url_value - ) VALUES ( - @id, @webLogId, @tag, @urlValue - )" - addWebLogId cmd tagMap.WebLogId - [ cmd.Parameters.AddWithValue ("@id", TagMapId.toString tagMap.Id) - cmd.Parameters.AddWithValue ("@tag", tagMap.Tag) - cmd.Parameters.AddWithValue ("@urlValue", tagMap.UrlValue) - ] |> ignore - do! write cmd - } + let save (tagMap: TagMap) = + log.LogTrace "TagMap.save" + conn.save Table.TagMap tagMap /// Restore tag mappings from a backup let restore tagMaps = backgroundTask { - for tagMap in tagMaps do - do! save tagMap + log.LogTrace "TagMap.restore" + for tagMap in tagMaps do do! 
save tagMap } interface ITagMapData with diff --git a/src/MyWebLog.Data/SQLite/SQLiteThemeData.fs b/src/MyWebLog.Data/SQLite/SQLiteThemeData.fs index dd3d81b..7ff48f3 100644 --- a/src/MyWebLog.Data/SQLite/SQLiteThemeData.fs +++ b/src/MyWebLog.Data/SQLite/SQLiteThemeData.fs @@ -1,141 +1,69 @@ namespace MyWebLog.Data.SQLite -open System.Threading.Tasks +open BitBadger.Documents +open BitBadger.Documents.Sqlite open Microsoft.Data.Sqlite +open Microsoft.Extensions.Logging open MyWebLog open MyWebLog.Data -/// SQLite myWebLog theme data implementation -type SQLiteThemeData (conn : SqliteConnection) = +/// SQLite myWebLog theme data implementation +type SQLiteThemeData(conn : SqliteConnection, log: ILogger) = + + /// The JSON field for the theme ID + let idField = $"data ->> '{nameof Theme.Empty.Id}'" + + /// Convert a document to a theme with no template text + let withoutTemplateText (rdr: SqliteDataReader) = + let theme = fromData rdr + { theme with Templates = theme.Templates |> List.map (fun t -> { t with Text = "" })} + + /// Remove the template text from a theme + let withoutTemplateText' (it: Theme) = + { it with Templates = it.Templates |> List.map (fun t -> { t with Text = "" }) } /// Retrieve all themes (except 'admin'; excludes template text) - let all () = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT * FROM theme WHERE id <> 'admin' ORDER BY id" - use! rdr = cmd.ExecuteReaderAsync () - let themes = toList Map.toTheme rdr - do! rdr.CloseAsync () - cmd.CommandText <- "SELECT name, theme_id FROM theme_template WHERE theme_id <> 'admin' ORDER BY name" - use! 
rdr = cmd.ExecuteReaderAsync () - let templates = - seq { while rdr.Read () do ThemeId (Map.getString "theme_id" rdr), Map.toThemeTemplate false rdr } - |> List.ofSeq - return - themes - |> List.map (fun t -> - { t with Templates = templates |> List.filter (fun (themeId, _) -> themeId = t.Id) |> List.map snd }) - } + let all () = + log.LogTrace "Theme.all" + conn.customList + $"{Query.selectFromTable Table.Theme} WHERE {idField} <> 'admin' ORDER BY {idField}" + [] + withoutTemplateText /// Does a given theme exist? - let exists themeId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT COUNT(id) FROM theme WHERE id = @id" - cmd.Parameters.AddWithValue ("@id", ThemeId.toString themeId) |> ignore - let! count = count cmd - return count > 0 - } + let exists (themeId: ThemeId) = + log.LogTrace "Theme.exists" + conn.existsById Table.Theme themeId /// Find a theme by its ID - let findById themeId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT * FROM theme WHERE id = @id" - cmd.Parameters.AddWithValue ("@id", ThemeId.toString themeId) |> ignore - use! rdr = cmd.ExecuteReaderAsync () - if rdr.Read () then - let theme = Map.toTheme rdr - let templateCmd = conn.CreateCommand () - templateCmd.CommandText <- "SELECT * FROM theme_template WHERE theme_id = @id" - templateCmd.Parameters.Add cmd.Parameters["@id"] |> ignore - use! templateRdr = templateCmd.ExecuteReaderAsync () - return Some { theme with Templates = toList (Map.toThemeTemplate true) templateRdr } - else - return None - } + let findById themeId = + log.LogTrace "Theme.findById" + conn.findById Table.Theme themeId /// Find a theme by its ID (excludes the text of templates) - let findByIdWithoutText themeId = backgroundTask { - match! 
findById themeId with - | Some theme -> - return Some { - theme with Templates = theme.Templates |> List.map (fun t -> { t with Text = "" }) - } - | None -> return None - } + let findByIdWithoutText (themeId: ThemeId) = + log.LogTrace "Theme.findByIdWithoutText" + conn.customSingle (Query.Find.byId Table.Theme) [ idParam themeId ] withoutTemplateText /// Delete a theme by its ID let delete themeId = backgroundTask { + log.LogTrace "Theme.delete" match! findByIdWithoutText themeId with | Some _ -> - use cmd = conn.CreateCommand () - cmd.CommandText <- - "DELETE FROM theme_asset WHERE theme_id = @id; - DELETE FROM theme_template WHERE theme_id = @id; - DELETE FROM theme WHERE id = @id" - cmd.Parameters.AddWithValue ("@id", ThemeId.toString themeId) |> ignore - do! write cmd + do! conn.customNonQuery + $"DELETE FROM {Table.ThemeAsset} WHERE theme_id = @id; {Query.Delete.byId Table.Theme}" + [ idParam themeId ] return true | None -> return false } /// Save a theme - let save (theme : Theme) = backgroundTask { - use cmd = conn.CreateCommand () - let! oldTheme = findById theme.Id - cmd.CommandText <- - match oldTheme with - | Some _ -> "UPDATE theme SET name = @name, version = @version WHERE id = @id" - | None -> "INSERT INTO theme VALUES (@id, @name, @version)" - [ cmd.Parameters.AddWithValue ("@id", ThemeId.toString theme.Id) - cmd.Parameters.AddWithValue ("@name", theme.Name) - cmd.Parameters.AddWithValue ("@version", theme.Version) - ] |> ignore - do! 
write cmd - - let toDelete, toAdd = - Utils.diffLists (oldTheme |> Option.map (fun t -> t.Templates) |> Option.defaultValue []) - theme.Templates (fun t -> t.Name) - let toUpdate = - theme.Templates - |> List.filter (fun t -> - not (toDelete |> List.exists (fun d -> d.Name = t.Name)) - && not (toAdd |> List.exists (fun a -> a.Name = t.Name))) - cmd.CommandText <- - "UPDATE theme_template SET template = @template WHERE theme_id = @themeId AND name = @name" - cmd.Parameters.Clear () - [ cmd.Parameters.AddWithValue ("@themeId", ThemeId.toString theme.Id) - cmd.Parameters.Add ("@name", SqliteType.Text) - cmd.Parameters.Add ("@template", SqliteType.Text) - ] |> ignore - toUpdate - |> List.map (fun template -> backgroundTask { - cmd.Parameters["@name" ].Value <- template.Name - cmd.Parameters["@template"].Value <- template.Text - do! write cmd - }) - |> Task.WhenAll - |> ignore - cmd.CommandText <- "INSERT INTO theme_template VALUES (@themeId, @name, @template)" - toAdd - |> List.map (fun template -> backgroundTask { - cmd.Parameters["@name" ].Value <- template.Name - cmd.Parameters["@template"].Value <- template.Text - do! write cmd - }) - |> Task.WhenAll - |> ignore - cmd.CommandText <- "DELETE FROM theme_template WHERE theme_id = @themeId AND name = @name" - cmd.Parameters.Remove cmd.Parameters["@template"] - toDelete - |> List.map (fun template -> backgroundTask { - cmd.Parameters["@name"].Value <- template.Name - do! 
write cmd - }) - |> Task.WhenAll - |> ignore - } + let save (theme: Theme) = + log.LogTrace "Theme.save" + conn.save Table.Theme theme interface IThemeData with - member _.All () = all () + member _.All() = all () member _.Delete themeId = delete themeId member _.Exists themeId = exists themeId member _.FindById themeId = findById themeId @@ -145,97 +73,75 @@ type SQLiteThemeData (conn : SqliteConnection) = open System.IO -/// SQLite myWebLog theme data implementation -type SQLiteThemeAssetData (conn : SqliteConnection) = +/// SQLite myWebLog theme data implementation +type SQLiteThemeAssetData(conn : SqliteConnection, log: ILogger) = + + /// Create parameters for a theme asset ID + let assetIdParams assetId = + let (ThemeAssetId (ThemeId themeId, path)) = assetId + [ idParam themeId; sqlParam "@path" path ] /// Get all theme assets (excludes data) - let all () = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT theme_id, path, updated_on FROM theme_asset" - use! rdr = cmd.ExecuteReaderAsync () - return toList (Map.toThemeAsset false) rdr - } + let all () = + log.LogTrace "ThemeAsset.all" + conn.customList $"SELECT theme_id, path, updated_on FROM {Table.ThemeAsset}" [] (Map.toThemeAsset false) /// Delete all assets for the given theme - let deleteByTheme themeId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "DELETE FROM theme_asset WHERE theme_id = @themeId" - cmd.Parameters.AddWithValue ("@themeId", ThemeId.toString themeId) |> ignore - do! 
write cmd - } + let deleteByTheme (themeId: ThemeId) = + log.LogTrace "ThemeAsset.deleteByTheme" + conn.customNonQuery $"DELETE FROM {Table.ThemeAsset} WHERE theme_id = @id" [ idParam themeId ] /// Find a theme asset by its ID - let findById assetId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT *, ROWID FROM theme_asset WHERE theme_id = @themeId AND path = @path" - let (ThemeAssetId (ThemeId themeId, path)) = assetId - [ cmd.Parameters.AddWithValue ("@themeId", themeId) - cmd.Parameters.AddWithValue ("@path", path) - ] |> ignore - use! rdr = cmd.ExecuteReaderAsync () - return if rdr.Read () then Some (Map.toThemeAsset true rdr) else None - } + let findById assetId = + log.LogTrace "ThemeAsset.findById" + conn.customSingle + $"SELECT *, ROWID FROM {Table.ThemeAsset} WHERE theme_id = @id AND path = @path" + (assetIdParams assetId) + (Map.toThemeAsset true) /// Get theme assets for the given theme (excludes data) - let findByTheme themeId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT theme_id, path, updated_on FROM theme_asset WHERE theme_id = @themeId" - cmd.Parameters.AddWithValue ("@themeId", ThemeId.toString themeId) |> ignore - use! rdr = cmd.ExecuteReaderAsync () - return toList (Map.toThemeAsset false) rdr - } + let findByTheme (themeId: ThemeId) = + log.LogTrace "ThemeAsset.findByTheme" + conn.customList + $"SELECT theme_id, path, updated_on FROM {Table.ThemeAsset} WHERE theme_id = @id" + [ idParam themeId ] + (Map.toThemeAsset false) /// Get theme assets for the given theme - let findByThemeWithData themeId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT *, ROWID FROM theme_asset WHERE theme_id = @themeId" - cmd.Parameters.AddWithValue ("@themeId", ThemeId.toString themeId) |> ignore - use! 
rdr = cmd.ExecuteReaderAsync () - return toList (Map.toThemeAsset true) rdr - } + let findByThemeWithData (themeId: ThemeId) = + log.LogTrace "ThemeAsset.findByThemeWithData" + conn.customList + $"SELECT *, ROWID FROM {Table.ThemeAsset} WHERE theme_id = @id" + [ idParam themeId ] + (Map.toThemeAsset true) /// Save a theme asset - let save (asset : ThemeAsset) = backgroundTask { - use sideCmd = conn.CreateCommand () - sideCmd.CommandText <- - "SELECT COUNT(path) FROM theme_asset WHERE theme_id = @themeId AND path = @path" - let (ThemeAssetId (ThemeId themeId, path)) = asset.Id - [ sideCmd.Parameters.AddWithValue ("@themeId", themeId) - sideCmd.Parameters.AddWithValue ("@path", path) - ] |> ignore - let! exists = count sideCmd - - use cmd = conn.CreateCommand () - cmd.CommandText <- - if exists = 1 then - "UPDATE theme_asset - SET updated_on = @updatedOn, - data = ZEROBLOB(@dataLength) - WHERE theme_id = @themeId - AND path = @path" - else - "INSERT INTO theme_asset ( + let save (asset: ThemeAsset) = backgroundTask { + log.LogTrace "ThemeAsset.save" + do! conn.customNonQuery + $"INSERT INTO {Table.ThemeAsset} ( theme_id, path, updated_on, data - ) VALUES ( - @themeId, @path, @updatedOn, ZEROBLOB(@dataLength) - )" - [ cmd.Parameters.AddWithValue ("@themeId", themeId) - cmd.Parameters.AddWithValue ("@path", path) - cmd.Parameters.AddWithValue ("@updatedOn", instantParam asset.UpdatedOn) - cmd.Parameters.AddWithValue ("@dataLength", asset.Data.Length) - ] |> ignore - do! write cmd + ) VALUES ( + @id, @path, @updatedOn, ZEROBLOB(@dataLength) + ) ON CONFLICT (theme_id, path) DO UPDATE + SET updated_on = @updatedOn, + data = ZEROBLOB(@dataLength)" + [ sqlParam "@updatedOn" (instantParam asset.UpdatedOn) + sqlParam "@dataLength" asset.Data.Length + yield! (assetIdParams asset.Id) ] - sideCmd.CommandText <- "SELECT ROWID FROM theme_asset WHERE theme_id = @themeId AND path = @path" - let! 
rowId = sideCmd.ExecuteScalarAsync () - - use dataStream = new MemoryStream (asset.Data) - use blobStream = new SqliteBlob (conn, "theme_asset", "data", rowId :?> int64) + let! rowId = + conn.customScalar + $"SELECT ROWID FROM {Table.ThemeAsset} WHERE theme_id = @id AND path = @path" + (assetIdParams asset.Id) + _.GetInt64(0) + use dataStream = new MemoryStream(asset.Data) + use blobStream = new SqliteBlob(conn, Table.ThemeAsset, "data", rowId) do! dataStream.CopyToAsync blobStream } interface IThemeAssetData with - member _.All () = all () + member _.All() = all () member _.DeleteByTheme themeId = deleteByTheme themeId member _.FindById assetId = findById assetId member _.FindByTheme themeId = findByTheme themeId diff --git a/src/MyWebLog.Data/SQLite/SQLiteUploadData.fs b/src/MyWebLog.Data/SQLite/SQLiteUploadData.fs index 886e113..fbc2ce9 100644 --- a/src/MyWebLog.Data/SQLite/SQLiteUploadData.fs +++ b/src/MyWebLog.Data/SQLite/SQLiteUploadData.fs @@ -1,93 +1,78 @@ namespace MyWebLog.Data.SQLite open System.IO +open BitBadger.Documents.Sqlite open Microsoft.Data.Sqlite +open Microsoft.Extensions.Logging open MyWebLog open MyWebLog.Data -/// SQLite myWebLog web log data implementation -type SQLiteUploadData (conn : SqliteConnection) = +/// SQLite myWebLog web log data implementation +type SQLiteUploadData(conn: SqliteConnection, log: ILogger) = - /// Add parameters for uploaded file INSERT and UPDATE statements - let addUploadParameters (cmd : SqliteCommand) (upload : Upload) = - [ cmd.Parameters.AddWithValue ("@id", UploadId.toString upload.Id) - cmd.Parameters.AddWithValue ("@webLogId", WebLogId.toString upload.WebLogId) - cmd.Parameters.AddWithValue ("@path", Permalink.toString upload.Path) - cmd.Parameters.AddWithValue ("@updatedOn", instantParam upload.UpdatedOn) - cmd.Parameters.AddWithValue ("@dataLength", upload.Data.Length) - ] |> ignore - /// Save an uploaded file - let add upload = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- - 
"INSERT INTO upload ( - id, web_log_id, path, updated_on, data - ) VALUES ( - @id, @webLogId, @path, @updatedOn, ZEROBLOB(@dataLength) - )" - addUploadParameters cmd upload - do! write cmd - - cmd.CommandText <- "SELECT ROWID FROM upload WHERE id = @id" - let! rowId = cmd.ExecuteScalarAsync () - - use dataStream = new MemoryStream (upload.Data) - use blobStream = new SqliteBlob (conn, "upload", "data", rowId :?> int64) + let add (upload: Upload) = backgroundTask { + log.LogTrace "Upload.add" + do! conn.customNonQuery + $"INSERT INTO {Table.Upload} ( + id, web_log_id, path, updated_on, data + ) VALUES ( + @id, @webLogId, @path, @updatedOn, ZEROBLOB(@dataLength) + )" + [ idParam upload.Id + webLogParam upload.WebLogId + sqlParam "@path" (string upload.Path) + sqlParam "@updatedOn" (instantParam upload.UpdatedOn) + sqlParam "@dataLength" upload.Data.Length ] + let! rowId = + conn.customScalar $"SELECT ROWID FROM {Table.Upload} WHERE id = @id" [ idParam upload.Id ] _.GetInt64(0) + use dataStream = new MemoryStream(upload.Data) + use blobStream = new SqliteBlob(conn, Table.Upload, "data", rowId) do! dataStream.CopyToAsync blobStream } /// Delete an uploaded file by its ID - let delete uploadId webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- - "SELECT id, web_log_id, path, updated_on - FROM upload - WHERE id = @id - AND web_log_id = @webLogId" - addWebLogId cmd webLogId - cmd.Parameters.AddWithValue ("@id", UploadId.toString uploadId) |> ignore - let! rdr = cmd.ExecuteReaderAsync () - if (rdr.Read ()) then - let upload = Map.toUpload false rdr - do! rdr.CloseAsync () - cmd.CommandText <- "DELETE FROM upload WHERE id = @id AND web_log_id = @webLogId" - do! write cmd - return Ok (Permalink.toString upload.Path) - else - return Error $"""Upload ID {cmd.Parameters["@id"]} not found""" + let delete (uploadId: UploadId) webLogId = backgroundTask { + log.LogTrace "Upload.delete" + let! 
upload = + conn.customSingle + $"SELECT id, web_log_id, path, updated_on FROM {Table.Upload} WHERE id = @id AND web_log_id = @webLogId" + [ idParam uploadId; webLogParam webLogId ] + (Map.toUpload false) + match upload with + | Some up -> + do! conn.customNonQuery $"DELETE FROM {Table.Upload} WHERE id = @id" [ idParam up.Id ] + return Ok (string up.Path) + | None -> return Error $"Upload ID {string uploadId} not found" } /// Find an uploaded file by its path for the given web log - let findByPath (path : string) webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT *, ROWID FROM upload WHERE web_log_id = @webLogId AND path = @path" - addWebLogId cmd webLogId - cmd.Parameters.AddWithValue ("@path", path) |> ignore - let! rdr = cmd.ExecuteReaderAsync () - return if rdr.Read () then Some (Map.toUpload true rdr) else None - } + let findByPath (path: string) webLogId = + log.LogTrace "Upload.findByPath" + conn.customSingle + $"SELECT *, ROWID FROM {Table.Upload} WHERE web_log_id = @webLogId AND path = @path" + [ webLogParam webLogId; sqlParam "@path" path ] + (Map.toUpload true) /// Find all uploaded files for the given web log (excludes data) - let findByWebLog webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT id, web_log_id, path, updated_on FROM upload WHERE web_log_id = @webLogId" - addWebLogId cmd webLogId - let! rdr = cmd.ExecuteReaderAsync () - return toList (Map.toUpload false) rdr - } + let findByWebLog webLogId = + log.LogTrace "Upload.findByWebLog" + conn.customList + $"SELECT id, web_log_id, path, updated_on FROM {Table.Upload} WHERE web_log_id = @webLogId" + [ webLogParam webLogId ] + (Map.toUpload false) /// Find all uploaded files for the given web log - let findByWebLogWithData webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT *, ROWID FROM upload WHERE web_log_id = @webLogId" - addWebLogId cmd webLogId - let! 
rdr = cmd.ExecuteReaderAsync () - return toList (Map.toUpload true) rdr - } + let findByWebLogWithData webLogId = + log.LogTrace "Upload.findByWebLogWithData" + conn.customList + $"SELECT *, ROWID FROM {Table.Upload} WHERE web_log_id = @webLogId" + [ webLogParam webLogId ] + (Map.toUpload true) /// Restore uploads from a backup let restore uploads = backgroundTask { + log.LogTrace "Upload.restore" for upload in uploads do do! add upload } diff --git a/src/MyWebLog.Data/SQLite/SQLiteWebLogData.fs b/src/MyWebLog.Data/SQLite/SQLiteWebLogData.fs index aa34719..4c5a797 100644 --- a/src/MyWebLog.Data/SQLite/SQLiteWebLogData.fs +++ b/src/MyWebLog.Data/SQLite/SQLiteWebLogData.fs @@ -1,251 +1,67 @@ namespace MyWebLog.Data.SQLite -open System.Threading.Tasks +open BitBadger.Documents +open BitBadger.Documents.Sqlite open Microsoft.Data.Sqlite +open Microsoft.Extensions.Logging open MyWebLog open MyWebLog.Data -open Newtonsoft.Json -// The web log podcast insert loop is not statically compilable; this is OK -#nowarn "3511" - -/// SQLite myWebLog web log data implementation -type SQLiteWebLogData (conn : SqliteConnection, ser : JsonSerializer) = - - // SUPPORT FUNCTIONS - - /// Add parameters for web log INSERT or web log/RSS options UPDATE statements - let addWebLogRssParameters (cmd : SqliteCommand) (webLog : WebLog) = - [ cmd.Parameters.AddWithValue ("@isFeedEnabled", webLog.Rss.IsFeedEnabled) - cmd.Parameters.AddWithValue ("@feedName", webLog.Rss.FeedName) - cmd.Parameters.AddWithValue ("@itemsInFeed", maybe webLog.Rss.ItemsInFeed) - cmd.Parameters.AddWithValue ("@isCategoryEnabled", webLog.Rss.IsCategoryEnabled) - cmd.Parameters.AddWithValue ("@isTagEnabled", webLog.Rss.IsTagEnabled) - cmd.Parameters.AddWithValue ("@copyright", maybe webLog.Rss.Copyright) - ] |> ignore - - /// Add parameters for web log INSERT or UPDATE statements - let addWebLogParameters (cmd : SqliteCommand) (webLog : WebLog) = - [ cmd.Parameters.AddWithValue ("@id", WebLogId.toString webLog.Id) - 
cmd.Parameters.AddWithValue ("@name", webLog.Name) - cmd.Parameters.AddWithValue ("@slug", webLog.Slug) - cmd.Parameters.AddWithValue ("@subtitle", maybe webLog.Subtitle) - cmd.Parameters.AddWithValue ("@defaultPage", webLog.DefaultPage) - cmd.Parameters.AddWithValue ("@postsPerPage", webLog.PostsPerPage) - cmd.Parameters.AddWithValue ("@themeId", ThemeId.toString webLog.ThemeId) - cmd.Parameters.AddWithValue ("@urlBase", webLog.UrlBase) - cmd.Parameters.AddWithValue ("@timeZone", webLog.TimeZone) - cmd.Parameters.AddWithValue ("@autoHtmx", webLog.AutoHtmx) - cmd.Parameters.AddWithValue ("@uploads", UploadDestination.toString webLog.Uploads) - ] |> ignore - addWebLogRssParameters cmd webLog - - /// Add parameters for custom feed INSERT or UPDATE statements - let addCustomFeedParameters (cmd : SqliteCommand) webLogId (feed : CustomFeed) = - [ cmd.Parameters.AddWithValue ("@id", CustomFeedId.toString feed.Id) - cmd.Parameters.AddWithValue ("@webLogId", WebLogId.toString webLogId) - cmd.Parameters.AddWithValue ("@source", CustomFeedSource.toString feed.Source) - cmd.Parameters.AddWithValue ("@path", Permalink.toString feed.Path) - cmd.Parameters.AddWithValue ("@podcast", maybe (if Option.isSome feed.Podcast then - Some (Utils.serialize ser feed.Podcast) - else None)) - ] |> ignore - - /// Shorthand to map a data reader to a custom feed - let toCustomFeed = - Map.toCustomFeed ser - - /// Get the current custom feeds for a web log - let getCustomFeeds (webLog : WebLog) = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT * FROM web_log_feed WHERE web_log_id = @webLogId" - addWebLogId cmd webLog.Id - use! rdr = cmd.ExecuteReaderAsync () - return toList toCustomFeed rdr - } - - /// Append custom feeds to a web log - let appendCustomFeeds (webLog : WebLog) = backgroundTask { - let! 
feeds = getCustomFeeds webLog - return { webLog with Rss = { webLog.Rss with CustomFeeds = feeds } } - } - - /// Update the custom feeds for a web log - let updateCustomFeeds (webLog : WebLog) = backgroundTask { - let! feeds = getCustomFeeds webLog - let toDelete, toAdd = Utils.diffLists feeds webLog.Rss.CustomFeeds (fun it -> $"{CustomFeedId.toString it.Id}") - let toId (feed : CustomFeed) = feed.Id - let toUpdate = - webLog.Rss.CustomFeeds - |> List.filter (fun f -> - not (toDelete |> List.map toId |> List.append (toAdd |> List.map toId) |> List.contains f.Id)) - use cmd = conn.CreateCommand () - cmd.Parameters.Add ("@id", SqliteType.Text) |> ignore - toDelete - |> List.map (fun it -> backgroundTask { - cmd.CommandText <- "DELETE FROM web_log_feed WHERE id = @id" - cmd.Parameters["@id"].Value <- CustomFeedId.toString it.Id - do! write cmd - }) - |> Task.WhenAll - |> ignore - cmd.Parameters.Clear () - toAdd - |> List.map (fun it -> backgroundTask { - cmd.CommandText <- - "INSERT INTO web_log_feed ( - id, web_log_id, source, path, podcast - ) VALUES ( - @id, @webLogId, @source, @path, @podcast - )" - cmd.Parameters.Clear () - addCustomFeedParameters cmd webLog.Id it - do! write cmd - }) - |> Task.WhenAll - |> ignore - toUpdate - |> List.map (fun it -> backgroundTask { - cmd.CommandText <- - "UPDATE web_log_feed - SET source = @source, - path = @path, - podcast = @podcast - WHERE id = @id - AND web_log_id = @webLogId" - cmd.Parameters.Clear () - addCustomFeedParameters cmd webLog.Id it - do! 
write cmd - }) - |> Task.WhenAll - |> ignore - } - - // IMPLEMENTATION FUNCTIONS +/// SQLite myWebLog web log data implementation +type SQLiteWebLogData(conn: SqliteConnection, log: ILogger) = /// Add a web log - let add webLog = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- - "INSERT INTO web_log ( - id, name, slug, subtitle, default_page, posts_per_page, theme_id, url_base, time_zone, auto_htmx, - uploads, is_feed_enabled, feed_name, items_in_feed, is_category_enabled, is_tag_enabled, copyright - ) VALUES ( - @id, @name, @slug, @subtitle, @defaultPage, @postsPerPage, @themeId, @urlBase, @timeZone, @autoHtmx, - @uploads, @isFeedEnabled, @feedName, @itemsInFeed, @isCategoryEnabled, @isTagEnabled, @copyright - )" - addWebLogParameters cmd webLog - do! write cmd - do! updateCustomFeeds webLog - } + let add webLog = + log.LogTrace "WebLog.add" + conn.insert Table.WebLog webLog /// Retrieve all web logs - let all () = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT * FROM web_log" - use! rdr = cmd.ExecuteReaderAsync () - let! webLogs = - toList Map.toWebLog rdr - |> List.map (fun webLog -> backgroundTask { return! 
appendCustomFeeds webLog }) - |> Task.WhenAll - return List.ofArray webLogs - } + let all () = + log.LogTrace "WebLog.all" + conn.findAll Table.WebLog /// Delete a web log by its ID - let delete webLogId = backgroundTask { - use cmd = conn.CreateCommand () - addWebLogId cmd webLogId - let subQuery table = $"(SELECT id FROM {table} WHERE web_log_id = @webLogId)" - let postSubQuery = subQuery "post" - let pageSubQuery = subQuery "page" - cmd.CommandText <- $" - DELETE FROM post_comment WHERE post_id IN {postSubQuery}; - DELETE FROM post_revision WHERE post_id IN {postSubQuery}; - DELETE FROM post_permalink WHERE post_id IN {postSubQuery}; - DELETE FROM post_tag WHERE post_id IN {postSubQuery}; - DELETE FROM post_category WHERE post_id IN {postSubQuery}; - DELETE FROM post WHERE web_log_id = @webLogId; - DELETE FROM page_revision WHERE page_id IN {pageSubQuery}; - DELETE FROM page_permalink WHERE page_id IN {pageSubQuery}; - DELETE FROM page WHERE web_log_id = @webLogId; - DELETE FROM category WHERE web_log_id = @webLogId; - DELETE FROM tag_map WHERE web_log_id = @webLogId; - DELETE FROM upload WHERE web_log_id = @webLogId; - DELETE FROM web_log_user WHERE web_log_id = @webLogId; - DELETE FROM web_log_feed WHERE web_log_id = @webLogId; - DELETE FROM web_log WHERE id = @webLogId" - do! 
write cmd - } + let delete webLogId = + log.LogTrace "WebLog.delete" + let webLogMatches = Query.whereByField (Field.EQ "WebLogId" "") "@webLogId" + let subQuery table = $"(SELECT data ->> 'Id' FROM {table} WHERE {webLogMatches})" + Custom.nonQuery + $"""DELETE FROM {Table.PostComment} WHERE data ->> 'PostId' IN {subQuery Table.Post}; + DELETE FROM {Table.PostRevision} WHERE post_id IN {subQuery Table.Post}; + DELETE FROM {Table.PageRevision} WHERE page_id IN {subQuery Table.Page}; + DELETE FROM {Table.Post} WHERE {webLogMatches}; + DELETE FROM {Table.Page} WHERE {webLogMatches}; + DELETE FROM {Table.Category} WHERE {webLogMatches}; + DELETE FROM {Table.TagMap} WHERE {webLogMatches}; + DELETE FROM {Table.Upload} WHERE web_log_id = @webLogId; + DELETE FROM {Table.WebLogUser} WHERE {webLogMatches}; + DELETE FROM {Table.WebLog} WHERE {Query.whereById "@webLogId"}""" + [ webLogParam webLogId ] /// Find a web log by its host (URL base) - let findByHost (url : string) = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT * FROM web_log WHERE url_base = @urlBase" - cmd.Parameters.AddWithValue ("@urlBase", url) |> ignore - use! rdr = cmd.ExecuteReaderAsync () - if rdr.Read () then - let! webLog = appendCustomFeeds (Map.toWebLog rdr) - return Some webLog - else - return None - } + let findByHost (url: string) = + log.LogTrace "WebLog.findByHost" + conn.findFirstByField Table.WebLog (Field.EQ (nameof WebLog.Empty.UrlBase) url) /// Find a web log by its ID - let findById webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT * FROM web_log WHERE id = @webLogId" - addWebLogId cmd webLogId - use! rdr = cmd.ExecuteReaderAsync () - if rdr.Read () then - let! 
webLog = appendCustomFeeds (Map.toWebLog rdr) - return Some webLog - else - return None - } + let findById webLogId = + log.LogTrace "WebLog.findById" + conn.findById Table.WebLog webLogId + + /// Update redirect rules for a web log + let updateRedirectRules (webLog: WebLog) = + log.LogTrace "WebLog.updateRedirectRules" + conn.patchById Table.WebLog webLog.Id {| RedirectRules = webLog.RedirectRules |} + + /// Update RSS options for a web log + let updateRssOptions (webLog: WebLog) = + log.LogTrace "WebLog.updateRssOptions" + conn.patchById Table.WebLog webLog.Id {| Rss = webLog.Rss |} /// Update settings for a web log - let updateSettings webLog = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- - "UPDATE web_log - SET name = @name, - slug = @slug, - subtitle = @subtitle, - default_page = @defaultPage, - posts_per_page = @postsPerPage, - theme_id = @themeId, - url_base = @urlBase, - time_zone = @timeZone, - auto_htmx = @autoHtmx, - uploads = @uploads, - is_feed_enabled = @isFeedEnabled, - feed_name = @feedName, - items_in_feed = @itemsInFeed, - is_category_enabled = @isCategoryEnabled, - is_tag_enabled = @isTagEnabled, - copyright = @copyright - WHERE id = @id" - addWebLogParameters cmd webLog - do! write cmd - } - - /// Update RSS options for a web log - let updateRssOptions webLog = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- - "UPDATE web_log - SET is_feed_enabled = @isFeedEnabled, - feed_name = @feedName, - items_in_feed = @itemsInFeed, - is_category_enabled = @isCategoryEnabled, - is_tag_enabled = @isTagEnabled, - copyright = @copyright - WHERE id = @id" - addWebLogRssParameters cmd webLog - cmd.Parameters.AddWithValue ("@id", WebLogId.toString webLog.Id) |> ignore - do! write cmd - do! 
updateCustomFeeds webLog - } + let updateSettings (webLog: WebLog) = + log.LogTrace "WebLog.updateSettings" + conn.updateById Table.WebLog webLog.Id webLog interface IWebLogData with member _.Add webLog = add webLog @@ -253,5 +69,6 @@ type SQLiteWebLogData (conn : SqliteConnection, ser : JsonSerializer) = member _.Delete webLogId = delete webLogId member _.FindByHost url = findByHost url member _.FindById webLogId = findById webLogId - member _.UpdateSettings webLog = updateSettings webLog + member _.UpdateRedirectRules webLog = updateRedirectRules webLog member _.UpdateRssOptions webLog = updateRssOptions webLog + member _.UpdateSettings webLog = updateSettings webLog diff --git a/src/MyWebLog.Data/SQLite/SQLiteWebLogUserData.fs b/src/MyWebLog.Data/SQLite/SQLiteWebLogUserData.fs index 8eb8cd9..35a9cd5 100644 --- a/src/MyWebLog.Data/SQLite/SQLiteWebLogUserData.fs +++ b/src/MyWebLog.Data/SQLite/SQLiteWebLogUserData.fs @@ -1,147 +1,86 @@ namespace MyWebLog.Data.SQLite +open BitBadger.Documents +open BitBadger.Documents.Sqlite open Microsoft.Data.Sqlite +open Microsoft.Extensions.Logging open MyWebLog open MyWebLog.Data -/// SQLite myWebLog user data implementation -type SQLiteWebLogUserData (conn : SqliteConnection) = - - // SUPPORT FUNCTIONS - - /// Add parameters for web log user INSERT or UPDATE statements - let addWebLogUserParameters (cmd : SqliteCommand) (user : WebLogUser) = - [ cmd.Parameters.AddWithValue ("@id", WebLogUserId.toString user.Id) - cmd.Parameters.AddWithValue ("@webLogId", WebLogId.toString user.WebLogId) - cmd.Parameters.AddWithValue ("@email", user.Email) - cmd.Parameters.AddWithValue ("@firstName", user.FirstName) - cmd.Parameters.AddWithValue ("@lastName", user.LastName) - cmd.Parameters.AddWithValue ("@preferredName", user.PreferredName) - cmd.Parameters.AddWithValue ("@passwordHash", user.PasswordHash) - cmd.Parameters.AddWithValue ("@url", maybe user.Url) - cmd.Parameters.AddWithValue ("@accessLevel", AccessLevel.toString 
user.AccessLevel) - cmd.Parameters.AddWithValue ("@createdOn", instantParam user.CreatedOn) - cmd.Parameters.AddWithValue ("@lastSeenOn", maybeInstant user.LastSeenOn) - ] |> ignore - - // IMPLEMENTATION FUNCTIONS +/// SQLite myWebLog user data implementation +type SQLiteWebLogUserData(conn: SqliteConnection, log: ILogger) = /// Add a user - let add user = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- - "INSERT INTO web_log_user ( - id, web_log_id, email, first_name, last_name, preferred_name, password_hash, url, access_level, - created_on, last_seen_on - ) VALUES ( - @id, @webLogId, @email, @firstName, @lastName, @preferredName, @passwordHash, @url, @accessLevel, - @createdOn, @lastSeenOn - )" - addWebLogUserParameters cmd user - do! write cmd - } + let add user = + log.LogTrace "WebLogUser.add" + conn.insert Table.WebLogUser user /// Find a user by their ID for the given web log - let findById userId webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT * FROM web_log_user WHERE id = @id" - cmd.Parameters.AddWithValue ("@id", WebLogUserId.toString userId) |> ignore - use! rdr = cmd.ExecuteReaderAsync () - return Helpers.verifyWebLog webLogId (fun u -> u.WebLogId) Map.toWebLogUser rdr - } + let findById userId webLogId = + log.LogTrace "WebLogUser.findById" + Document.findByIdAndWebLog Table.WebLogUser userId webLogId conn /// Delete a user if they have no posts or pages let delete userId webLogId = backgroundTask { + log.LogTrace "WebLogUser.delete" match! findById userId webLogId with | Some _ -> - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT COUNT(id) FROM page WHERE author_id = @userId" - cmd.Parameters.AddWithValue ("@userId", WebLogUserId.toString userId) |> ignore - let! pageCount = count cmd - cmd.CommandText <- "SELECT COUNT(id) FROM post WHERE author_id = @userId" - let! postCount = count cmd + let! 
pageCount = conn.countByField Table.Page (Field.EQ (nameof Page.Empty.AuthorId) (string userId)) + let! postCount = conn.countByField Table.Post (Field.EQ (nameof Post.Empty.AuthorId) (string userId)) if pageCount + postCount > 0 then return Error "User has pages or posts; cannot delete" else - cmd.CommandText <- "DELETE FROM web_log_user WHERE id = @userId" - let! _ = cmd.ExecuteNonQueryAsync () + do! conn.deleteById Table.WebLogUser userId return Ok true | None -> return Error "User does not exist" } /// Find a user by their e-mail address for the given web log - let findByEmail (email : string) webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT * FROM web_log_user WHERE web_log_id = @webLogId AND email = @email" - addWebLogId cmd webLogId - cmd.Parameters.AddWithValue ("@email", email) |> ignore - use! rdr = cmd.ExecuteReaderAsync () - return if rdr.Read () then Some (Map.toWebLogUser rdr) else None - } + let findByEmail (email: string) webLogId = + log.LogTrace "WebLogUser.findByEmail" + let emailParam = Field.EQ (nameof WebLogUser.Empty.Email) email + conn.customSingle + $"""{Document.Query.selectByWebLog Table.WebLogUser} + AND {Query.whereByField emailParam "@email"}""" + (addFieldParam "@email" emailParam [ webLogParam webLogId ]) + fromData /// Get all users for the given web log let findByWebLog webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT * FROM web_log_user WHERE web_log_id = @webLogId ORDER BY LOWER(preferred_name)" - addWebLogId cmd webLogId - use! rdr = cmd.ExecuteReaderAsync () - return toList Map.toWebLogUser rdr + log.LogTrace "WebLogUser.findByWebLog" + let! 
users = Document.findByWebLog Table.WebLogUser webLogId conn + return users |> List.sortBy _.PreferredName.ToLowerInvariant() } /// Find the names of users by their IDs for the given web log - let findNames webLogId userIds = backgroundTask { - use cmd = conn.CreateCommand () - let nameSql, nameParams = inClause "AND id" "id" WebLogUserId.toString userIds - cmd.CommandText <- $"SELECT * FROM web_log_user WHERE web_log_id = @webLogId {nameSql}" - addWebLogId cmd webLogId - cmd.Parameters.AddRange nameParams - use! rdr = cmd.ExecuteReaderAsync () - return - toList Map.toWebLogUser rdr - |> List.map (fun u -> { Name = WebLogUserId.toString u.Id; Value = WebLogUser.displayName u }) - } + let findNames webLogId (userIds: WebLogUserId list) = + log.LogTrace "WebLogUser.findNames" + let nameSql, nameParams = inClause $"AND data ->> '{nameof WebLogUser.Empty.Id}'" "id" string userIds + conn.customList + $"{Document.Query.selectByWebLog Table.WebLogUser} {nameSql}" + (webLogParam webLogId :: nameParams) + (fun rdr -> + let user = fromData rdr + { Name = string user.Id; Value = user.DisplayName }) /// Restore users from a backup let restore users = backgroundTask { - for user in users do - do! add user + log.LogTrace "WebLogUser.restore" + for user in users do do! add user } /// Set a user's last seen date/time to now let setLastSeen userId webLogId = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- - "UPDATE web_log_user - SET last_seen_on = @lastSeenOn - WHERE id = @id - AND web_log_id = @webLogId" - addWebLogId cmd webLogId - [ cmd.Parameters.AddWithValue ("@id", WebLogUserId.toString userId) - cmd.Parameters.AddWithValue ("@lastSeenOn", instantParam (Noda.now ())) - ] |> ignore - let! _ = cmd.ExecuteNonQueryAsync () - () + log.LogTrace "WebLogUser.setLastSeen" + match! findById userId webLogId with + | Some _ -> do! 
conn.patchById Table.WebLogUser userId {| LastSeenOn = Noda.now () |} + | None -> () } /// Update a user - let update user = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- - "UPDATE web_log_user - SET email = @email, - first_name = @firstName, - last_name = @lastName, - preferred_name = @preferredName, - password_hash = @passwordHash, - url = @url, - access_level = @accessLevel, - created_on = @createdOn, - last_seen_on = @lastSeenOn - WHERE id = @id - AND web_log_id = @webLogId" - addWebLogUserParameters cmd user - do! write cmd - } + let update (user: WebLogUser) = + log.LogTrace "WebLogUser.update" + conn.updateById Table.WebLogUser user.Id user interface IWebLogUserData with member _.Add user = add user diff --git a/src/MyWebLog.Data/SQLiteData.fs b/src/MyWebLog.Data/SQLiteData.fs index 873945c..424f282 100644 --- a/src/MyWebLog.Data/SQLiteData.fs +++ b/src/MyWebLog.Data/SQLiteData.fs @@ -1,5 +1,9 @@ namespace MyWebLog.Data +open System +open System.Threading.Tasks +open BitBadger.Documents +open BitBadger.Documents.Sqlite open Microsoft.Data.Sqlite open Microsoft.Extensions.Logging open MyWebLog @@ -7,231 +11,122 @@ open MyWebLog.Data.SQLite open Newtonsoft.Json open NodaTime -/// SQLite myWebLog data implementation -type SQLiteData (conn : SqliteConnection, log : ILogger, ser : JsonSerializer) = +/// SQLite myWebLog data implementation +type SQLiteData(conn: SqliteConnection, log: ILogger, ser: JsonSerializer) = + /// Create tables (and their associated indexes) if they do not exist let ensureTables () = backgroundTask { - - use cmd = conn.CreateCommand () - let! tables = backgroundTask { - cmd.CommandText <- "SELECT name FROM sqlite_master WHERE type = 'table'" - let! rdr = cmd.ExecuteReaderAsync () - let mutable tableList = [] - while rdr.Read() do - tableList <- Map.getString "name" rdr :: tableList - do! rdr.CloseAsync () - return tableList - } + Configuration.useSerializer (Utils.createDocumentSerializer ser) + + let! 
tables = conn.customList "SELECT name FROM sqlite_master WHERE type = 'table'" [] _.GetString(0) + let needsTable table = not (List.contains table tables) - seq { - // Theme tables - if needsTable "theme" then - "CREATE TABLE theme ( - id TEXT PRIMARY KEY, - name TEXT NOT NULL, - version TEXT NOT NULL)" - if needsTable "theme_template" then - "CREATE TABLE theme_template ( - theme_id TEXT NOT NULL REFERENCES theme (id), - name TEXT NOT NULL, - template TEXT NOT NULL, - PRIMARY KEY (theme_id, name))" - if needsTable "theme_asset" then - "CREATE TABLE theme_asset ( - theme_id TEXT NOT NULL REFERENCES theme (id), - path TEXT NOT NULL, - updated_on TEXT NOT NULL, - data BLOB NOT NULL, - PRIMARY KEY (theme_id, path))" - - // Web log tables - if needsTable "web_log" then - "CREATE TABLE web_log ( - id TEXT PRIMARY KEY, - name TEXT NOT NULL, - slug TEXT NOT NULL, - subtitle TEXT, - default_page TEXT NOT NULL, - posts_per_page INTEGER NOT NULL, - theme_id TEXT NOT NULL REFERENCES theme (id), - url_base TEXT NOT NULL, - time_zone TEXT NOT NULL, - auto_htmx INTEGER NOT NULL DEFAULT 0, - uploads TEXT NOT NULL, - is_feed_enabled INTEGER NOT NULL DEFAULT 0, - feed_name TEXT NOT NULL, - items_in_feed INTEGER, - is_category_enabled INTEGER NOT NULL DEFAULT 0, - is_tag_enabled INTEGER NOT NULL DEFAULT 0, - copyright TEXT); - CREATE INDEX web_log_theme_idx ON web_log (theme_id)" - if needsTable "web_log_feed" then - "CREATE TABLE web_log_feed ( - id TEXT PRIMARY KEY, - web_log_id TEXT NOT NULL REFERENCES web_log (id), - source TEXT NOT NULL, - path TEXT NOT NULL, - podcast TEXT); - CREATE INDEX web_log_feed_web_log_idx ON web_log_feed (web_log_id)" - - // Category table - if needsTable "category" then - "CREATE TABLE category ( - id TEXT PRIMARY KEY, - web_log_id TEXT NOT NULL REFERENCES web_log (id), - name TEXT NOT NULL, - slug TEXT NOT NULL, - description TEXT, - parent_id TEXT); - CREATE INDEX category_web_log_idx ON category (web_log_id)" - - // Web log user table - if 
needsTable "web_log_user" then - "CREATE TABLE web_log_user ( - id TEXT PRIMARY KEY, - web_log_id TEXT NOT NULL REFERENCES web_log (id), - email TEXT NOT NULL, - first_name TEXT NOT NULL, - last_name TEXT NOT NULL, - preferred_name TEXT NOT NULL, - password_hash TEXT NOT NULL, - url TEXT, - access_level TEXT NOT NULL, - created_on TEXT NOT NULL, - last_seen_on TEXT); - CREATE INDEX web_log_user_web_log_idx ON web_log_user (web_log_id); - CREATE INDEX web_log_user_email_idx ON web_log_user (web_log_id, email)" - - // Page tables - if needsTable "page" then - "CREATE TABLE page ( - id TEXT PRIMARY KEY, - web_log_id TEXT NOT NULL REFERENCES web_log (id), - author_id TEXT NOT NULL REFERENCES web_log_user (id), - title TEXT NOT NULL, - permalink TEXT NOT NULL, - published_on TEXT NOT NULL, - updated_on TEXT NOT NULL, - is_in_page_list INTEGER NOT NULL DEFAULT 0, - template TEXT, - page_text TEXT NOT NULL, - meta_items TEXT); - CREATE INDEX page_web_log_idx ON page (web_log_id); - CREATE INDEX page_author_idx ON page (author_id); - CREATE INDEX page_permalink_idx ON page (web_log_id, permalink)" - if needsTable "page_permalink" then - "CREATE TABLE page_permalink ( - page_id TEXT NOT NULL REFERENCES page (id), - permalink TEXT NOT NULL, - PRIMARY KEY (page_id, permalink))" - if needsTable "page_revision" then - "CREATE TABLE page_revision ( - page_id TEXT NOT NULL REFERENCES page (id), - as_of TEXT NOT NULL, - revision_text TEXT NOT NULL, - PRIMARY KEY (page_id, as_of))" - - // Post tables - if needsTable "post" then - "CREATE TABLE post ( - id TEXT PRIMARY KEY, - web_log_id TEXT NOT NULL REFERENCES web_log (id), - author_id TEXT NOT NULL REFERENCES web_log_user (id), - status TEXT NOT NULL, - title TEXT NOT NULL, - permalink TEXT NOT NULL, - published_on TEXT, - updated_on TEXT NOT NULL, - template TEXT, - post_text TEXT NOT NULL, - meta_items TEXT, - episode TEXT); - CREATE INDEX post_web_log_idx ON post (web_log_id); - CREATE INDEX post_author_idx ON post (author_id); 
- CREATE INDEX post_status_idx ON post (web_log_id, status, updated_on); - CREATE INDEX post_permalink_idx ON post (web_log_id, permalink)" - if needsTable "post_category" then - "CREATE TABLE post_category ( - post_id TEXT NOT NULL REFERENCES post (id), - category_id TEXT NOT NULL REFERENCES category (id), - PRIMARY KEY (post_id, category_id)); - CREATE INDEX post_category_category_idx ON post_category (category_id)" - if needsTable "post_tag" then - "CREATE TABLE post_tag ( - post_id TEXT NOT NULL REFERENCES post (id), - tag TEXT NOT NULL, - PRIMARY KEY (post_id, tag))" - if needsTable "post_permalink" then - "CREATE TABLE post_permalink ( - post_id TEXT NOT NULL REFERENCES post (id), - permalink TEXT NOT NULL, - PRIMARY KEY (post_id, permalink))" - if needsTable "post_revision" then - "CREATE TABLE post_revision ( - post_id TEXT NOT NULL REFERENCES post (id), - as_of TEXT NOT NULL, - revision_text TEXT NOT NULL, - PRIMARY KEY (post_id, as_of))" - if needsTable "post_comment" then - "CREATE TABLE post_comment ( - id TEXT PRIMARY KEY, - post_id TEXT NOT NULL REFERENCES post(id), - in_reply_to_id TEXT, - name TEXT NOT NULL, - email TEXT NOT NULL, - url TEXT, - status TEXT NOT NULL, - posted_on TEXT NOT NULL, - comment_text TEXT NOT NULL); - CREATE INDEX post_comment_post_idx ON post_comment (post_id)" - - // Tag map table - if needsTable "tag_map" then - "CREATE TABLE tag_map ( - id TEXT PRIMARY KEY, - web_log_id TEXT NOT NULL REFERENCES web_log (id), - tag TEXT NOT NULL, - url_value TEXT NOT NULL); - CREATE INDEX tag_map_web_log_idx ON tag_map (web_log_id)" - - // Uploaded file table - if needsTable "upload" then - "CREATE TABLE upload ( - id TEXT PRIMARY KEY, - web_log_id TEXT NOT NULL REFERENCES web_log (id), - path TEXT NOT NULL, - updated_on TEXT NOT NULL, - data BLOB NOT NULL); - CREATE INDEX upload_web_log_idx ON upload (web_log_id); - CREATE INDEX upload_path_idx ON upload (web_log_id, path)" - - // Database version table - if needsTable "db_version" then - 
"CREATE TABLE db_version (id TEXT PRIMARY KEY); - INSERT INTO db_version VALUES ('v2-rc1')" - } - |> Seq.map (fun sql -> - log.LogInformation $"Creating {(sql.Split ' ')[2]} table..." - cmd.CommandText <- sql - write cmd |> Async.AwaitTask |> Async.RunSynchronously) - |> List.ofSeq - |> ignore + + let jsonTable table = + $"{Query.Definition.ensureTable table}; {Query.Definition.ensureKey table}" + + let tasks = + seq { + // Theme tables + if needsTable Table.Theme then jsonTable Table.Theme + if needsTable Table.ThemeAsset then + $"CREATE TABLE {Table.ThemeAsset} ( + theme_id TEXT NOT NULL, + path TEXT NOT NULL, + updated_on TEXT NOT NULL, + data BLOB NOT NULL, + PRIMARY KEY (theme_id, path))" + + // Web log table + if needsTable Table.WebLog then jsonTable Table.WebLog + + // Category table + if needsTable Table.Category then + $"""{jsonTable Table.Category}; + {Query.Definition.ensureIndexOn Table.Category "web_log" [ nameof Category.Empty.WebLogId ]}""" + + // Web log user table + if needsTable Table.WebLogUser then + $"""{jsonTable Table.WebLogUser}; + {Query.Definition.ensureIndexOn + Table.WebLogUser + "email" + [ nameof WebLogUser.Empty.WebLogId; nameof WebLogUser.Empty.Email ]}""" + + // Page tables + if needsTable Table.Page then + $"""{jsonTable Table.Page}; + {Query.Definition.ensureIndexOn Table.Page "author" [ nameof Page.Empty.AuthorId ]}; + {Query.Definition.ensureIndexOn + Table.Page "permalink" [ nameof Page.Empty.WebLogId; nameof Page.Empty.Permalink ]}""" + if needsTable Table.PageRevision then + $"CREATE TABLE {Table.PageRevision} ( + page_id TEXT NOT NULL, + as_of TEXT NOT NULL, + revision_text TEXT NOT NULL, + PRIMARY KEY (page_id, as_of))" + + // Post tables + if needsTable Table.Post then + $"""{jsonTable Table.Post}; + {Query.Definition.ensureIndexOn Table.Post "author" [ nameof Post.Empty.AuthorId ]}; + {Query.Definition.ensureIndexOn + Table.Post "permalink" [ nameof Post.Empty.WebLogId; nameof Post.Empty.Permalink ]}; + 
{Query.Definition.ensureIndexOn + Table.Post + "status" + [ nameof Post.Empty.WebLogId; nameof Post.Empty.Status; nameof Post.Empty.UpdatedOn ]}""" + // TODO: index categories by post? + if needsTable Table.PostRevision then + $"CREATE TABLE {Table.PostRevision} ( + post_id TEXT NOT NULL, + as_of TEXT NOT NULL, + revision_text TEXT NOT NULL, + PRIMARY KEY (post_id, as_of))" + if needsTable Table.PostComment then + $"""{jsonTable Table.PostComment}; + {Query.Definition.ensureIndexOn Table.PostComment "post" [ nameof Comment.Empty.PostId ]}""" + + // Tag map table + if needsTable Table.TagMap then + $"""{jsonTable Table.TagMap}; + {Query.Definition.ensureIndexOn + Table.TagMap "url" [ nameof TagMap.Empty.WebLogId; nameof TagMap.Empty.UrlValue ]}""" + + // Uploaded file table + if needsTable Table.Upload then + $"CREATE TABLE {Table.Upload} ( + id TEXT PRIMARY KEY, + web_log_id TEXT NOT NULL, + path TEXT NOT NULL, + updated_on TEXT NOT NULL, + data BLOB NOT NULL); + CREATE INDEX idx_{Table.Upload}_path ON {Table.Upload} (web_log_id, path)" + + // Database version table + if needsTable Table.DbVersion then + $"CREATE TABLE {Table.DbVersion} (id TEXT PRIMARY KEY); + INSERT INTO {Table.DbVersion} VALUES ('{Utils.Migration.currentDbVersion}')" + } + |> Seq.map (fun sql -> + log.LogInformation $"""Creating {(sql.Replace("IF NOT EXISTS ", "").Split ' ')[2]} table...""" + conn.customNonQuery sql []) + + let! _ = Task.WhenAll tasks + () } /// Set the database version to the specified version - let setDbVersion version = backgroundTask { - use cmd = conn.CreateCommand () - cmd.CommandText <- $"DELETE FROM db_version; INSERT INTO db_version VALUES ('%s{version}')" - do! 
write cmd - } - + let setDbVersion version = + conn.customNonQuery $"DELETE FROM {Table.DbVersion}; INSERT INTO {Table.DbVersion} VALUES ('%s{version}')" [] + /// Implement the changes between v2-rc1 and v2-rc2 let migrateV2Rc1ToV2Rc2 () = backgroundTask { - let logStep = Utils.logMigrationStep log "v2-rc1 to v2-rc2" + let logStep = Utils.Migration.logStep log "v2-rc1 to v2-rc2" // Move meta items, podcast settings, and episode details to JSON-encoded text fields - use cmd = conn.CreateCommand () + use cmd = conn.CreateCommand() logStep "Adding new columns" cmd.CommandText <- "ALTER TABLE web_log_feed ADD COLUMN podcast TEXT; @@ -242,10 +137,10 @@ type SQLiteData (conn : SqliteConnection, log : ILogger, ser : JsonS logStep "Migrating meta items" let migrateMeta entity = backgroundTask { cmd.CommandText <- $"SELECT * FROM %s{entity}_meta" - use! metaRdr = cmd.ExecuteReaderAsync () + use! metaRdr = cmd.ExecuteReaderAsync() let allMetas = seq { - while metaRdr.Read () do + while metaRdr.Read() do Map.getString $"{entity}_id" metaRdr, { Name = Map.getString "name" metaRdr; Value = Map.getString "value" metaRdr } } |> List.ofSeq @@ -261,118 +156,117 @@ type SQLiteData (conn : SqliteConnection, log : ILogger, ser : JsonS "UPDATE post SET meta_items = @metaItems WHERE id = @postId" - [ cmd.Parameters.AddWithValue ("@metaItems", Utils.serialize ser items) - cmd.Parameters.AddWithValue ("@id", entityId) ] |> ignore - let _ = cmd.ExecuteNonQuery () - cmd.Parameters.Clear ()) + [ cmd.Parameters.AddWithValue("@metaItems", Utils.serialize ser items) + cmd.Parameters.AddWithValue("@id", entityId) ] |> ignore + let _ = cmd.ExecuteNonQuery() + cmd.Parameters.Clear()) } do! migrateMeta "page" do! migrateMeta "post" logStep "Migrating podcasts and episodes" cmd.CommandText <- "SELECT * FROM web_log_feed_podcast" - use! podcastRdr = cmd.ExecuteReaderAsync () + use! 
podcastRdr = cmd.ExecuteReaderAsync() let podcasts = seq { - while podcastRdr.Read () do + while podcastRdr.Read() do CustomFeedId (Map.getString "feed_id" podcastRdr), - { Title = Map.getString "title" podcastRdr - Subtitle = Map.tryString "subtitle" podcastRdr - ItemsInFeed = Map.getInt "items_in_feed" podcastRdr - Summary = Map.getString "summary" podcastRdr - DisplayedAuthor = Map.getString "displayed_author" podcastRdr - Email = Map.getString "email" podcastRdr - ImageUrl = Map.getString "image_url" podcastRdr |> Permalink - AppleCategory = Map.getString "apple_category" podcastRdr - AppleSubcategory = Map.tryString "apple_subcategory" podcastRdr - Explicit = Map.getString "explicit" podcastRdr |> ExplicitRating.parse - DefaultMediaType = Map.tryString "default_media_type" podcastRdr - MediaBaseUrl = Map.tryString "media_base_url" podcastRdr - PodcastGuid = Map.tryGuid "podcast_guid" podcastRdr - FundingUrl = Map.tryString "funding_url" podcastRdr - FundingText = Map.tryString "funding_text" podcastRdr - Medium = Map.tryString "medium" podcastRdr - |> Option.map PodcastMedium.parse - } + { Title = Map.getString "title" podcastRdr + Subtitle = Map.tryString "subtitle" podcastRdr + ItemsInFeed = Map.getInt "items_in_feed" podcastRdr + Summary = Map.getString "summary" podcastRdr + DisplayedAuthor = Map.getString "displayed_author" podcastRdr + Email = Map.getString "email" podcastRdr + ImageUrl = Map.getString "image_url" podcastRdr |> Permalink + AppleCategory = Map.getString "apple_category" podcastRdr + AppleSubcategory = Map.tryString "apple_subcategory" podcastRdr + Explicit = Map.getString "explicit" podcastRdr |> ExplicitRating.Parse + DefaultMediaType = Map.tryString "default_media_type" podcastRdr + MediaBaseUrl = Map.tryString "media_base_url" podcastRdr + PodcastGuid = Map.tryGuid "podcast_guid" podcastRdr + FundingUrl = Map.tryString "funding_url" podcastRdr + FundingText = Map.tryString "funding_text" podcastRdr + Medium = Map.tryString "medium" 
podcastRdr + |> Option.map PodcastMedium.Parse } } |> List.ofSeq - podcastRdr.Close () + podcastRdr.Close() podcasts |> List.iter (fun (feedId, podcast) -> cmd.CommandText <- "UPDATE web_log_feed SET podcast = @podcast WHERE id = @id" - [ cmd.Parameters.AddWithValue ("@podcast", Utils.serialize ser podcast) - cmd.Parameters.AddWithValue ("@id", CustomFeedId.toString feedId) ] |> ignore - let _ = cmd.ExecuteNonQuery () - cmd.Parameters.Clear ()) + [ cmd.Parameters.AddWithValue("@podcast", Utils.serialize ser podcast) + cmd.Parameters.AddWithValue("@id", string feedId) ] |> ignore + let _ = cmd.ExecuteNonQuery() + cmd.Parameters.Clear()) cmd.CommandText <- "SELECT * FROM post_episode" - use! epRdr = cmd.ExecuteReaderAsync () + use! epRdr = cmd.ExecuteReaderAsync() let episodes = seq { - while epRdr.Read () do + while epRdr.Read() do PostId (Map.getString "post_id" epRdr), - { Media = Map.getString "media" epRdr - Length = Map.getLong "length" epRdr - Duration = Map.tryTimeSpan "duration" epRdr - |> Option.map Duration.FromTimeSpan - MediaType = Map.tryString "media_type" epRdr - ImageUrl = Map.tryString "image_url" epRdr - Subtitle = Map.tryString "subtitle" epRdr - Explicit = Map.tryString "explicit" epRdr - |> Option.map ExplicitRating.parse - ChapterFile = Map.tryString "chapter_file" epRdr - ChapterType = Map.tryString "chapter_type" epRdr - TranscriptUrl = Map.tryString "transcript_url" epRdr - TranscriptType = Map.tryString "transcript_type" epRdr - TranscriptLang = Map.tryString "transcript_lang" epRdr - TranscriptCaptions = Map.tryBoolean "transcript_captions" epRdr - SeasonNumber = Map.tryInt "season_number" epRdr - SeasonDescription = Map.tryString "season_description" epRdr - EpisodeNumber = Map.tryString "episode_number" epRdr - |> Option.map System.Double.Parse - EpisodeDescription = Map.tryString "episode_description" epRdr - } + { Media = Map.getString "media" epRdr + Length = Map.getLong "length" epRdr + Duration = Map.tryTimeSpan "duration" epRdr + 
|> Option.map Duration.FromTimeSpan + MediaType = Map.tryString "media_type" epRdr + ImageUrl = Map.tryString "image_url" epRdr + Subtitle = Map.tryString "subtitle" epRdr + Explicit = Map.tryString "explicit" epRdr + |> Option.map ExplicitRating.Parse + Chapters = Map.tryString "chapters" epRdr + |> Option.map (Utils.deserialize ser) + ChapterFile = Map.tryString "chapter_file" epRdr + ChapterType = Map.tryString "chapter_type" epRdr + ChapterWaypoints = None + TranscriptUrl = Map.tryString "transcript_url" epRdr + TranscriptType = Map.tryString "transcript_type" epRdr + TranscriptLang = Map.tryString "transcript_lang" epRdr + TranscriptCaptions = Map.tryBoolean "transcript_captions" epRdr + SeasonNumber = Map.tryInt "season_number" epRdr + SeasonDescription = Map.tryString "season_description" epRdr + EpisodeNumber = Map.tryString "episode_number" epRdr |> Option.map Double.Parse + EpisodeDescription = Map.tryString "episode_description" epRdr } } |> List.ofSeq - epRdr.Close () + epRdr.Close() episodes |> List.iter (fun (postId, episode) -> cmd.CommandText <- "UPDATE post SET episode = @episode WHERE id = @id" - [ cmd.Parameters.AddWithValue ("@episode", Utils.serialize ser episode) - cmd.Parameters.AddWithValue ("@id", PostId.toString postId) ] |> ignore - let _ = cmd.ExecuteNonQuery () - cmd.Parameters.Clear ()) + [ cmd.Parameters.AddWithValue("@episode", Utils.serialize ser episode) + cmd.Parameters.AddWithValue("@id", string postId) ] |> ignore + let _ = cmd.ExecuteNonQuery() + cmd.Parameters.Clear()) logStep "Migrating dates/times" - let inst (dt : System.DateTime) = - System.DateTime (dt.Ticks, System.DateTimeKind.Utc) + let inst (dt: DateTime) = + DateTime(dt.Ticks, DateTimeKind.Utc) |> (Instant.FromDateTimeUtc >> Noda.toSecondsPrecision) // page.updated_on, page.published_on cmd.CommandText <- "SELECT id, updated_on, published_on FROM page" - use! pageRdr = cmd.ExecuteReaderAsync () + use! 
pageRdr = cmd.ExecuteReaderAsync() let toUpdate = seq { - while pageRdr.Read () do + while pageRdr.Read() do Map.getString "id" pageRdr, inst (Map.getDateTime "updated_on" pageRdr), inst (Map.getDateTime "published_on" pageRdr) } |> List.ofSeq - pageRdr.Close () + pageRdr.Close() cmd.CommandText <- "UPDATE page SET updated_on = @updatedOn, published_on = @publishedOn WHERE id = @id" - [ cmd.Parameters.Add ("@id", SqliteType.Text) - cmd.Parameters.Add ("@updatedOn", SqliteType.Text) - cmd.Parameters.Add ("@publishedOn", SqliteType.Text) - ] |> ignore + [ cmd.Parameters.Add("@id", SqliteType.Text) + cmd.Parameters.Add("@updatedOn", SqliteType.Text) + cmd.Parameters.Add("@publishedOn", SqliteType.Text) ] |> ignore toUpdate |> List.iter (fun (pageId, updatedOn, publishedOn) -> cmd.Parameters["@id" ].Value <- pageId cmd.Parameters["@updatedOn" ].Value <- instantParam updatedOn cmd.Parameters["@publishedOn"].Value <- instantParam publishedOn - let _ = cmd.ExecuteNonQuery () + let _ = cmd.ExecuteNonQuery() ()) - cmd.Parameters.Clear () + cmd.Parameters.Clear() // page_revision.as_of cmd.CommandText <- "SELECT * FROM page_revision" - use! pageRevRdr = cmd.ExecuteReaderAsync () + use! 
pageRevRdr = cmd.ExecuteReaderAsync() let toUpdate = seq { - while pageRevRdr.Read () do + while pageRevRdr.Read() do let asOf = Map.getDateTime "as_of" pageRevRdr Map.getString "page_id" pageRevRdr, asOf, inst asOf, Map.getString "revision_text" pageRevRdr } |> List.ofSeq @@ -380,141 +274,135 @@ type SQLiteData (conn : SqliteConnection, log : ILogger, ser : JsonS cmd.CommandText <- "DELETE FROM page_revision WHERE page_id = @pageId AND as_of = @oldAsOf; INSERT INTO page_revision (page_id, as_of, revision_text) VALUES (@pageId, @asOf, @text)" - [ cmd.Parameters.Add ("@pageId", SqliteType.Text) - cmd.Parameters.Add ("@oldAsOf", SqliteType.Text) - cmd.Parameters.Add ("@asOf", SqliteType.Text) - cmd.Parameters.Add ("@text", SqliteType.Text) - ] |> ignore + [ cmd.Parameters.Add("@pageId", SqliteType.Text) + cmd.Parameters.Add("@oldAsOf", SqliteType.Text) + cmd.Parameters.Add("@asOf", SqliteType.Text) + cmd.Parameters.Add("@text", SqliteType.Text) ] |> ignore toUpdate |> List.iter (fun (pageId, oldAsOf, asOf, text) -> cmd.Parameters["@pageId" ].Value <- pageId cmd.Parameters["@oldAsOf"].Value <- oldAsOf cmd.Parameters["@asOf" ].Value <- instantParam asOf cmd.Parameters["@text" ].Value <- text - let _ = cmd.ExecuteNonQuery () + let _ = cmd.ExecuteNonQuery() ()) - cmd.Parameters.Clear () + cmd.Parameters.Clear() // post.updated_on, post.published_on (opt) cmd.CommandText <- "SELECT id, updated_on, published_on FROM post" - use! postRdr = cmd.ExecuteReaderAsync () + use! 
postRdr = cmd.ExecuteReaderAsync() let toUpdate = seq { - while postRdr.Read () do + while postRdr.Read() do Map.getString "id" postRdr, - inst (Map.getDateTime "updated_on" postRdr), + inst (Map.getDateTime "updated_on" postRdr), (Map.tryDateTime "published_on" postRdr |> Option.map inst) } |> List.ofSeq - postRdr.Close () + postRdr.Close() cmd.CommandText <- "UPDATE post SET updated_on = @updatedOn, published_on = @publishedOn WHERE id = @id" - [ cmd.Parameters.Add ("@id", SqliteType.Text) - cmd.Parameters.Add ("@updatedOn", SqliteType.Text) - cmd.Parameters.Add ("@publishedOn", SqliteType.Text) - ] |> ignore + [ cmd.Parameters.Add("@id", SqliteType.Text) + cmd.Parameters.Add("@updatedOn", SqliteType.Text) + cmd.Parameters.Add("@publishedOn", SqliteType.Text) ] |> ignore toUpdate |> List.iter (fun (postId, updatedOn, publishedOn) -> cmd.Parameters["@id" ].Value <- postId cmd.Parameters["@updatedOn" ].Value <- instantParam updatedOn cmd.Parameters["@publishedOn"].Value <- maybeInstant publishedOn - let _ = cmd.ExecuteNonQuery () + let _ = cmd.ExecuteNonQuery() ()) - cmd.Parameters.Clear () + cmd.Parameters.Clear() // post_revision.as_of cmd.CommandText <- "SELECT * FROM post_revision" - use! postRevRdr = cmd.ExecuteReaderAsync () + use! 
postRevRdr = cmd.ExecuteReaderAsync() let toUpdate = seq { - while postRevRdr.Read () do + while postRevRdr.Read() do let asOf = Map.getDateTime "as_of" postRevRdr Map.getString "post_id" postRevRdr, asOf, inst asOf, Map.getString "revision_text" postRevRdr } |> List.ofSeq - postRevRdr.Close () + postRevRdr.Close() cmd.CommandText <- "DELETE FROM post_revision WHERE post_id = @postId AND as_of = @oldAsOf; INSERT INTO post_revision (post_id, as_of, revision_text) VALUES (@postId, @asOf, @text)" - [ cmd.Parameters.Add ("@postId", SqliteType.Text) - cmd.Parameters.Add ("@oldAsOf", SqliteType.Text) - cmd.Parameters.Add ("@asOf", SqliteType.Text) - cmd.Parameters.Add ("@text", SqliteType.Text) - ] |> ignore + [ cmd.Parameters.Add("@postId", SqliteType.Text) + cmd.Parameters.Add("@oldAsOf", SqliteType.Text) + cmd.Parameters.Add("@asOf", SqliteType.Text) + cmd.Parameters.Add("@text", SqliteType.Text) ] |> ignore toUpdate |> List.iter (fun (postId, oldAsOf, asOf, text) -> cmd.Parameters["@postId" ].Value <- postId cmd.Parameters["@oldAsOf"].Value <- oldAsOf cmd.Parameters["@asOf" ].Value <- instantParam asOf cmd.Parameters["@text" ].Value <- text - let _ = cmd.ExecuteNonQuery () + let _ = cmd.ExecuteNonQuery() ()) - cmd.Parameters.Clear () + cmd.Parameters.Clear() // theme_asset.updated_on cmd.CommandText <- "SELECT theme_id, path, updated_on FROM theme_asset" - use! assetRdr = cmd.ExecuteReaderAsync () + use! 
assetRdr = cmd.ExecuteReaderAsync() let toUpdate = seq { - while assetRdr.Read () do + while assetRdr.Read() do Map.getString "theme_id" assetRdr, Map.getString "path" assetRdr, inst (Map.getDateTime "updated_on" assetRdr) } |> List.ofSeq assetRdr.Close () cmd.CommandText <- "UPDATE theme_asset SET updated_on = @updatedOn WHERE theme_id = @themeId AND path = @path" - [ cmd.Parameters.Add ("@updatedOn", SqliteType.Text) - cmd.Parameters.Add ("@themeId", SqliteType.Text) - cmd.Parameters.Add ("@path", SqliteType.Text) - ] |> ignore + [ cmd.Parameters.Add("@updatedOn", SqliteType.Text) + cmd.Parameters.Add("@themeId", SqliteType.Text) + cmd.Parameters.Add("@path", SqliteType.Text) ] |> ignore toUpdate |> List.iter (fun (themeId, path, updatedOn) -> cmd.Parameters["@themeId" ].Value <- themeId cmd.Parameters["@path" ].Value <- path cmd.Parameters["@updatedOn"].Value <- instantParam updatedOn - let _ = cmd.ExecuteNonQuery () + let _ = cmd.ExecuteNonQuery() ()) - cmd.Parameters.Clear () + cmd.Parameters.Clear() // upload.updated_on cmd.CommandText <- "SELECT id, updated_on FROM upload" - use! upRdr = cmd.ExecuteReaderAsync () + use! 
upRdr = cmd.ExecuteReaderAsync() let toUpdate = seq { - while upRdr.Read () do + while upRdr.Read() do Map.getString "id" upRdr, inst (Map.getDateTime "updated_on" upRdr) } |> List.ofSeq upRdr.Close () cmd.CommandText <- "UPDATE upload SET updated_on = @updatedOn WHERE id = @id" - [ cmd.Parameters.Add ("@updatedOn", SqliteType.Text) - cmd.Parameters.Add ("@id", SqliteType.Text) - ] |> ignore + [ cmd.Parameters.Add("@updatedOn", SqliteType.Text) + cmd.Parameters.Add("@id", SqliteType.Text) ] |> ignore toUpdate |> List.iter (fun (upId, updatedOn) -> cmd.Parameters["@id" ].Value <- upId cmd.Parameters["@updatedOn"].Value <- instantParam updatedOn - let _ = cmd.ExecuteNonQuery () + let _ = cmd.ExecuteNonQuery() ()) - cmd.Parameters.Clear () + cmd.Parameters.Clear() // web_log_user.created_on, web_log_user.last_seen_on (opt) cmd.CommandText <- "SELECT id, created_on, last_seen_on FROM web_log_user" - use! userRdr = cmd.ExecuteReaderAsync () + use! userRdr = cmd.ExecuteReaderAsync() let toUpdate = seq { - while userRdr.Read () do + while userRdr.Read() do Map.getString "id" userRdr, inst (Map.getDateTime "created_on" userRdr), (Map.tryDateTime "last_seen_on" userRdr |> Option.map inst) } |> List.ofSeq - userRdr.Close () + userRdr.Close() cmd.CommandText <- "UPDATE web_log_user SET created_on = @createdOn, last_seen_on = @lastSeenOn WHERE id = @id" - [ cmd.Parameters.Add ("@id", SqliteType.Text) - cmd.Parameters.Add ("@createdOn", SqliteType.Text) - cmd.Parameters.Add ("@lastSeenOn", SqliteType.Text) - ] |> ignore + [ cmd.Parameters.Add("@id", SqliteType.Text) + cmd.Parameters.Add("@createdOn", SqliteType.Text) + cmd.Parameters.Add("@lastSeenOn", SqliteType.Text) ] |> ignore toUpdate |> List.iter (fun (userId, createdOn, lastSeenOn) -> cmd.Parameters["@id" ].Value <- userId cmd.Parameters["@createdOn" ].Value <- instantParam createdOn cmd.Parameters["@lastSeenOn"].Value <- maybeInstant lastSeenOn - let _ = cmd.ExecuteNonQuery () + let _ = cmd.ExecuteNonQuery() ()) - 
cmd.Parameters.Clear () + cmd.Parameters.Clear() - conn.Close () - conn.Open () + conn.Close() + conn.Open() logStep "Dropping old tables and columns" cmd.CommandText <- @@ -531,58 +419,57 @@ type SQLiteData (conn : SqliteConnection, log : ILogger, ser : JsonS /// Migrate from v2-rc2 to v2 let migrateV2Rc2ToV2 () = backgroundTask { - Utils.logMigrationStep log "v2-rc2 to v2" "Setting database version; no migration required" + Utils.Migration.logStep log "v2-rc2 to v2" "Setting database version; no migration required" do! setDbVersion "v2" } + /// Migrate from v2 to v2.1 + let migrateV2ToV2point1 () = backgroundTask { + let! webLogs = + Custom.list $"SELECT url_base, slug FROM {Table.WebLog}" [] (fun rdr -> rdr.GetString(0), rdr.GetString(1)) + Utils.Migration.backupAndRestoreRequired log "v2" "v2.1" webLogs + } + /// Migrate data among versions (up only) let migrate version = backgroundTask { - - match version with - | Some v when v = "v2" -> () - | Some v when v = "v2-rc2" -> do! migrateV2Rc2ToV2 () - | Some v when v = "v2-rc1" -> do! migrateV2Rc1ToV2Rc2 () - | Some _ - | None -> - log.LogWarning $"Unknown database version; assuming {Utils.currentDbVersion}" - do! setDbVersion Utils.currentDbVersion - } + let mutable v = defaultArg version "" + if v = "v2-rc1" then + do! migrateV2Rc1ToV2Rc2 () + v <- "v2-rc2" + + if v = "v2-rc2" then + do! migrateV2Rc2ToV2 () + v <- "v2" + + if v = "v2" then + do! migrateV2ToV2point1 () + v <- "v2.1" + + if v <> Utils.Migration.currentDbVersion then + log.LogWarning $"Unknown database version; assuming {Utils.Migration.currentDbVersion}" + do! setDbVersion Utils.Migration.currentDbVersion + } + /// The connection for this instance member _.Conn = conn - /// Make a SQLite connection ready to execute commends - static member setUpConnection (conn : SqliteConnection) = backgroundTask { - do! conn.OpenAsync () - use cmd = conn.CreateCommand () - cmd.CommandText <- "PRAGMA foreign_keys = TRUE" - let! 
_ = cmd.ExecuteNonQueryAsync () - () - } - interface IData with - member _.Category = SQLiteCategoryData conn - member _.Page = SQLitePageData (conn, ser) - member _.Post = SQLitePostData (conn, ser) - member _.TagMap = SQLiteTagMapData conn - member _.Theme = SQLiteThemeData conn - member _.ThemeAsset = SQLiteThemeAssetData conn - member _.Upload = SQLiteUploadData conn - member _.WebLog = SQLiteWebLogData (conn, ser) - member _.WebLogUser = SQLiteWebLogUserData conn + member _.Category = SQLiteCategoryData (conn, ser, log) + member _.Page = SQLitePageData (conn, log) + member _.Post = SQLitePostData (conn, log) + member _.TagMap = SQLiteTagMapData (conn, log) + member _.Theme = SQLiteThemeData (conn, log) + member _.ThemeAsset = SQLiteThemeAssetData (conn, log) + member _.Upload = SQLiteUploadData (conn, log) + member _.WebLog = SQLiteWebLogData (conn, log) + member _.WebLogUser = SQLiteWebLogUserData (conn, log) member _.Serializer = ser member _.StartUp () = backgroundTask { do! ensureTables () - - use cmd = conn.CreateCommand () - cmd.CommandText <- "SELECT id FROM db_version" - use! rdr = cmd.ExecuteReaderAsync () - let version = if rdr.Read () then Some (Map.getString "id" rdr) else None - match version with - | Some v when v = "v2-rc2" -> () - | Some _ - | None -> do! migrate version + let! version = conn.customSingle $"SELECT id FROM {Table.DbVersion}" [] _.GetString(0) + do! 
migrate version } diff --git a/src/MyWebLog.Data/Utils.fs b/src/MyWebLog.Data/Utils.fs index 9f08592..3432d34 100644 --- a/src/MyWebLog.Data/Utils.fs +++ b/src/MyWebLog.Data/Utils.fs @@ -5,54 +5,76 @@ module internal MyWebLog.Data.Utils open MyWebLog open MyWebLog.ViewModels -/// The current database version -let currentDbVersion = "v2" - /// Create a category hierarchy from the given list of categories -let rec orderByHierarchy (cats : Category list) parentId slugBase parentNames = seq { +let rec orderByHierarchy (cats: Category list) parentId slugBase parentNames = seq { for cat in cats |> List.filter (fun c -> c.ParentId = parentId) do let fullSlug = (match slugBase with Some it -> $"{it}/" | None -> "") + cat.Slug - { Id = CategoryId.toString cat.Id + { Id = string cat.Id Slug = fullSlug Name = cat.Name Description = cat.Description ParentNames = Array.ofList parentNames // Post counts are filled on a second pass - PostCount = 0 - } + PostCount = 0 } yield! orderByHierarchy cats (Some cat.Id) (Some fullSlug) ([ cat.Name ] |> List.append parentNames) } /// Get lists of items removed from and added to the given lists -let diffLists<'T, 'U when 'U : equality> oldItems newItems (f : 'T -> 'U) = +let diffLists<'T, 'U when 'U: equality> oldItems newItems (f: 'T -> 'U) = let diff compList = fun item -> not (compList |> List.exists (fun other -> f item = f other)) List.filter (diff newItems) oldItems, List.filter (diff oldItems) newItems -/// Find meta items added and removed -let diffMetaItems (oldItems : MetaItem list) newItems = - diffLists oldItems newItems (fun item -> $"{item.Name}|{item.Value}") - -/// Find the permalinks added and removed -let diffPermalinks oldLinks newLinks = - diffLists oldLinks newLinks Permalink.toString - /// Find the revisions added and removed -let diffRevisions oldRevs newRevs = - diffLists oldRevs newRevs (fun (rev : Revision) -> $"{rev.AsOf.ToUnixTimeTicks ()}|{MarkupText.toString rev.Text}") +let diffRevisions (oldRevs: Revision 
list) newRevs = + diffLists oldRevs newRevs (fun rev -> $"{rev.AsOf.ToUnixTimeTicks()}|{rev.Text}") open MyWebLog.Converters open Newtonsoft.Json /// Serialize an object to JSON -let serialize<'T> ser (item : 'T) = - JsonConvert.SerializeObject (item, Json.settings ser) +let serialize<'T> ser (item: 'T) = + JsonConvert.SerializeObject(item, Json.settings ser) /// Deserialize a JSON string -let deserialize<'T> (ser : JsonSerializer) value = - JsonConvert.DeserializeObject<'T> (value, Json.settings ser) +let deserialize<'T> (ser: JsonSerializer) value = + JsonConvert.DeserializeObject<'T>(value, Json.settings ser) -open Microsoft.Extensions.Logging +open BitBadger.Documents -/// Log a migration step -let logMigrationStep<'T> (log : ILogger<'T>) migration message = - log.LogInformation $"Migrating %s{migration}: %s{message}" +/// Create a document serializer using the given JsonSerializer +let createDocumentSerializer ser = + { new IDocumentSerializer with + member _.Serialize<'T>(it: 'T) : string = serialize ser it + member _.Deserialize<'T>(it: string) : 'T = deserialize ser it + } + +/// Data migration utilities +module Migration = + + open Microsoft.Extensions.Logging + + /// The current database version + let currentDbVersion = "v2.1" + + /// Log a migration step + let logStep<'T> (log: ILogger<'T>) migration message = + log.LogInformation $"Migrating %s{migration}: %s{message}" + + /// Notify the user that a backup/restore + let backupAndRestoreRequired log oldVersion newVersion webLogs = + logStep log $"%s{oldVersion} to %s{newVersion}" "Requires Using Action" + + [ "** MANUAL DATABASE UPGRADE REQUIRED **"; "" + $"The data structure changed between {oldVersion} and {newVersion}." + "To migrate your data:" + $" - Use a {oldVersion} executable to back up each web log" + " - Drop all tables from the database" + " - Use this executable to restore each backup"; "" + "Commands to back up all web logs:" + yield! 
webLogs |> List.map (fun (url, slug) -> $"./myWebLog backup %s{url} {oldVersion}.%s{slug}.json") ] + |> String.concat "\n" + |> log.LogWarning + + log.LogCritical "myWebLog will now exit" + exit 1 |> ignore + \ No newline at end of file diff --git a/src/MyWebLog.Domain/DataTypes.fs b/src/MyWebLog.Domain/DataTypes.fs index 87b9a1c..65c2325 100644 --- a/src/MyWebLog.Domain/DataTypes.fs +++ b/src/MyWebLog.Domain/DataTypes.fs @@ -1,485 +1,440 @@ namespace MyWebLog -open System open MyWebLog open NodaTime /// A category under which a post may be identified [] -type Category = - { /// The ID of the category - Id : CategoryId +type Category = { + /// The ID of the category + Id: CategoryId - /// The ID of the web log to which the category belongs - WebLogId : WebLogId + /// The ID of the web log to which the category belongs + WebLogId: WebLogId - /// The displayed name - Name : string + /// The displayed name + Name: string - /// The slug (used in category URLs) - Slug : string + /// The slug (used in category URLs) + Slug: string - /// A longer description of the category - Description : string option + /// A longer description of the category + Description: string option - /// The parent ID of this category (if a subcategory) - ParentId : CategoryId option - } - -/// Functions to support categories -module Category = + /// The parent ID of this category (if a subcategory) + ParentId: CategoryId option +} with /// An empty category - let empty = - { Id = CategoryId.empty - WebLogId = WebLogId.empty - Name = "" - Slug = "" - Description = None - ParentId = None - } + static member Empty = + { Id = CategoryId.Empty + WebLogId = WebLogId.Empty + Name = "" + Slug = "" + Description = None + ParentId = None } /// A comment on a post [] -type Comment = - { /// The ID of the comment - Id : CommentId +type Comment = { + /// The ID of the comment + Id: CommentId - /// The ID of the post to which this comment applies - PostId : PostId + /// The ID of the post to which this 
comment applies + PostId: PostId - /// The ID of the comment to which this comment is a reply - InReplyToId : CommentId option + /// The ID of the comment to which this comment is a reply + InReplyToId: CommentId option - /// The name of the commentor - Name : string + /// The name of the commentor + Name: string - /// The e-mail address of the commentor - Email : string + /// The e-mail address of the commentor + Email: string - /// The URL of the commentor's personal website - Url : string option + /// The URL of the commentor's personal website + Url: string option - /// The status of the comment - Status : CommentStatus + /// The status of the comment + Status: CommentStatus - /// When the comment was posted - PostedOn : Instant + /// When the comment was posted + PostedOn: Instant - /// The text of the comment - Text : string - } - -/// Functions to support comments -module Comment = + /// The text of the comment + Text: string +} with /// An empty comment - let empty = - { Id = CommentId.empty - PostId = PostId.empty - InReplyToId = None - Name = "" - Email = "" - Url = None - Status = Pending - PostedOn = Noda.epoch - Text = "" - } + static member Empty = + { Id = CommentId.Empty + PostId = PostId.Empty + InReplyToId = None + Name = "" + Email = "" + Url = None + Status = Pending + PostedOn = Noda.epoch + Text = "" } /// A page (text not associated with a date/time) [] -type Page = - { /// The ID of this page - Id : PageId +type Page = { + /// The ID of this page + Id: PageId - /// The ID of the web log to which this page belongs - WebLogId : WebLogId + /// The ID of the web log to which this page belongs + WebLogId: WebLogId - /// The ID of the author of this page - AuthorId : WebLogUserId + /// The ID of the author of this page + AuthorId: WebLogUserId - /// The title of the page - Title : string + /// The title of the page + Title: string - /// The link at which this page is displayed - Permalink : Permalink + /// The link at which this page is displayed 
+ Permalink: Permalink - /// When this page was published - PublishedOn : Instant + /// When this page was published + PublishedOn: Instant - /// When this page was last updated - UpdatedOn : Instant + /// When this page was last updated + UpdatedOn: Instant - /// Whether this page shows as part of the web log's navigation - IsInPageList : bool + /// Whether this page shows as part of the web log's navigation + IsInPageList: bool - /// The template to use when rendering this page - Template : string option + /// The template to use when rendering this page + Template: string option - /// The current text of the page - Text : string + /// The current text of the page + Text: string - /// Metadata for this page - Metadata : MetaItem list - - /// Permalinks at which this page may have been previously served (useful for migrated content) - PriorPermalinks : Permalink list + /// Metadata for this page + Metadata: MetaItem list + + /// Permalinks at which this page may have been previously served (useful for migrated content) + PriorPermalinks: Permalink list - /// Revisions of this page - Revisions : Revision list - } - -/// Functions to support pages -module Page = + /// Revisions of this page + Revisions: Revision list +} with /// An empty page - let empty = - { Id = PageId.empty - WebLogId = WebLogId.empty - AuthorId = WebLogUserId.empty - Title = "" - Permalink = Permalink.empty - PublishedOn = Noda.epoch - UpdatedOn = Noda.epoch - IsInPageList = false - Template = None - Text = "" - Metadata = [] - PriorPermalinks = [] - Revisions = [] - } + static member Empty = + { Id = PageId.Empty + WebLogId = WebLogId.Empty + AuthorId = WebLogUserId.Empty + Title = "" + Permalink = Permalink.Empty + PublishedOn = Noda.epoch + UpdatedOn = Noda.epoch + IsInPageList = false + Template = None + Text = "" + Metadata = [] + PriorPermalinks = [] + Revisions = [] } /// A web log post [] -type Post = - { /// The ID of this post - Id : PostId +type Post = { + /// The ID of this post + 
Id: PostId - /// The ID of the web log to which this post belongs - WebLogId : WebLogId + /// The ID of the web log to which this post belongs + WebLogId: WebLogId - /// The ID of the author of this post - AuthorId : WebLogUserId + /// The ID of the author of this post + AuthorId: WebLogUserId - /// The status - Status : PostStatus + /// The status + Status: PostStatus - /// The title - Title : string + /// The title + Title: string - /// The link at which the post resides - Permalink : Permalink + /// The link at which the post resides + Permalink: Permalink - /// The instant on which the post was originally published - PublishedOn : Instant option + /// The instant on which the post was originally published + PublishedOn: Instant option - /// The instant on which the post was last updated - UpdatedOn : Instant + /// The instant on which the post was last updated + UpdatedOn: Instant - /// The template to use in displaying the post - Template : string option - - /// The text of the post in HTML (ready to display) format - Text : string + /// The template to use in displaying the post + Template: string option + + /// The text of the post in HTML (ready to display) format + Text: string - /// The Ids of the categories to which this is assigned - CategoryIds : CategoryId list + /// The Ids of the categories to which this is assigned + CategoryIds: CategoryId list - /// The tags for the post - Tags : string list + /// The tags for the post + Tags: string list - /// Podcast episode information for this post - Episode : Episode option - - /// Metadata for the post - Metadata : MetaItem list - - /// Permalinks at which this post may have been previously served (useful for migrated content) - PriorPermalinks : Permalink list + /// Podcast episode information for this post + Episode: Episode option + + /// Metadata for the post + Metadata: MetaItem list + + /// Permalinks at which this post may have been previously served (useful for migrated content) + PriorPermalinks: 
Permalink list - /// The revisions for this post - Revisions : Revision list - } - -/// Functions to support posts -module Post = + /// The revisions for this post + Revisions: Revision list +} with /// An empty post - let empty = - { Id = PostId.empty - WebLogId = WebLogId.empty - AuthorId = WebLogUserId.empty - Status = Draft - Title = "" - Permalink = Permalink.empty - PublishedOn = None - UpdatedOn = Noda.epoch - Text = "" - Template = None - CategoryIds = [] - Tags = [] - Episode = None - Metadata = [] - PriorPermalinks = [] - Revisions = [] - } + static member Empty = + { Id = PostId.Empty + WebLogId = WebLogId.Empty + AuthorId = WebLogUserId.Empty + Status = Draft + Title = "" + Permalink = Permalink.Empty + PublishedOn = None + UpdatedOn = Noda.epoch + Text = "" + Template = None + CategoryIds = [] + Tags = [] + Episode = None + Metadata = [] + PriorPermalinks = [] + Revisions = [] } /// A mapping between a tag and its URL value, used to translate restricted characters (ex. "#1" -> "number-1") -type TagMap = - { /// The ID of this tag mapping - Id : TagMapId - - /// The ID of the web log to which this tag mapping belongs - WebLogId : WebLogId - - /// The tag which should be mapped to a different value in links - Tag : string - - /// The value by which the tag should be linked - UrlValue : string - } - -/// Functions to support tag mappings -module TagMap = +[] +type TagMap = { + /// The ID of this tag mapping + Id: TagMapId + + /// The ID of the web log to which this tag mapping belongs + WebLogId: WebLogId + + /// The tag which should be mapped to a different value in links + Tag: string + + /// The value by which the tag should be linked + UrlValue: string +} with /// An empty tag mapping - let empty = - { Id = TagMapId.empty - WebLogId = WebLogId.empty - Tag = "" - UrlValue = "" - } + static member Empty = + { Id = TagMapId.Empty; WebLogId = WebLogId.Empty; Tag = ""; UrlValue = "" } /// A theme -type Theme = - { /// The ID / path of the theme - Id : 
ThemeId - - /// A long name of the theme - Name : string - - /// The version of the theme - Version : string - - /// The templates for this theme - Templates: ThemeTemplate list - } - -/// Functions to support themes -module Theme = +[] +type Theme = { + /// The ID / path of the theme + Id: ThemeId + + /// A long name of the theme + Name: string + + /// The version of the theme + Version: string + + /// The templates for this theme + Templates: ThemeTemplate list +} with /// An empty theme - let empty = - { Id = ThemeId "" - Name = "" - Version = "" - Templates = [] - } + static member Empty = + { Id = ThemeId.Empty; Name = ""; Version = ""; Templates = [] } /// A theme asset (a file served as part of a theme, at /themes/[theme]/[asset-path]) -type ThemeAsset = - { - /// The ID of the asset (consists of theme and path) - Id : ThemeAssetId - - /// The updated date (set from the file date from the ZIP archive) - UpdatedOn : Instant - - /// The data for the asset - Data : byte[] - } - -/// Functions to support theme assets -module ThemeAsset = +[] +type ThemeAsset = { + /// The ID of the asset (consists of theme and path) + Id: ThemeAssetId + + /// The updated date (set from the file date from the ZIP archive) + UpdatedOn: Instant + + /// The data for the asset + Data: byte array +} with /// An empty theme asset - let empty = - { Id = ThemeAssetId (ThemeId "", "") - UpdatedOn = Noda.epoch - Data = [||] - } + static member Empty = + { Id = ThemeAssetId.Empty; UpdatedOn = Noda.epoch; Data = [||] } /// An uploaded file -type Upload = - { /// The ID of the upload - Id : UploadId - - /// The ID of the web log to which this upload belongs - WebLogId : WebLogId - - /// The link at which this upload is served - Path : Permalink - - /// The updated date/time for this upload - UpdatedOn : Instant - - /// The data for the upload - Data : byte[] - } - -/// Functions to support uploaded files -module Upload = +[] +type Upload = { + /// The ID of the upload + Id: UploadId + + /// 
The ID of the web log to which this upload belongs + WebLogId: WebLogId + + /// The link at which this upload is served + Path: Permalink + + /// The updated date/time for this upload + UpdatedOn: Instant + + /// The data for the upload + Data: byte array +} with /// An empty upload - let empty = - { Id = UploadId.empty - WebLogId = WebLogId.empty - Path = Permalink.empty - UpdatedOn = Noda.epoch - Data = [||] - } + static member Empty = + { Id = UploadId.Empty; WebLogId = WebLogId.Empty; Path = Permalink.Empty; UpdatedOn = Noda.epoch; Data = [||] } +open Newtonsoft.Json + /// A web log [] -type WebLog = - { /// The ID of the web log - Id : WebLogId +type WebLog = { + /// The ID of the web log + Id: WebLogId - /// The name of the web log - Name : string + /// The name of the web log + Name: string - /// The slug of the web log - Slug : string - - /// A subtitle for the web log - Subtitle : string option + /// The slug of the web log + Slug: string + + /// A subtitle for the web log + Subtitle: string option - /// The default page ("posts" or a page Id) - DefaultPage : string + /// The default page ("posts" or a page Id) + DefaultPage: string - /// The number of posts to display on pages of posts - PostsPerPage : int + /// The number of posts to display on pages of posts + PostsPerPage: int - /// The ID of the theme (also the path within /themes) - ThemeId : ThemeId + /// The ID of the theme (also the path within /themes) + ThemeId: ThemeId - /// The URL base - UrlBase : string + /// The URL base + UrlBase: string - /// The time zone in which dates/times should be displayed - TimeZone : string - - /// The RSS options for this web log - Rss : RssOptions - - /// Whether to automatically load htmx - AutoHtmx : bool - - /// Where uploads are placed - Uploads : UploadDestination - } + /// The time zone in which dates/times should be displayed + TimeZone: string + + /// The RSS options for this web log + Rss: RssOptions + + /// Whether to automatically load htmx + 
AutoHtmx: bool + + /// Where uploads are placed + Uploads: UploadDestination -/// Functions to support web logs -module WebLog = + /// Redirect rules for this weblog + RedirectRules: RedirectRule list +} with /// An empty web log - let empty = - { Id = WebLogId.empty - Name = "" - Slug = "" - Subtitle = None - DefaultPage = "" - PostsPerPage = 10 - ThemeId = ThemeId "default" - UrlBase = "" - TimeZone = "" - Rss = RssOptions.empty - AutoHtmx = false - Uploads = Database - } + static member Empty = + { Id = WebLogId.Empty + Name = "" + Slug = "" + Subtitle = None + DefaultPage = "" + PostsPerPage = 10 + ThemeId = ThemeId "default" + UrlBase = "" + TimeZone = "" + Rss = RssOptions.Empty + AutoHtmx = false + Uploads = Database + RedirectRules = [] } - /// Get the host (including scheme) and extra path from the URL base - let hostAndPath webLog = - let scheme = webLog.UrlBase.Split "://" - let host = scheme[1].Split "/" - $"{scheme[0]}://{host[0]}", if host.Length > 1 then $"""/{String.Join ("/", host |> Array.skip 1)}""" else "" + /// Any extra path where this web log is hosted (blank if web log is hosted at the root of the domain) + [] + member this.ExtraPath = + let pathParts = this.UrlBase.Split "://" + if pathParts.Length < 2 then + "" + else + let path = pathParts[1].Split "/" + if path.Length > 1 then $"""/{path |> Array.skip 1 |> String.concat "/"}""" else "" /// Generate an absolute URL for the given link - let absoluteUrl webLog permalink = - $"{webLog.UrlBase}/{Permalink.toString permalink}" - + member this.AbsoluteUrl(permalink: Permalink) = + $"{this.UrlBase}/{permalink}" + /// Generate a relative URL for the given link - let relativeUrl webLog permalink = - let _, leadPath = hostAndPath webLog - $"{leadPath}/{Permalink.toString permalink}" + member this.RelativeUrl(permalink: Permalink) = + $"{this.ExtraPath}/{permalink}" /// Convert an Instant (UTC reference) to the web log's local date/time - let localTime webLog (date : Instant) = - match 
DateTimeZoneProviders.Tzdb[webLog.TimeZone] with - | null -> date.ToDateTimeUtc () - | tz -> date.InZone(tz).ToDateTimeUnspecified () + member this.LocalTime(date: Instant) = + DateTimeZoneProviders.Tzdb.GetZoneOrNull this.TimeZone + |> Option.ofObj + |> Option.map (fun tz -> date.InZone(tz).ToDateTimeUnspecified()) + |> Option.defaultValue (date.ToDateTimeUtc()) /// A user of the web log [] -type WebLogUser = - { /// The ID of the user - Id : WebLogUserId +type WebLogUser = { + /// The ID of the user + Id: WebLogUserId - /// The ID of the web log to which this user belongs - WebLogId : WebLogId + /// The ID of the web log to which this user belongs + WebLogId: WebLogId - /// The user name (e-mail address) - Email : string + /// The user name (e-mail address) + Email: string - /// The user's first name - FirstName : string + /// The user's first name + FirstName: string - /// The user's last name - LastName : string + /// The user's last name + LastName: string - /// The user's preferred name - PreferredName : string + /// The user's preferred name + PreferredName: string - /// The hash of the user's password - PasswordHash : string + /// The hash of the user's password + PasswordHash: string - /// The URL of the user's personal site - Url : string option + /// The URL of the user's personal site + Url: string option - /// The user's access level - AccessLevel : AccessLevel - - /// When the user was created - CreatedOn : Instant - - /// When the user last logged on - LastSeenOn : Instant option - } - -/// Functions to support web log users -module WebLogUser = + /// The user's access level + AccessLevel: AccessLevel + + /// When the user was created + CreatedOn: Instant + + /// When the user last logged on + LastSeenOn: Instant option +} with /// An empty web log user - let empty = - { Id = WebLogUserId.empty - WebLogId = WebLogId.empty - Email = "" - FirstName = "" - LastName = "" - PreferredName = "" - PasswordHash = "" - Url = None - AccessLevel = Author - 
CreatedOn = Noda.epoch - LastSeenOn = None - } + static member Empty = + { Id = WebLogUserId.Empty + WebLogId = WebLogId.Empty + Email = "" + FirstName = "" + LastName = "" + PreferredName = "" + PasswordHash = "" + Url = None + AccessLevel = Author + CreatedOn = Noda.epoch + LastSeenOn = None } /// Get the user's displayed name - let displayName user = - let name = - seq { match user.PreferredName with "" -> user.FirstName | n -> n; " "; user.LastName } - |> Seq.reduce (+) - name.Trim () - - /// Does a user have the required access level? - let hasAccess level user = - AccessLevel.hasAccess level user.AccessLevel + [] + member this.DisplayName = + (seq { (match this.PreferredName with "" -> this.FirstName | n -> n); " "; this.LastName } + |> Seq.reduce (+)).Trim() diff --git a/src/MyWebLog.Domain/MyWebLog.Domain.fsproj b/src/MyWebLog.Domain/MyWebLog.Domain.fsproj index 9511caa..fa34b80 100644 --- a/src/MyWebLog.Domain/MyWebLog.Domain.fsproj +++ b/src/MyWebLog.Domain/MyWebLog.Domain.fsproj @@ -7,9 +7,11 @@ - - - + + + + + diff --git a/src/MyWebLog.Domain/SupportTypes.fs b/src/MyWebLog.Domain/SupportTypes.fs index 4753583..2ba8266 100644 --- a/src/MyWebLog.Domain/SupportTypes.fs +++ b/src/MyWebLog.Domain/SupportTypes.fs @@ -1,42 +1,48 @@ namespace MyWebLog open System +open Markdig open NodaTime /// Support functions for domain definition [] module private Helpers = + open Markdown.ColorCode + /// Create a new ID (short GUID) // https://www.madskristensen.net/blog/A-shorter-and-URL-friendly-GUID let newId () = - Convert.ToBase64String(Guid.NewGuid().ToByteArray ()).Replace('/', '_').Replace('+', '-').Substring (0, 22) + Convert.ToBase64String(Guid.NewGuid().ToByteArray()).Replace('/', '_').Replace('+', '-')[..21] + + /// Pipeline with most extensions enabled + let markdownPipeline = MarkdownPipelineBuilder().UseSmartyPants().UseAdvancedExtensions().UseColorCode().Build() /// Functions to support NodaTime manipulation module Noda = /// The clock to use when getting 
"now" (will make mutable for testing) - let clock : IClock = SystemClock.Instance + let clock: IClock = SystemClock.Instance /// The Unix epoch let epoch = Instant.FromUnixTimeSeconds 0L - /// Truncate an instant to remove fractional seconds - let toSecondsPrecision (value : Instant) = - Instant.FromUnixTimeSeconds (value.ToUnixTimeSeconds ()) + let toSecondsPrecision (value: Instant) = + Instant.FromUnixTimeSeconds(value.ToUnixTimeSeconds()) /// The current Instant, with fractional seconds truncated - let now () = - toSecondsPrecision (clock.GetCurrentInstant ()) + let now = + clock.GetCurrentInstant >> toSecondsPrecision /// Convert a date/time to an Instant with whole seconds - let fromDateTime (dt : DateTime) = - toSecondsPrecision (Instant.FromDateTimeUtc (DateTime (dt.Ticks, DateTimeKind.Utc))) + let fromDateTime (dt: DateTime) = + Instant.FromDateTimeUtc(DateTime(dt.Ticks, DateTimeKind.Utc)) |> toSecondsPrecision /// A user's access level +[] type AccessLevel = /// The user may create and publish posts and edit the ones they have created | Author @@ -46,74 +52,71 @@ type AccessLevel = | WebLogAdmin /// The user may manage themes (which affects all web logs for an installation) | Administrator - -/// Functions to support access levels -module AccessLevel = - - /// Weightings for access levels - let private weights = - [ Author, 10 - Editor, 20 - WebLogAdmin, 30 - Administrator, 40 - ] - |> Map.ofList - - /// Convert an access level to its string representation - let toString = - function - | Author -> "Author" - | Editor -> "Editor" - | WebLogAdmin -> "WebLogAdmin" - | Administrator -> "Administrator" /// Parse an access level from its string representation - let parse it = - match it with - | "Author" -> Author - | "Editor" -> Editor - | "WebLogAdmin" -> WebLogAdmin + static member Parse level = + match level with + | "Author" -> Author + | "Editor" -> Editor + | "WebLogAdmin" -> WebLogAdmin | "Administrator" -> Administrator - | _ -> invalidOp $"{it} is not 
a valid access level" + | _ -> invalidArg (nameof level) $"{level} is not a valid access level" + + /// The string representation of this access level + override this.ToString() = + match this with + | Author -> "Author" + | Editor -> "Editor" + | WebLogAdmin -> "WebLogAdmin" + | Administrator -> "Administrator" /// Does a given access level allow an action that requires a certain access level? - let hasAccess needed held = - weights[needed] <= weights[held] + member this.HasAccess(needed: AccessLevel) = + let weights = + [ Author, 10 + Editor, 20 + WebLogAdmin, 30 + Administrator, 40 ] + |> Map.ofList + weights[needed] <= weights[this] /// An identifier for a category -type CategoryId = CategoryId of string - -/// Functions to support category IDs -module CategoryId = +[] +type CategoryId = + | CategoryId of string /// An empty category ID - let empty = CategoryId "" - - /// Convert a category ID to a string - let toString = function CategoryId ci -> ci + static member Empty = CategoryId "" /// Create a new category ID - let create () = CategoryId (newId ()) + static member Create = + newId >> CategoryId + + /// The string representation of this category ID + override this.ToString() = + match this with CategoryId it -> it /// An identifier for a comment -type CommentId = CommentId of string - -/// Functions to support comment IDs -module CommentId = +[] +type CommentId = + | CommentId of string /// An empty comment ID - let empty = CommentId "" - - /// Convert a comment ID to a string - let toString = function CommentId ci -> ci + static member Empty = CommentId "" /// Create a new comment ID - let create () = CommentId (newId ()) + static member Create = + newId >> CommentId + + /// The string representation of this comment ID + override this.ToString() = + match this with CommentId it -> it /// Statuses for post comments +[] type CommentStatus = /// The comment is approved | Approved @@ -122,133 +125,176 @@ type CommentStatus = /// The comment was unsolicited 
and unwelcome | Spam -/// Functions to support post comment statuses -module CommentStatus = - - /// Convert a comment status to a string - let toString = function Approved -> "Approved" | Pending -> "Pending" | Spam -> "Spam" - /// Parse a string into a comment status - let parse value = - match value with + static member Parse status = + match status with | "Approved" -> Approved - | "Pending" -> Pending - | "Spam" -> Spam - | it -> invalidOp $"{it} is not a valid post status" + | "Pending" -> Pending + | "Spam" -> Spam + | _ -> invalidArg (nameof status) $"{status} is not a valid comment status" + + /// Convert a comment status to a string + override this.ToString() = + match this with Approved -> "Approved" | Pending -> "Pending" | Spam -> "Spam" /// Valid values for the iTunes explicit rating +[] type ExplicitRating = | Yes | No | Clean - -/// Functions to support iTunes explicit ratings -module ExplicitRating = - /// Convert an explicit rating to a string - let toString : ExplicitRating -> string = - function - | Yes -> "yes" - | No -> "no" - | Clean -> "clean" /// Parse a string into an explicit rating - let parse : string -> ExplicitRating = - function - | "yes" -> Yes - | "no" -> No + static member Parse rating = + match rating with + | "yes" -> Yes + | "no" -> No | "clean" -> Clean - | x -> raise (invalidArg "rating" $"{x} is not a valid explicit rating") + | _ -> invalidArg (nameof rating) $"{rating} is not a valid explicit rating" + + /// The string value of this rating + override this.ToString() = + match this with Yes -> "yes" | No -> "no" | Clean -> "clean" + + +/// A location (specified by Podcast Index) +type Location = { + /// The name of the location (free-form text) + Name: string + + /// A geographic coordinate string (RFC 5870) + Geo: string + + /// An OpenStreetMap query + Osm: string option +} + + +/// A chapter in a podcast episode +type Chapter = { + /// The start time for the chapter + StartTime: Duration + + /// The title for this 
chapter + Title: string option + + /// A URL for an image for this chapter + ImageUrl: string option + + /// A URL with information pertaining to this chapter + Url: string option + + /// Whether this chapter is hidden + IsHidden: bool option + + /// The episode end time for the chapter + EndTime: Duration option + + /// A location that applies to a chapter + Location: Location option +} with + + /// An empty chapter + static member Empty = + { StartTime = Duration.Zero + Title = None + ImageUrl = None + Url = None + IsHidden = None + EndTime = None + Location = None } open NodaTime.Text /// A podcast episode -type Episode = - { /// The URL to the media file for the episode (may be permalink) - Media : string - - /// The length of the media file, in bytes - Length : int64 - - /// The duration of the episode - Duration : Duration option - - /// The media type of the file (overrides podcast default if present) - MediaType : string option - - /// The URL to the image file for this episode (overrides podcast image if present, may be permalink) - ImageUrl : string option - - /// A subtitle for this episode - Subtitle : string option - - /// This episode's explicit rating (overrides podcast rating if present) - Explicit : ExplicitRating option - - /// A link to a chapter file - ChapterFile : string option - - /// The MIME type for the chapter file - ChapterType : string option - - /// The URL for the transcript of the episode (may be permalink) - TranscriptUrl : string option - - /// The MIME type of the transcript - TranscriptType : string option - - /// The language in which the transcript is written - TranscriptLang : string option - - /// If true, the transcript will be declared (in the feed) to be a captions file - TranscriptCaptions : bool option - - /// The season number (for serialized podcasts) - SeasonNumber : int option - - /// A description of the season - SeasonDescription : string option - - /// The episode number - EpisodeNumber : double option - - /// A 
description of the episode - EpisodeDescription : string option - } +type Episode = { + /// The URL to the media file for the episode (may be permalink) + Media: string + + /// The length of the media file, in bytes + Length: int64 + + /// The duration of the episode + Duration: Duration option + + /// The media type of the file (overrides podcast default if present) + MediaType: string option + + /// The URL to the image file for this episode (overrides podcast image if present, may be permalink) + ImageUrl: string option + + /// A subtitle for this episode + Subtitle: string option + + /// This episode's explicit rating (overrides podcast rating if present) + Explicit: ExplicitRating option + + /// Chapters for this episode + Chapters: Chapter list option -/// Functions to support episodes -module Episode = + /// A link to a chapter file + ChapterFile: string option + + /// The MIME type for the chapter file + ChapterType: string option + + /// Whether the chapters have locations that should be displayed as waypoints + ChapterWaypoints: bool option + + /// The URL for the transcript of the episode (may be permalink) + TranscriptUrl: string option + + /// The MIME type of the transcript + TranscriptType: string option + + /// The language in which the transcript is written + TranscriptLang: string option + + /// If true, the transcript will be declared (in the feed) to be a captions file + TranscriptCaptions: bool option + + /// The season number (for serialized podcasts) + SeasonNumber: int option + + /// A description of the season + SeasonDescription: string option + + /// The episode number + EpisodeNumber: double option + + /// A description of the episode + EpisodeDescription: string option +} with /// An empty episode - let empty = - { Media = "" - Length = 0L - Duration = None - MediaType = None - ImageUrl = None - Subtitle = None - Explicit = None - ChapterFile = None - ChapterType = None - TranscriptUrl = None - TranscriptType = None - TranscriptLang = 
None - TranscriptCaptions = None - SeasonNumber = None - SeasonDescription = None - EpisodeNumber = None - EpisodeDescription = None - } + static member Empty = + { Media = "" + Length = 0L + Duration = None + MediaType = None + ImageUrl = None + Subtitle = None + Explicit = None + Chapters = None + ChapterFile = None + ChapterType = None + ChapterWaypoints = None + TranscriptUrl = None + TranscriptType = None + TranscriptLang = None + TranscriptCaptions = None + SeasonNumber = None + SeasonDescription = None + EpisodeNumber = None + EpisodeDescription = None } /// Format a duration for an episode - let formatDuration ep = - ep.Duration |> Option.map (DurationPattern.CreateWithInvariantCulture("H:mm:ss").Format) + member this.FormatDuration() = + this.Duration |> Option.map (DurationPattern.CreateWithInvariantCulture("H:mm:ss").Format) -open Markdig -open Markdown.ColorCode - /// Types of markup text type MarkupText = /// Markdown text @@ -256,99 +302,92 @@ type MarkupText = /// HTML text | Html of string -/// Functions to support markup text -module MarkupText = - - /// Pipeline with most extensions enabled - let private _pipeline = MarkdownPipelineBuilder().UseSmartyPants().UseAdvancedExtensions().UseColorCode().Build () - - /// Get the source type for the markup text - let sourceType = function Markdown _ -> "Markdown" | Html _ -> "HTML" - - /// Get the raw text, regardless of type - let text = function Markdown text -> text | Html text -> text - - /// Get the string representation of the markup text - let toString it = $"{sourceType it}: {text it}" - - /// Get the HTML representation of the markup text - let toHtml = function Markdown text -> Markdown.ToHtml (text, _pipeline) | Html text -> text - /// Parse a string into a MarkupText instance - let parse (it : string) = - match it with - | text when text.StartsWith "Markdown: " -> Markdown (text.Substring 10) - | text when text.StartsWith "HTML: " -> Html (text.Substring 6) - | text -> invalidOp $"Cannot derive 
type of text ({text})" + static member Parse(text: string) = + match text with + | _ when text.StartsWith "Markdown: " -> Markdown text[10..] + | _ when text.StartsWith "HTML: " -> Html text[6..] + | _ -> invalidArg (nameof text) $"Cannot derive type of text ({text})" + + /// The source type for the markup text + member this.SourceType = + match this with Markdown _ -> "Markdown" | Html _ -> "HTML" + + /// The raw text, regardless of type + member this.Text = + match this with Markdown text -> text | Html text -> text + + /// The string representation of the markup text + override this.ToString() = + $"{this.SourceType}: {this.Text}" + + /// The HTML representation of the markup text + member this.AsHtml() = + match this with Markdown text -> Markdown.ToHtml(text, markdownPipeline) | Html text -> text /// An item of metadata -[] -type MetaItem = - { /// The name of the metadata value - Name : string - - /// The metadata value - Value : string - } - -/// Functions to support metadata items -module MetaItem = - +[] +type MetaItem = { + /// The name of the metadata value + Name: string + + /// The metadata value + Value: string +} with + /// An empty metadata item - let empty = + static member Empty = { Name = ""; Value = "" } + /// A revision of a page or post -[] -type Revision = - { /// When this revision was saved - AsOf : Instant +[] +type Revision = { + /// When this revision was saved + AsOf: Instant - /// The text of the revision - Text : MarkupText - } - -/// Functions to support revisions -module Revision = + /// The text of the revision + Text: MarkupText +} with /// An empty revision - let empty = - { AsOf = Noda.epoch - Text = Html "" - } + static member Empty = + { AsOf = Noda.epoch; Text = Html "" } /// A permanent link -type Permalink = Permalink of string +[] +type Permalink = + | Permalink of string -/// Functions to support permalinks -module Permalink = - /// An empty permalink - let empty = Permalink "" - - /// Convert a permalink to a string - 
let toString = function Permalink p -> p + static member Empty = Permalink "" + + /// The string value of this permalink + override this.ToString() = + match this with Permalink it -> it /// An identifier for a page -type PageId = PageId of string +[] +type PageId = + | PageId of string -/// Functions to support page IDs -module PageId = - /// An empty page ID - let empty = PageId "" - - /// Convert a page ID to a string - let toString = function PageId pi -> pi + static member Empty = PageId "" /// Create a new page ID - let create () = PageId (newId ()) + static member Create = + newId >> PageId + + /// The string value of this page ID + override this.ToString() = + match this with PageId it -> it /// PodcastIndex.org podcast:medium allowed values +[] type PodcastMedium = | Podcast | Music @@ -358,84 +397,100 @@ type PodcastMedium = | Newsletter | Blog -/// Functions to support podcast medium -module PodcastMedium = - - /// Convert a podcast medium to a string - let toString = - function - | Podcast -> "podcast" - | Music -> "music" - | Video -> "video" - | Film -> "film" - | Audiobook -> "audiobook" - | Newsletter -> "newsletter" - | Blog -> "blog" - /// Parse a string into a podcast medium - let parse value = - match value with - | "podcast" -> Podcast - | "music" -> Music - | "video" -> Video - | "film" -> Film - | "audiobook" -> Audiobook + static member Parse medium = + match medium with + | "podcast" -> Podcast + | "music" -> Music + | "video" -> Video + | "film" -> Film + | "audiobook" -> Audiobook | "newsletter" -> Newsletter - | "blog" -> Blog - | it -> invalidOp $"{it} is not a valid podcast medium" + | "blog" -> Blog + | _ -> invalidArg (nameof medium) $"{medium} is not a valid podcast medium" + + /// The string value of this podcast medium + override this.ToString() = + match this with + | Podcast -> "podcast" + | Music -> "music" + | Video -> "video" + | Film -> "film" + | Audiobook -> "audiobook" + | Newsletter -> "newsletter" + | Blog -> "blog" /// 
Statuses for posts +[] type PostStatus = /// The post should not be publicly available | Draft /// The post is publicly viewable | Published -/// Functions to support post statuses -module PostStatus = - - /// Convert a post status to a string - let toString = function Draft -> "Draft" | Published -> "Published" - /// Parse a string into a post status - let parse value = - match value with + static member Parse status = + match status with | "Draft" -> Draft | "Published" -> Published - | it -> invalidOp $"{it} is not a valid post status" + | _ -> invalidArg (nameof status) $"{status} is not a valid post status" + + /// The string representation of this post status + override this.ToString() = + match this with Draft -> "Draft" | Published -> "Published" /// An identifier for a post -type PostId = PostId of string +[] +type PostId = + | PostId of string -/// Functions to support post IDs -module PostId = - /// An empty post ID - let empty = PostId "" - - /// Convert a post ID to a string - let toString = function PostId pi -> pi + static member Empty = PostId "" /// Create a new post ID - let create () = PostId (newId ()) + static member Create = + newId >> PostId + + /// Convert a post ID to a string + override this.ToString() = + match this with PostId it -> it + + +/// A redirection for a previously valid URL +[] +type RedirectRule = { + /// The From string or pattern + From: string + + /// The To string or pattern + To: string + + /// Whether to use regular expressions on this rule + IsRegex: bool +} with + + /// An empty redirect rule + static member Empty = + { From = ""; To = ""; IsRegex = false } /// An identifier for a custom feed -type CustomFeedId = CustomFeedId of string +[] +type CustomFeedId = + | CustomFeedId of string -/// Functions to support custom feed IDs -module CustomFeedId = - /// An empty custom feed ID - let empty = CustomFeedId "" - - /// Convert a custom feed ID to a string - let toString = function CustomFeedId pi -> pi + static member 
Empty = CustomFeedId "" /// Create a new custom feed ID - let create () = CustomFeedId (newId ()) + static member Create = + newId >> CustomFeedId + + /// Convert a custom feed ID to a string + override this.ToString() = + match this with CustomFeedId it -> it /// The source for a custom feed @@ -444,266 +499,282 @@ type CustomFeedSource = | Category of CategoryId /// A feed based on a particular tag | Tag of string - -/// Functions to support feed sources -module CustomFeedSource = - /// Create a string version of a feed source - let toString : CustomFeedSource -> string = - function - | Category (CategoryId catId) -> $"category:{catId}" - | Tag tag -> $"tag:{tag}" /// Parse a feed source from its string version - let parse : string -> CustomFeedSource = + static member Parse(source: string) = let value (it : string) = it.Split(":").[1] - function - | source when source.StartsWith "category:" -> (value >> CategoryId >> Category) source - | source when source.StartsWith "tag:" -> (value >> Tag) source - | source -> invalidArg "feedSource" $"{source} is not a valid feed source" + match source with + | _ when source.StartsWith "category:" -> (value >> CategoryId >> Category) source + | _ when source.StartsWith "tag:" -> (value >> Tag) source + | _ -> invalidArg (nameof source) $"{source} is not a valid feed source" + + /// Create a string version of a feed source + override this.ToString() = + match this with | Category (CategoryId catId) -> $"category:{catId}" | Tag tag -> $"tag:{tag}" /// Options for a feed that describes a podcast -type PodcastOptions = - { /// The title of the podcast - Title : string - - /// A subtitle for the podcast - Subtitle : string option - - /// The number of items in the podcast feed - ItemsInFeed : int - - /// A summary of the podcast (iTunes field) - Summary : string - - /// The display name of the podcast author (iTunes field) - DisplayedAuthor : string - - /// The e-mail address of the user who registered the podcast at iTunes - 
Email : string - - /// The link to the image for the podcast - ImageUrl : Permalink - - /// The category from Apple Podcasts (iTunes) under which this podcast is categorized - AppleCategory : string - - /// A further refinement of the categorization of this podcast (Apple Podcasts/iTunes field / values) - AppleSubcategory : string option - - /// The explictness rating (iTunes field) - Explicit : ExplicitRating - - /// The default media type for files in this podcast - DefaultMediaType : string option - - /// The base URL for relative URL media files for this podcast (optional; defaults to web log base) - MediaBaseUrl : string option - - /// A GUID for this podcast - PodcastGuid : Guid option - - /// A URL at which information on supporting the podcast may be found (supports permalinks) - FundingUrl : string option - - /// The text to be displayed in the funding item within the feed - FundingText : string option - - /// The medium (what the podcast IS, not what it is ABOUT) - Medium : PodcastMedium option - } +[] +type PodcastOptions = { + /// The title of the podcast + Title: string + + /// A subtitle for the podcast + Subtitle: string option + + /// The number of items in the podcast feed + ItemsInFeed: int + + /// A summary of the podcast (iTunes field) + Summary: string + + /// The display name of the podcast author (iTunes field) + DisplayedAuthor: string + + /// The e-mail address of the user who registered the podcast at iTunes + Email: string + + /// The link to the image for the podcast + ImageUrl: Permalink + + /// The category from Apple Podcasts (iTunes) under which this podcast is categorized + AppleCategory: string + + /// A further refinement of the categorization of this podcast (Apple Podcasts/iTunes field / values) + AppleSubcategory: string option + + /// The explictness rating (iTunes field) + Explicit: ExplicitRating + + /// The default media type for files in this podcast + DefaultMediaType: string option + + /// The base URL for relative URL 
media files for this podcast (optional; defaults to web log base) + MediaBaseUrl: string option + + /// A GUID for this podcast + PodcastGuid: Guid option + + /// A URL at which information on supporting the podcast may be found (supports permalinks) + FundingUrl: string option + + /// The text to be displayed in the funding item within the feed + FundingText: string option + + /// The medium (what the podcast IS, not what it is ABOUT) + Medium: PodcastMedium option +} with + + /// A default set of podcast options + static member Empty = + { Title = "" + Subtitle = None + ItemsInFeed = 0 + Summary = "" + DisplayedAuthor = "" + Email = "" + ImageUrl = Permalink.Empty + AppleCategory = "" + AppleSubcategory = None + Explicit = No + DefaultMediaType = None + MediaBaseUrl = None + PodcastGuid = None + FundingUrl = None + FundingText = None + Medium = None } /// A custom feed -type CustomFeed = - { /// The ID of the custom feed - Id : CustomFeedId - - /// The source for the custom feed - Source : CustomFeedSource - - /// The path for the custom feed - Path : Permalink - - /// Podcast options, if the feed defines a podcast - Podcast : PodcastOptions option - } - -/// Functions to support custom feeds -module CustomFeed = +[] +type CustomFeed = { + /// The ID of the custom feed + Id: CustomFeedId + + /// The source for the custom feed + Source: CustomFeedSource + + /// The path for the custom feed + Path: Permalink + + /// Podcast options, if the feed defines a podcast + Podcast: PodcastOptions option +} with /// An empty custom feed - let empty = - { Id = CustomFeedId "" - Source = Category (CategoryId "") - Path = Permalink "" - Podcast = None - } + static member Empty = + { Id = CustomFeedId.Empty + Source = Category CategoryId.Empty + Path = Permalink.Empty + Podcast = None } /// Really Simple Syndication (RSS) options for this web log [] -type RssOptions = - { /// Whether the site feed of posts is enabled - IsFeedEnabled : bool - - /// The name of the file generated 
for the site feed - FeedName : string - - /// Override the "posts per page" setting for the site feed - ItemsInFeed : int option - - /// Whether feeds are enabled for all categories - IsCategoryEnabled : bool - - /// Whether feeds are enabled for all tags - IsTagEnabled : bool - - /// A copyright string to be placed in all feeds - Copyright : string option - - /// Custom feeds for this web log - CustomFeeds: CustomFeed list - } - -/// Functions to support RSS options -module RssOptions = +type RssOptions = { + /// Whether the site feed of posts is enabled + IsFeedEnabled: bool + + /// The name of the file generated for the site feed + FeedName: string + + /// Override the "posts per page" setting for the site feed + ItemsInFeed: int option + + /// Whether feeds are enabled for all categories + IsCategoryEnabled: bool + + /// Whether feeds are enabled for all tags + IsTagEnabled: bool + + /// A copyright string to be placed in all feeds + Copyright: string option + + /// Custom feeds for this web log + CustomFeeds: CustomFeed list +} with /// An empty set of RSS options - let empty = - { IsFeedEnabled = true - FeedName = "feed.xml" - ItemsInFeed = None - IsCategoryEnabled = true - IsTagEnabled = true - Copyright = None - CustomFeeds = [] - } + static member Empty = + { IsFeedEnabled = true + FeedName = "feed.xml" + ItemsInFeed = None + IsCategoryEnabled = true + IsTagEnabled = true + Copyright = None + CustomFeeds = [] } /// An identifier for a tag mapping -type TagMapId = TagMapId of string +[] +type TagMapId = + | TagMapId of string -/// Functions to support tag mapping IDs -module TagMapId = - /// An empty tag mapping ID - let empty = TagMapId "" - - /// Convert a tag mapping ID to a string - let toString = function TagMapId tmi -> tmi + static member Empty = TagMapId "" /// Create a new tag mapping ID - let create () = TagMapId (newId ()) + static member Create = + newId >> TagMapId + + /// Convert a tag mapping ID to a string + override this.ToString() = + 
match this with TagMapId it -> it /// An identifier for a theme (represents its path) -type ThemeId = ThemeId of string - -/// Functions to support theme IDs -module ThemeId = - let toString = function ThemeId ti -> ti +[] +type ThemeId = + | ThemeId of string + + /// An empty theme ID + static member Empty = ThemeId "" + + /// The string representation of a theme ID + override this.ToString() = + match this with ThemeId it -> it /// An identifier for a theme asset -type ThemeAssetId = ThemeAssetId of ThemeId * string +[] +type ThemeAssetId = + | ThemeAssetId of ThemeId * string -/// Functions to support theme asset IDs -module ThemeAssetId = - - /// Convert a theme asset ID into a path string - let toString = function ThemeAssetId (ThemeId theme, asset) -> $"{theme}/{asset}" + /// An empty theme asset ID + static member Empty = ThemeAssetId(ThemeId.Empty, "") /// Convert a string into a theme asset ID - let ofString (it : string) = + static member Parse(it : string) = let themeIdx = it.IndexOf "/" - ThemeAssetId (ThemeId it[..(themeIdx - 1)], it[(themeIdx + 1)..]) + if themeIdx < 0 then + invalidArg "id" $"Invalid format; expected [theme_id]/[asset_id] (received {it})" + else + ThemeAssetId(ThemeId it[..(themeIdx - 1)], it[(themeIdx + 1)..]) + + /// Convert a theme asset ID into a path string + override this.ToString() = + match this with ThemeAssetId (ThemeId theme, asset) -> $"{theme}/{asset}" /// A template for a theme -type ThemeTemplate = - { /// The name of the template - Name : string - - /// The text of the template - Text : string - } - -/// Functions to support theme templates -module ThemeTemplate = +[] +type ThemeTemplate = { + /// The name of the template + Name: string + + /// The text of the template + Text: string +} with /// An empty theme template - let empty = - { Name = "" - Text = "" - } + static member Empty = + { Name = ""; Text = "" } /// Where uploads should be placed +[] type UploadDestination = | Database | Disk -/// Functions to support 
upload destinations -module UploadDestination = - - /// Convert an upload destination to its string representation - let toString = function Database -> "Database" | Disk -> "Disk" - /// Parse an upload destination from its string representation - let parse value = - match value with + static member Parse destination = + match destination with | "Database" -> Database - | "Disk" -> Disk - | it -> invalidOp $"{it} is not a valid upload destination" + | "Disk" -> Disk + | _ -> invalidArg (nameof destination) $"{destination} is not a valid upload destination" + + /// The string representation of an upload destination + override this.ToString() = + match this with Database -> "Database" | Disk -> "Disk" /// An identifier for an upload -type UploadId = UploadId of string +[] +type UploadId = + | UploadId of string -/// Functions to support upload IDs -module UploadId = - /// An empty upload ID - let empty = UploadId "" - - /// Convert an upload ID to a string - let toString = function UploadId ui -> ui + static member Empty = UploadId "" /// Create a new upload ID - let create () = UploadId (newId ()) + static member Create = + newId >> UploadId + + /// The string representation of an upload ID + override this.ToString() = + match this with UploadId it -> it /// An identifier for a web log -type WebLogId = WebLogId of string +[] +type WebLogId = + | WebLogId of string -/// Functions to support web log IDs -module WebLogId = - /// An empty web log ID - let empty = WebLogId "" - - /// Convert a web log ID to a string - let toString = function WebLogId wli -> wli + static member Empty = WebLogId "" /// Create a new web log ID - let create () = WebLogId (newId ()) - + static member Create = + newId >> WebLogId + + /// Convert a web log ID to a string + override this.ToString() = + match this with WebLogId it -> it /// An identifier for a web log user -type WebLogUserId = WebLogUserId of string - -/// Functions to support web log user IDs -module WebLogUserId = +[] +type 
WebLogUserId = + | WebLogUserId of string /// An empty web log user ID - let empty = WebLogUserId "" - - /// Convert a web log user ID to a string - let toString = function WebLogUserId wli -> wli + static member Empty = WebLogUserId "" /// Create a new web log user ID - let create () = WebLogUserId (newId ()) - - + static member Create = + newId >> WebLogUserId + + /// The string representation of a web log user ID + override this.ToString() = + match this with WebLogUserId it -> it diff --git a/src/MyWebLog.Domain/ViewModels.fs b/src/MyWebLog.Domain/ViewModels.fs index f7d204f..d52cd10 100644 --- a/src/MyWebLog.Domain/ViewModels.fs +++ b/src/MyWebLog.Domain/ViewModels.fs @@ -3,14 +3,15 @@ open System open MyWebLog open NodaTime +open NodaTime.Text /// Helper functions for view models [] module private Helpers = /// Create a string option if a string is blank - let noneIfBlank (it : string) = - match (defaultArg (Option.ofObj it) "").Trim () with "" -> None | trimmed -> Some trimmed + let noneIfBlank it = + match (defaultArg (Option.ofObj it) "").Trim() with "" -> None | trimmed -> Some trimmed /// Helper functions that are needed outside this file @@ -19,170 +20,111 @@ module PublicHelpers = /// If the web log is not being served from the domain root, add the path information to relative URLs in page and /// post text - let addBaseToRelativeUrls extra (text : string) = + let addBaseToRelativeUrls extra (text: string) = if extra = "" then text - else text.Replace("href=\"/", $"href=\"{extra}/").Replace ("src=\"/", $"src=\"{extra}/") + else + text.Replace("href=\"/", $"href=\"{extra}/").Replace("href=/", $"href={extra}/") + .Replace("src=\"/", $"src=\"{extra}/").Replace("src=/", $"src={extra}/") /// The model used to display the admin dashboard [] -type DashboardModel = - { /// The number of published posts - Posts : int +type DashboardModel = { + /// The number of published posts + Posts: int - /// The number of post drafts - Drafts : int + /// The number of post 
drafts + Drafts: int - /// The number of pages - Pages : int + /// The number of pages + Pages: int - /// The number of pages in the page list - ListedPages : int + /// The number of pages in the page list + ListedPages: int - /// The number of categories - Categories : int + /// The number of categories + Categories: int - /// The top-level categories - TopLevelCategories : int - } + /// The top-level categories + TopLevelCategories: int +} /// Details about a category, used to display category lists [] -type DisplayCategory = - { /// The ID of the category - Id : string - - /// The slug for the category - Slug : string - - /// The name of the category - Name : string - - /// A description of the category - Description : string option - - /// The parent category names for this (sub)category - ParentNames : string[] - - /// The number of posts in this category - PostCount : int - } - - -/// A display version of a custom feed definition -type DisplayCustomFeed = - { /// The ID of the custom feed - Id : string - - /// The source of the custom feed - Source : string - - /// The relative path at which the custom feed is served - Path : string - - /// Whether this custom feed is for a podcast - IsPodcast : bool - } +type DisplayCategory = { + /// The ID of the category + Id: string - /// Create a display version from a custom feed - static member fromFeed (cats : DisplayCategory[]) (feed : CustomFeed) : DisplayCustomFeed = - let source = - match feed.Source with - | Category (CategoryId catId) -> $"Category: {(cats |> Array.find (fun cat -> cat.Id = catId)).Name}" - | Tag tag -> $"Tag: {tag}" - { Id = CustomFeedId.toString feed.Id - Source = source - Path = Permalink.toString feed.Path - IsPodcast = Option.isSome feed.Podcast - } + /// The slug for the category + Slug: string + + /// The name of the category + Name: string + + /// A description of the category + Description: string option + + /// The parent category names for this (sub)category + ParentNames: string 
array + + /// The number of posts in this category + PostCount: int +} /// Details about a page used to display page lists [] -type DisplayPage = - { /// The ID of this page - Id : string +type DisplayPage = { + /// The ID of this page + Id: string - /// The ID of the author of this page - AuthorId : string - - /// The title of the page - Title : string + /// The ID of the author of this page + AuthorId: string + + /// The title of the page + Title: string - /// The link at which this page is displayed - Permalink : string + /// The link at which this page is displayed + Permalink: string - /// When this page was published - PublishedOn : DateTime + /// When this page was published + PublishedOn: DateTime - /// When this page was last updated - UpdatedOn : DateTime + /// When this page was last updated + UpdatedOn: DateTime - /// Whether this page shows as part of the web log's navigation - IsInPageList : bool - - /// Is this the default page? - IsDefault : bool - - /// The text of the page - Text : string - - /// The metadata for the page - Metadata : MetaItem list - } + /// Whether this page shows as part of the web log's navigation + IsInPageList: bool + + /// Is this the default page? 
+ IsDefault: bool + + /// The text of the page + Text: string + + /// The metadata for the page + Metadata: MetaItem list +} with /// Create a minimal display page (no text or metadata) from a database page - static member fromPageMinimal webLog (page : Page) = - let pageId = PageId.toString page.Id - { Id = pageId - AuthorId = WebLogUserId.toString page.AuthorId - Title = page.Title - Permalink = Permalink.toString page.Permalink - PublishedOn = WebLog.localTime webLog page.PublishedOn - UpdatedOn = WebLog.localTime webLog page.UpdatedOn - IsInPageList = page.IsInPageList - IsDefault = pageId = webLog.DefaultPage - Text = "" - Metadata = [] - } + static member FromPageMinimal (webLog: WebLog) (page: Page) = + { Id = string page.Id + AuthorId = string page.AuthorId + Title = page.Title + Permalink = string page.Permalink + PublishedOn = webLog.LocalTime page.PublishedOn + UpdatedOn = webLog.LocalTime page.UpdatedOn + IsInPageList = page.IsInPageList + IsDefault = string page.Id = webLog.DefaultPage + Text = "" + Metadata = [] } /// Create a display page from a database page - static member fromPage webLog (page : Page) = - let _, extra = WebLog.hostAndPath webLog - let pageId = PageId.toString page.Id - { Id = pageId - AuthorId = WebLogUserId.toString page.AuthorId - Title = page.Title - Permalink = Permalink.toString page.Permalink - PublishedOn = WebLog.localTime webLog page.PublishedOn - UpdatedOn = WebLog.localTime webLog page.UpdatedOn - IsInPageList = page.IsInPageList - IsDefault = pageId = webLog.DefaultPage - Text = addBaseToRelativeUrls extra page.Text - Metadata = page.Metadata - } - - -/// Information about a revision used for display -[] -type DisplayRevision = - { /// The as-of date/time for the revision - AsOf : DateTime - - /// The as-of date/time for the revision in the web log's local time zone - AsOfLocal : DateTime - - /// The format of the text of the revision - Format : string - } -with - - /// Create a display revision from an actual revision 
- static member fromRevision webLog (rev : Revision) = - { AsOf = rev.AsOf.ToDateTimeUtc () - AsOfLocal = WebLog.localTime webLog rev.AsOf - Format = MarkupText.sourceType rev.Text + static member FromPage webLog page = + { DisplayPage.FromPageMinimal webLog page with + Text = addBaseToRelativeUrls webLog.ExtraPath page.Text + Metadata = page.Metadata } @@ -190,247 +132,350 @@ open System.IO /// Information about a theme used for display [] -type DisplayTheme = - { /// The ID / path slug of the theme - Id : string - - /// The name of the theme - Name : string - - /// The version of the theme - Version : string - - /// How many templates are contained in the theme - TemplateCount : int - - /// Whether the theme is in use by any web logs - IsInUse : bool - - /// Whether the theme .zip file exists on the filesystem - IsOnDisk : bool - } -with +type DisplayTheme = { + /// The ID / path slug of the theme + Id: string + + /// The name of the theme + Name: string + + /// The version of the theme + Version: string + + /// How many templates are contained in the theme + TemplateCount: int + + /// Whether the theme is in use by any web logs + IsInUse: bool + + /// Whether the theme .zip file exists on the filesystem + IsOnDisk: bool +} with /// Create a display theme from a theme - static member fromTheme inUseFunc (theme : Theme) = - { Id = ThemeId.toString theme.Id - Name = theme.Name - Version = theme.Version - TemplateCount = List.length theme.Templates - IsInUse = inUseFunc theme.Id - IsOnDisk = File.Exists $"{ThemeId.toString theme.Id}-theme.zip" - } + static member FromTheme inUseFunc (theme: Theme) = + let fileName = if string theme.Id = "default" then "default-theme.zip" else $"./themes/{theme.Id}-theme.zip" + { Id = string theme.Id + Name = theme.Name + Version = theme.Version + TemplateCount = List.length theme.Templates + IsInUse = inUseFunc theme.Id + IsOnDisk = File.Exists fileName } /// Information about an uploaded file used for display [] -type DisplayUpload 
= - { /// The ID of the uploaded file - Id : string - - /// The name of the uploaded file - Name : string - - /// The path at which the file is served - Path : string - - /// The date/time the file was updated - UpdatedOn : DateTime option - - /// The source for this file (created from UploadDestination DU) - Source : string - } +type DisplayUpload = { + /// The ID of the uploaded file + Id: string + + /// The name of the uploaded file + Name: string + + /// The path at which the file is served + Path: string + + /// The date/time the file was updated + UpdatedOn: DateTime option + + /// The source for this file (created from UploadDestination DU) + Source: string +} with /// Create a display uploaded file - static member fromUpload webLog source (upload : Upload) = - let path = Permalink.toString upload.Path + static member FromUpload (webLog: WebLog) (source: UploadDestination) (upload: Upload) = + let path = string upload.Path let name = Path.GetFileName path - { Id = UploadId.toString upload.Id - Name = name - Path = path.Replace (name, "") - UpdatedOn = Some (WebLog.localTime webLog upload.UpdatedOn) - Source = UploadDestination.toString source - } - - -/// View model to display a user's information -[] -type DisplayUser = - { /// The ID of the user - Id : string - - /// The user name (e-mail address) - Email : string - - /// The user's first name - FirstName : string - - /// The user's last name - LastName : string - - /// The user's preferred name - PreferredName : string - - /// The URL of the user's personal site - Url : string - - /// The user's access level - AccessLevel : string - - /// When the user was created - CreatedOn : DateTime - - /// When the user last logged on - LastSeenOn : Nullable - } - - /// Construct a displayed user from a web log user - static member fromUser webLog (user : WebLogUser) = - { Id = WebLogUserId.toString user.Id - Email = user.Email - FirstName = user.FirstName - LastName = user.LastName - PreferredName = 
user.PreferredName - Url = defaultArg user.Url "" - AccessLevel = AccessLevel.toString user.AccessLevel - CreatedOn = WebLog.localTime webLog user.CreatedOn - LastSeenOn = user.LastSeenOn |> Option.map (WebLog.localTime webLog) |> Option.toNullable - } + { Id = string upload.Id + Name = name + Path = path.Replace(name, "") + UpdatedOn = Some (webLog.LocalTime upload.UpdatedOn) + Source = string source } /// View model for editing categories [] -type EditCategoryModel = - { /// The ID of the category being edited - CategoryId : string - - /// The name of the category - Name : string - - /// The category's URL slug - Slug : string - - /// A description of the category (optional) - Description : string - - /// The ID of the category for which this is a subcategory (optional) - ParentId : string - } +type EditCategoryModel = { + /// The ID of the category being edited + CategoryId: string + + /// The name of the category + Name: string + + /// The category's URL slug + Slug: string + + /// A description of the category (optional) + Description: string + + /// The ID of the category for which this is a subcategory (optional) + ParentId: string +} with /// Create an edit model from an existing category - static member fromCategory (cat : Category) = - { CategoryId = CategoryId.toString cat.Id - Name = cat.Name - Slug = cat.Slug - Description = defaultArg cat.Description "" - ParentId = cat.ParentId |> Option.map CategoryId.toString |> Option.defaultValue "" - } + static member FromCategory (cat: Category) = + { CategoryId = string cat.Id + Name = cat.Name + Slug = cat.Slug + Description = defaultArg cat.Description "" + ParentId = cat.ParentId |> Option.map string |> Option.defaultValue "" } /// Is this a new category? 
- member this.IsNew = this.CategoryId = "new" + member this.IsNew = + this.CategoryId = "new" + + +/// View model to add/edit an episode chapter +[] +type EditChapterModel = { + /// The ID of the post to which the chapter belongs + PostId: string + + /// The index in the chapter list (-1 means new) + Index: int + + /// The start time of the chapter (H:mm:ss.FF format) + StartTime: string + + /// The title of the chapter + Title: string + + /// An image to display for this chapter + ImageUrl: string + + /// A URL with information about this chapter + Url: string + + /// Whether this chapter should be displayed in podcast players + IsHidden: bool + + /// The end time of the chapter (HH:MM:SS.FF format) + EndTime: string + + /// The name of a location + LocationName: string + + /// The geographic coordinates of the location + LocationGeo: string + + /// An OpenStreetMap query for this location + LocationOsm: string + + /// Whether to add another chapter after adding this one + AddAnother: bool +} with + + /// Create a display chapter from a chapter + static member FromChapter (postId: PostId) idx (chapter: Chapter) = + let pattern = DurationPattern.CreateWithInvariantCulture "H:mm:ss.FF" + { PostId = string postId + Index = idx + StartTime = pattern.Format chapter.StartTime + Title = defaultArg chapter.Title "" + ImageUrl = defaultArg chapter.ImageUrl "" + Url = defaultArg chapter.Url "" + IsHidden = defaultArg chapter.IsHidden false + EndTime = chapter.EndTime |> Option.map pattern.Format |> Option.defaultValue "" + LocationName = chapter.Location |> Option.map _.Name |> Option.defaultValue "" + LocationGeo = chapter.Location |> Option.map _.Geo |> Option.defaultValue "" + LocationOsm = chapter.Location |> Option.map _.Osm |> Option.flatten |> Option.defaultValue "" + AddAnother = false } + + /// Create a chapter from the values in this model + member this.ToChapter () = + let parseDuration name value = + let pattern = + match value |> Seq.fold (fun count chr -> if 
chr = ':' then count + 1 else count) 0 with + | 0 -> "S" + | 1 -> "M:ss" + | 2 -> "H:mm:ss" + | _ -> invalidArg name "Max time format is H:mm:ss" + |> function + | it -> DurationPattern.CreateWithInvariantCulture $"{it}.FFFFFFFFF" + let result = pattern.Parse value + if result.Success then result.Value else raise result.Exception + let location = + match noneIfBlank this.LocationName with + | None -> None + | Some name -> Some { Name = name; Geo = this.LocationGeo; Osm = noneIfBlank this.LocationOsm } + { StartTime = parseDuration (nameof this.StartTime) this.StartTime + Title = noneIfBlank this.Title + ImageUrl = noneIfBlank this.ImageUrl + Url = noneIfBlank this.Url + IsHidden = if this.IsHidden then Some true else None + EndTime = noneIfBlank this.EndTime |> Option.map (parseDuration (nameof this.EndTime)) + Location = location } + + +/// View model common to page and post edits +type EditCommonModel() = + + /// Find the latest revision within a list of revisions + let findLatestRevision (revs: Revision list) = + match revs |> List.sortByDescending _.AsOf |> List.tryHead with Some rev -> rev | None -> Revision.Empty + + /// The ID of the page or post + member val Id = "" with get, set + + /// The title of the page or post + member val Title = "" with get, set + + /// The permalink for the page or post + member val Permalink = "" with get, set + + /// The entity to which this model applies ("page" or "post") + member val Entity = "" with get, set + + /// Whether to provide a link to manage chapters + member val IncludeChapterLink = false with get, set + + /// The template to use to display the page + member val Template = "" with get, set + + /// The source type ("HTML" or "Markdown") + member val Source = "" with get, set + + /// The text of the page or post + member val Text = "" with get, set + + /// Names of metadata items + member val MetaNames: string array = [||] with get, set + + /// Values of metadata items + member val MetaValues: string array = [||] 
with get, set + + /// Whether this is a new page or post + member this.IsNew with get () = this.Id = "new" + + /// Fill the properties of this object from a page + member this.PopulateFromPage (page: Page) = + let latest = findLatestRevision page.Revisions + let metaItems = if page.Metadata.Length = 0 then [ MetaItem.Empty ] else page.Metadata + this.Id <- string page.Id + this.Title <- page.Title + this.Permalink <- string page.Permalink + this.Entity <- "page" + this.Template <- defaultArg page.Template "" + this.Source <- latest.Text.SourceType + this.Text <- latest.Text.Text + this.MetaNames <- metaItems |> List.map _.Name |> Array.ofList + this.MetaValues <- metaItems |> List.map _.Value |> Array.ofList + + /// Fill the properties of this object from a post + member this.PopulateFromPost (post: Post) = + let latest = findLatestRevision post.Revisions + let metaItems = if post.Metadata.Length = 0 then [ MetaItem.Empty ] else post.Metadata + this.Id <- string post.Id + this.Title <- post.Title + this.Permalink <- string post.Permalink + this.Entity <- "post" + this.IncludeChapterLink <- Option.isSome post.Episode && Option.isSome post.Episode.Value.Chapters + this.Template <- defaultArg post.Template "" + this.Source <- latest.Text.SourceType + this.Text <- latest.Text.Text + this.MetaNames <- metaItems |> List.map _.Name |> Array.ofList + this.MetaValues <- metaItems |> List.map _.Value |> Array.ofList /// View model to edit a custom RSS feed [] -type EditCustomFeedModel = - { /// The ID of the feed being editing - Id : string - - /// The type of source for this feed ("category" or "tag") - SourceType : string - - /// The category ID or tag on which this feed is based - SourceValue : string - - /// The relative path at which this feed is served - Path : string - - /// Whether this feed defines a podcast - IsPodcast : bool - - /// The title of the podcast - Title : string - - /// A subtitle for the podcast - Subtitle : string - - /// The number of items in the 
podcast feed - ItemsInFeed : int - - /// A summary of the podcast (iTunes field) - Summary : string - - /// The display name of the podcast author (iTunes field) - DisplayedAuthor : string - - /// The e-mail address of the user who registered the podcast at iTunes - Email : string - - /// The link to the image for the podcast - ImageUrl : string - - /// The category from Apple Podcasts (iTunes) under which this podcast is categorized - AppleCategory : string - - /// A further refinement of the categorization of this podcast (Apple Podcasts/iTunes field / values) - AppleSubcategory : string - - /// The explictness rating (iTunes field) - Explicit : string - - /// The default media type for files in this podcast - DefaultMediaType : string - - /// The base URL for relative URL media files for this podcast (optional; defaults to web log base) - MediaBaseUrl : string - - /// The URL for funding information for the podcast - FundingUrl : string - - /// The text for the funding link - FundingText : string - - /// A unique identifier to follow this podcast - PodcastGuid : string - - /// The medium for the content of this podcast - Medium : string - } +type EditCustomFeedModel = { + /// The ID of the feed being editing + Id: string + + /// The type of source for this feed ("category" or "tag") + SourceType: string + + /// The category ID or tag on which this feed is based + SourceValue: string + + /// The relative path at which this feed is served + Path: string + + /// Whether this feed defines a podcast + IsPodcast: bool + + /// The title of the podcast + Title: string + + /// A subtitle for the podcast + Subtitle: string + + /// The number of items in the podcast feed + ItemsInFeed: int + + /// A summary of the podcast (iTunes field) + Summary: string + + /// The display name of the podcast author (iTunes field) + DisplayedAuthor: string + + /// The e-mail address of the user who registered the podcast at iTunes + Email: string + + /// The link to the image for the 
podcast + ImageUrl: string + + /// The category from Apple Podcasts (iTunes) under which this podcast is categorized + AppleCategory: string + + /// A further refinement of the categorization of this podcast (Apple Podcasts/iTunes field / values) + AppleSubcategory: string + + /// The explictness rating (iTunes field) + Explicit: string + + /// The default media type for files in this podcast + DefaultMediaType: string + + /// The base URL for relative URL media files for this podcast (optional; defaults to web log base) + MediaBaseUrl: string + + /// The URL for funding information for the podcast + FundingUrl: string + + /// The text for the funding link + FundingText: string + + /// A unique identifier to follow this podcast + PodcastGuid: string + + /// The medium for the content of this podcast + Medium: string +} with /// An empty custom feed model - static member empty = - { Id = "" - SourceType = "category" - SourceValue = "" - Path = "" - IsPodcast = false - Title = "" - Subtitle = "" - ItemsInFeed = 25 - Summary = "" - DisplayedAuthor = "" - Email = "" - ImageUrl = "" - AppleCategory = "" - AppleSubcategory = "" - Explicit = "no" - DefaultMediaType = "audio/mpeg" - MediaBaseUrl = "" - FundingUrl = "" - FundingText = "" - PodcastGuid = "" - Medium = "" - } + static member Empty = + { Id = "" + SourceType = "category" + SourceValue = "" + Path = "" + IsPodcast = false + Title = "" + Subtitle = "" + ItemsInFeed = 25 + Summary = "" + DisplayedAuthor = "" + Email = "" + ImageUrl = "" + AppleCategory = "" + AppleSubcategory = "" + Explicit = "no" + DefaultMediaType = "audio/mpeg" + MediaBaseUrl = "" + FundingUrl = "" + FundingText = "" + PodcastGuid = "" + Medium = "" } /// Create a model from a custom feed - static member fromFeed (feed : CustomFeed) = + static member FromFeed (feed: CustomFeed) = let rss = - { EditCustomFeedModel.empty with - Id = CustomFeedId.toString feed.Id + { EditCustomFeedModel.Empty with + Id = string feed.Id SourceType = match 
feed.Source with Category _ -> "category" | Tag _ -> "tag" SourceValue = match feed.Source with Category (CategoryId catId) -> catId | Tag tag -> tag - Path = Permalink.toString feed.Path - } + Path = string feed.Path } match feed.Podcast with | Some p -> { rss with @@ -441,23 +486,20 @@ type EditCustomFeedModel = Summary = p.Summary DisplayedAuthor = p.DisplayedAuthor Email = p.Email - ImageUrl = Permalink.toString p.ImageUrl + ImageUrl = string p.ImageUrl AppleCategory = p.AppleCategory AppleSubcategory = defaultArg p.AppleSubcategory "" - Explicit = ExplicitRating.toString p.Explicit + Explicit = string p.Explicit DefaultMediaType = defaultArg p.DefaultMediaType "" MediaBaseUrl = defaultArg p.MediaBaseUrl "" FundingUrl = defaultArg p.FundingUrl "" FundingText = defaultArg p.FundingText "" - PodcastGuid = p.PodcastGuid - |> Option.map (fun it -> it.ToString().ToLowerInvariant ()) - |> Option.defaultValue "" - Medium = p.Medium |> Option.map PodcastMedium.toString |> Option.defaultValue "" - } + PodcastGuid = p.PodcastGuid |> Option.map _.ToString().ToLowerInvariant() |> Option.defaultValue "" + Medium = p.Medium |> Option.map string |> Option.defaultValue "" } | None -> rss /// Update a feed with values from this model - member this.UpdateFeed (feed : CustomFeed) = + member this.UpdateFeed (feed: CustomFeed) = { feed with Source = if this.SourceType = "tag" then Tag this.SourceValue else Category (CategoryId this.SourceValue) Path = Permalink this.Path @@ -473,105 +515,64 @@ type EditCustomFeedModel = ImageUrl = Permalink this.ImageUrl AppleCategory = this.AppleCategory AppleSubcategory = noneIfBlank this.AppleSubcategory - Explicit = ExplicitRating.parse this.Explicit + Explicit = ExplicitRating.Parse this.Explicit DefaultMediaType = noneIfBlank this.DefaultMediaType MediaBaseUrl = noneIfBlank this.MediaBaseUrl PodcastGuid = noneIfBlank this.PodcastGuid |> Option.map Guid.Parse FundingUrl = noneIfBlank this.FundingUrl FundingText = noneIfBlank this.FundingText - 
Medium = noneIfBlank this.Medium |> Option.map PodcastMedium.parse - } + Medium = noneIfBlank this.Medium |> Option.map PodcastMedium.Parse } else - None - } + None } /// View model for a user to edit their own information [] -type EditMyInfoModel = - { /// The user's first name - FirstName : string - - /// The user's last name - LastName : string - - /// The user's preferred name - PreferredName : string - - /// A new password for the user - NewPassword : string - - /// A new password for the user, confirmed - NewPasswordConfirm : string - } +type EditMyInfoModel = { + /// The user's first name + FirstName: string + + /// The user's last name + LastName: string + + /// The user's preferred name + PreferredName: string + + /// A new password for the user + NewPassword: string + + /// A new password for the user, confirmed + NewPasswordConfirm: string +} with /// Create an edit model from a user - static member fromUser (user : WebLogUser) = - { FirstName = user.FirstName - LastName = user.LastName - PreferredName = user.PreferredName - NewPassword = "" - NewPasswordConfirm = "" - } + static member FromUser (user: WebLogUser) = + { FirstName = user.FirstName + LastName = user.LastName + PreferredName = user.PreferredName + NewPassword = "" + NewPasswordConfirm = "" } /// View model to edit a page -[] -type EditPageModel = - { /// The ID of the page being edited - PageId : string - - /// The title of the page - Title : string - - /// The permalink for the page - Permalink : string - - /// The template to use to display the page - Template : string - - /// Whether this page is shown in the page list - IsShownInPageList : bool - - /// The source format for the text - Source : string - - /// The text of the page - Text : string - - /// Names of metadata items - MetaNames : string[] - - /// Values of metadata items - MetaValues : string[] - } +type EditPageModel() = + inherit EditCommonModel() + /// Whether this page is shown in the page list + member val 
IsShownInPageList = false with get, set + /// Create an edit model from an existing page - static member fromPage (page : Page) = - let latest = - match page.Revisions |> List.sortByDescending (fun r -> r.AsOf) |> List.tryHead with - | Some rev -> rev - | None -> Revision.empty - let page = if page.Metadata |> List.isEmpty then { page with Metadata = [ MetaItem.empty ] } else page - { PageId = PageId.toString page.Id - Title = page.Title - Permalink = Permalink.toString page.Permalink - Template = defaultArg page.Template "" - IsShownInPageList = page.IsInPageList - Source = MarkupText.sourceType latest.Text - Text = MarkupText.text latest.Text - MetaNames = page.Metadata |> List.map (fun m -> m.Name) |> Array.ofList - MetaValues = page.Metadata |> List.map (fun m -> m.Value) |> Array.ofList - } - - /// Whether this is a new page - member this.IsNew = this.PageId = "new" + static member FromPage(page: Page) = + let model = EditPageModel() + model.PopulateFromPage page + model.IsShownInPageList <- page.IsInPageList + model /// Update a page with values from this model - member this.UpdatePage (page : Page) now = - let revision = { AsOf = now; Text = MarkupText.parse $"{this.Source}: {this.Text}" } + member this.UpdatePage (page: Page) now = + let revision = { AsOf = now; Text = MarkupText.Parse $"{this.Source}: {this.Text}" } // Detect a permalink change, and add the prior one to the prior list - match Permalink.toString page.Permalink with + match string page.Permalink with | "" -> page | link when link = this.Permalink -> page | _ -> { page with PriorPermalinks = page.Permalink :: page.PriorPermalinks } @@ -583,172 +584,140 @@ type EditPageModel = UpdatedOn = now IsInPageList = this.IsShownInPageList Template = match this.Template with "" -> None | tmpl -> Some tmpl - Text = MarkupText.toHtml revision.Text + Text = revision.Text.AsHtml() Metadata = Seq.zip this.MetaNames this.MetaValues |> Seq.filter (fun it -> fst it > "") |> Seq.map (fun it -> { Name = fst it; 
Value = snd it }) - |> Seq.sortBy (fun it -> $"{it.Name.ToLower ()} {it.Value.ToLower ()}") + |> Seq.sortBy (fun it -> $"{it.Name.ToLower()} {it.Value.ToLower()}") |> List.ofSeq Revisions = match page.Revisions |> List.tryHead with | Some r when r.Text = revision.Text -> page.Revisions - | _ -> revision :: page.Revisions - } + | _ -> revision :: page.Revisions } /// View model to edit a post -[] -type EditPostModel = - { /// The ID of the post being edited - PostId : string - - /// The title of the post - Title : string - - /// The permalink for the post - Permalink : string - - /// The source format for the text - Source : string - - /// The text of the post - Text : string - - /// The tags for the post - Tags : string - - /// The template used to display the post - Template : string - - /// The category IDs for the post - CategoryIds : string[] - - /// The post status - Status : string - - /// Whether this post should be published - DoPublish : bool - - /// Names of metadata items - MetaNames : string[] - - /// Values of metadata items - MetaValues : string[] - - /// Whether to override the published date/time - SetPublished : bool - - /// The published date/time to override - PubOverride : Nullable - - /// Whether all revisions should be purged and the override date set as the updated date as well - SetUpdated : bool - - /// Whether this post has a podcast episode - IsEpisode : bool - - /// The URL for the media for this episode (may be permalink) - Media : string - - /// The size (in bytes) of the media for this episode - Length : int64 - - /// The duration of the media for this episode - Duration : string - - /// The media type (optional, defaults to podcast-defined media type) - MediaType : string - - /// The URL for the image for this episode (may be permalink; optional, defaults to podcast image) - ImageUrl : string - - /// A subtitle for the episode (optional) - Subtitle : string - - /// The explicit rating for this episode (optional, defaults to podcast 
setting) - Explicit : string - - /// The URL for the chapter file for the episode (may be permalink; optional) - ChapterFile : string - - /// The type of the chapter file (optional; defaults to application/json+chapters if chapterFile is provided) - ChapterType : string - - /// The URL for the transcript (may be permalink; optional) - TranscriptUrl : string - - /// The MIME type for the transcript (optional, recommended if transcriptUrl is provided) - TranscriptType : string - - /// The language of the transcript (optional) - TranscriptLang : string - - /// Whether the provided transcript should be presented as captions - TranscriptCaptions : bool - - /// The season number (optional) - SeasonNumber : int - - /// A description of this season (optional, ignored if season number is not provided) - SeasonDescription : string - - /// The episode number (decimal; optional) - EpisodeNumber : string - - /// A description of this episode (optional, ignored if episode number is not provided) - EpisodeDescription : string - } +type EditPostModel() = + inherit EditCommonModel() + + /// The tags for the post + member val Tags = "" with get, set + + /// The category IDs for the post + member val CategoryIds: string array = [||] with get, set + + /// The post status + member val Status = "" with get, set + + /// Whether this post should be published + member val DoPublish = false with get, set + + /// Whether to override the published date/time + member val SetPublished = false with get, set + + /// The published date/time to override + member val PubOverride = Nullable() with get, set + + /// Whether all revisions should be purged and the override date set as the updated date as well + member val SetUpdated = false with get, set + + /// Whether this post has a podcast episode + member val IsEpisode = false with get, set + + /// The URL for the media for this episode (may be permalink) + member val Media = "" with get, set + + /// The size (in bytes) of the media for this episode 
+ member val Length = 0L with get, set + + /// The duration of the media for this episode + member val Duration = "" with get, set + + /// The media type (optional, defaults to podcast-defined media type) + member val MediaType = "" with get, set + + /// The URL for the image for this episode (may be permalink; optional, defaults to podcast image) + member val ImageUrl = "" with get, set + + /// A subtitle for the episode (optional) + member val Subtitle = "" with get, set + + /// The explicit rating for this episode (optional, defaults to podcast setting) + member val Explicit = "" with get, set + + /// The chapter source ("internal" for chapters defined here, "external" for a file link, "none" if none defined) + member val ChapterSource = "" with get, set + + /// The URL for the chapter file for the episode (may be permalink; optional) + member val ChapterFile = "" with get, set + + /// The type of the chapter file (optional; defaults to application/json+chapters if chapterFile is provided) + member val ChapterType = "" with get, set + + /// Whether the chapter file (or chapters) contains/contain waypoints + member val ContainsWaypoints = false with get, set + + /// The URL for the transcript (may be permalink; optional) + member val TranscriptUrl = "" with get, set + + /// The MIME type for the transcript (optional, recommended if transcriptUrl is provided) + member val TranscriptType = "" with get, set + + /// The language of the transcript (optional) + member val TranscriptLang = "" with get, set + + /// Whether the provided transcript should be presented as captions + member val TranscriptCaptions = false with get, set + + /// The season number (optional) + member val SeasonNumber = 0 with get, set + + /// A description of this season (optional, ignored if season number is not provided) + member val SeasonDescription = "" with get, set + + /// The episode number (decimal; optional) + member val EpisodeNumber = "" with get, set + + /// A description of this 
episode (optional, ignored if episode number is not provided) + member val EpisodeDescription = "" with get, set /// Create an edit model from an existing past - static member fromPost webLog (post : Post) = - let latest = - match post.Revisions |> List.sortByDescending (fun r -> r.AsOf) |> List.tryHead with - | Some rev -> rev - | None -> Revision.empty - let post = if post.Metadata |> List.isEmpty then { post with Metadata = [ MetaItem.empty ] } else post - let episode = defaultArg post.Episode Episode.empty - { PostId = PostId.toString post.Id - Title = post.Title - Permalink = Permalink.toString post.Permalink - Source = MarkupText.sourceType latest.Text - Text = MarkupText.text latest.Text - Tags = String.Join (", ", post.Tags) - Template = defaultArg post.Template "" - CategoryIds = post.CategoryIds |> List.map CategoryId.toString |> Array.ofList - Status = PostStatus.toString post.Status - DoPublish = false - MetaNames = post.Metadata |> List.map (fun m -> m.Name) |> Array.ofList - MetaValues = post.Metadata |> List.map (fun m -> m.Value) |> Array.ofList - SetPublished = false - PubOverride = post.PublishedOn |> Option.map (WebLog.localTime webLog) |> Option.toNullable - SetUpdated = false - IsEpisode = Option.isSome post.Episode - Media = episode.Media - Length = episode.Length - Duration = defaultArg (Episode.formatDuration episode) "" - MediaType = defaultArg episode.MediaType "" - ImageUrl = defaultArg episode.ImageUrl "" - Subtitle = defaultArg episode.Subtitle "" - Explicit = defaultArg (episode.Explicit |> Option.map ExplicitRating.toString) "" - ChapterFile = defaultArg episode.ChapterFile "" - ChapterType = defaultArg episode.ChapterType "" - TranscriptUrl = defaultArg episode.TranscriptUrl "" - TranscriptType = defaultArg episode.TranscriptType "" - TranscriptLang = defaultArg episode.TranscriptLang "" - TranscriptCaptions = defaultArg episode.TranscriptCaptions false - SeasonNumber = defaultArg episode.SeasonNumber 0 - SeasonDescription = 
defaultArg episode.SeasonDescription "" - EpisodeNumber = defaultArg (episode.EpisodeNumber |> Option.map string) "" - EpisodeDescription = defaultArg episode.EpisodeDescription "" - } - - /// Whether this is a new post - member this.IsNew = this.PostId = "new" + static member FromPost (webLog: WebLog) (post: Post) = + let model = EditPostModel() + model.PopulateFromPost post + let episode = defaultArg post.Episode Episode.Empty + model.Tags <- post.Tags |> String.concat ", " + model.CategoryIds <- post.CategoryIds |> List.map string |> Array.ofList + model.Status <- string post.Status + model.PubOverride <- post.PublishedOn |> Option.map webLog.LocalTime |> Option.toNullable + model.IsEpisode <- Option.isSome post.Episode + model.Media <- episode.Media + model.Length <- episode.Length + model.Duration <- defaultArg (episode.FormatDuration()) "" + model.MediaType <- defaultArg episode.MediaType "" + model.ImageUrl <- defaultArg episode.ImageUrl "" + model.Subtitle <- defaultArg episode.Subtitle "" + model.Explicit <- defaultArg (episode.Explicit |> Option.map string) "" + model.ChapterSource <- if Option.isSome episode.Chapters then "internal" + elif Option.isSome episode.ChapterFile then "external" + else "none" + model.ChapterFile <- defaultArg episode.ChapterFile "" + model.ChapterType <- defaultArg episode.ChapterType "" + model.ContainsWaypoints <- defaultArg episode.ChapterWaypoints false + model.TranscriptUrl <- defaultArg episode.TranscriptUrl "" + model.TranscriptType <- defaultArg episode.TranscriptType "" + model.TranscriptLang <- defaultArg episode.TranscriptLang "" + model.TranscriptCaptions <- defaultArg episode.TranscriptCaptions false + model.SeasonNumber <- defaultArg episode.SeasonNumber 0 + model.SeasonDescription <- defaultArg episode.SeasonDescription "" + model.EpisodeNumber <- defaultArg (episode.EpisodeNumber |> Option.map string) "" + model.EpisodeDescription <- defaultArg episode.EpisodeDescription "" + model /// Update a post with values 
from the submitted form - member this.UpdatePost (post : Post) now = - let revision = { AsOf = now; Text = MarkupText.parse $"{this.Source}: {this.Text}" } + member this.UpdatePost (post: Post) now = + let revision = { AsOf = now; Text = MarkupText.Parse $"{this.Source}: {this.Text}" } // Detect a permalink change, and add the prior one to the prior list - match Permalink.toString post.Permalink with + match string post.Permalink with | "" -> post | link when link = this.Permalink -> post | _ -> { post with PriorPermalinks = post.Permalink :: post.PriorPermalinks } @@ -759,20 +728,20 @@ type EditPostModel = Permalink = Permalink this.Permalink PublishedOn = if this.DoPublish then Some now else post.PublishedOn UpdatedOn = now - Text = MarkupText.toHtml revision.Text + Text = revision.Text.AsHtml() Tags = this.Tags.Split "," |> Seq.ofArray - |> Seq.map (fun it -> it.Trim().ToLower ()) + |> Seq.map _.Trim().ToLower() |> Seq.filter (fun it -> it <> "") |> Seq.sort |> List.ofSeq - Template = match this.Template.Trim () with "" -> None | tmpl -> Some tmpl + Template = match this.Template.Trim() with "" -> None | tmpl -> Some tmpl CategoryIds = this.CategoryIds |> Array.map CategoryId |> List.ofArray Status = if this.DoPublish then Published else post.Status Metadata = Seq.zip this.MetaNames this.MetaValues |> Seq.filter (fun it -> fst it > "") |> Seq.map (fun it -> { Name = fst it; Value = snd it }) - |> Seq.sortBy (fun it -> $"{it.Name.ToLower ()} {it.Value.ToLower ()}") + |> Seq.sortBy (fun it -> $"{it.Name.ToLower()} {it.Value.ToLower()}") |> List.ofSeq Revisions = match post.Revisions |> List.tryHead with | Some r when r.Text = revision.Text -> post.Revisions @@ -787,9 +756,20 @@ type EditPostModel = MediaType = noneIfBlank this.MediaType ImageUrl = noneIfBlank this.ImageUrl Subtitle = noneIfBlank this.Subtitle - Explicit = noneIfBlank this.Explicit |> Option.map ExplicitRating.parse - ChapterFile = noneIfBlank this.ChapterFile - ChapterType = noneIfBlank 
this.ChapterType + Explicit = noneIfBlank this.Explicit |> Option.map ExplicitRating.Parse + Chapters = if this.ChapterSource = "internal" then + match post.Episode with + | Some e when Option.isSome e.Chapters -> e.Chapters + | Some _ + | None -> Some [] + else None + ChapterFile = if this.ChapterSource = "external" then noneIfBlank this.ChapterFile + else None + ChapterType = if this.ChapterSource = "external" then noneIfBlank this.ChapterType + else None + ChapterWaypoints = if this.ChapterSource = "none" then None + elif this.ContainsWaypoints then Some true + else None TranscriptUrl = noneIfBlank this.TranscriptUrl TranscriptType = noneIfBlank this.TranscriptType TranscriptLang = noneIfBlank this.TranscriptLang @@ -803,356 +783,398 @@ type EditPostModel = EpisodeDescription = noneIfBlank this.EpisodeDescription } else - None - } + None } + + +/// View model to add/edit a redirect rule +[] +type EditRedirectRuleModel = { + /// The ID (index) of the rule being edited + RuleId: int + + /// The "from" side of the rule + From: string + + /// The "to" side of the rule + To: string + + /// Whether this rule uses a regular expression + IsRegex: bool + + /// Whether a new rule should be inserted at the top or appended to the end (ignored for edits) + InsertAtTop: bool +} with + + /// Create a model from an existing rule + static member FromRule idx (rule: RedirectRule) = + { RuleId = idx + From = rule.From + To = rule.To + IsRegex = rule.IsRegex + InsertAtTop = false } + + /// Update a rule with the values from this model + member this.ToRule() = + { From = this.From + To = this.To + IsRegex = this.IsRegex } /// View model to edit RSS settings [] -type EditRssModel = - { /// Whether the site feed of posts is enabled - IsFeedEnabled : bool - - /// The name of the file generated for the site feed - FeedName : string - - /// Override the "posts per page" setting for the site feed - ItemsInFeed : int - - /// Whether feeds are enabled for all categories - IsCategoryEnabled 
: bool - - /// Whether feeds are enabled for all tags - IsTagEnabled : bool - - /// A copyright string to be placed in all feeds - Copyright : string - } +type EditRssModel = { + /// Whether the site feed of posts is enabled + IsFeedEnabled: bool + + /// The name of the file generated for the site feed + FeedName: string + + /// Override the "posts per page" setting for the site feed + ItemsInFeed: int + + /// Whether feeds are enabled for all categories + IsCategoryEnabled: bool + + /// Whether feeds are enabled for all tags + IsTagEnabled: bool + + /// A copyright string to be placed in all feeds + Copyright: string +} with /// Create an edit model from a set of RSS options - static member fromRssOptions (rss : RssOptions) = - { IsFeedEnabled = rss.IsFeedEnabled - FeedName = rss.FeedName - ItemsInFeed = defaultArg rss.ItemsInFeed 0 - IsCategoryEnabled = rss.IsCategoryEnabled - IsTagEnabled = rss.IsTagEnabled - Copyright = defaultArg rss.Copyright "" - } + static member FromRssOptions (rss: RssOptions) = + { IsFeedEnabled = rss.IsFeedEnabled + FeedName = rss.FeedName + ItemsInFeed = defaultArg rss.ItemsInFeed 0 + IsCategoryEnabled = rss.IsCategoryEnabled + IsTagEnabled = rss.IsTagEnabled + Copyright = defaultArg rss.Copyright "" } /// Update RSS options from values in this model - member this.UpdateOptions (rss : RssOptions) = + member this.UpdateOptions (rss: RssOptions) = { rss with IsFeedEnabled = this.IsFeedEnabled FeedName = this.FeedName ItemsInFeed = if this.ItemsInFeed = 0 then None else Some this.ItemsInFeed IsCategoryEnabled = this.IsCategoryEnabled IsTagEnabled = this.IsTagEnabled - Copyright = noneIfBlank this.Copyright - } + Copyright = noneIfBlank this.Copyright } /// View model to edit a tag mapping [] -type EditTagMapModel = - { /// The ID of the tag mapping being edited - Id : string - - /// The tag being mapped to a different link value - Tag : string - - /// The link value for the tag - UrlValue : string - } +type EditTagMapModel = { + /// The 
ID of the tag mapping being edited + Id: string - /// Whether this is a new tag mapping - member this.IsNew = this.Id = "new" + /// The tag being mapped to a different link value + Tag: string + + /// The link value for the tag + UrlValue: string +} with /// Create an edit model from the tag mapping - static member fromMapping (tagMap : TagMap) : EditTagMapModel = - { Id = TagMapId.toString tagMap.Id - Tag = tagMap.Tag - UrlValue = tagMap.UrlValue - } + static member FromMapping (tagMap: TagMap) : EditTagMapModel = + { Id = string tagMap.Id + Tag = tagMap.Tag + UrlValue = tagMap.UrlValue } + + /// Whether this is a new tag mapping + member this.IsNew = + this.Id = "new" /// View model to display a user's information [] -type EditUserModel = - { /// The ID of the user - Id : string +type EditUserModel = { + /// The ID of the user + Id: string - /// The user's access level - AccessLevel : string - - /// The user name (e-mail address) - Email : string - - /// The URL of the user's personal site - Url : string - - /// The user's first name - FirstName : string - - /// The user's last name - LastName : string - - /// The user's preferred name - PreferredName : string - - /// The user's password - Password : string - - /// Confirmation of the user's password - PasswordConfirm : string - } + /// The user's access level + AccessLevel: string - /// Construct a displayed user from a web log user - static member fromUser (user : WebLogUser) = - { Id = WebLogUserId.toString user.Id - AccessLevel = AccessLevel.toString user.AccessLevel - Url = defaultArg user.Url "" - Email = user.Email - FirstName = user.FirstName - LastName = user.LastName - PreferredName = user.PreferredName - Password = "" - PasswordConfirm = "" - } + /// The user name (e-mail address) + Email: string + + /// The URL of the user's personal site + Url: string + + /// The user's first name + FirstName: string + + /// The user's last name + LastName: string + + /// The user's preferred name + PreferredName: 
string + + /// The user's password + Password: string + + /// Confirmation of the user's password + PasswordConfirm: string +} with + + /// Construct a user edit form from a web log user + static member FromUser (user: WebLogUser) = + { Id = string user.Id + AccessLevel = string user.AccessLevel + Url = defaultArg user.Url "" + Email = user.Email + FirstName = user.FirstName + LastName = user.LastName + PreferredName = user.PreferredName + Password = "" + PasswordConfirm = "" } /// Is this a new user? - member this.IsNew = this.Id = "new" + member this.IsNew = + this.Id = "new" /// Update a user with values from this model (excludes password) - member this.UpdateUser (user : WebLogUser) = + member this.UpdateUser (user: WebLogUser) = { user with - AccessLevel = AccessLevel.parse this.AccessLevel + AccessLevel = AccessLevel.Parse this.AccessLevel Email = this.Email Url = noneIfBlank this.Url FirstName = this.FirstName LastName = this.LastName - PreferredName = this.PreferredName - } + PreferredName = this.PreferredName } /// The model to use to allow a user to log on [] -type LogOnModel = - { /// The user's e-mail address - EmailAddress : string +type LogOnModel = { + /// The user's e-mail address + EmailAddress : string + + /// The user's password + Password : string - /// The user's password - Password : string - - /// Where the user should be redirected once they have logged on - ReturnTo : string option - } + /// Where the user should be redirected once they have logged on + ReturnTo : string option +} with /// An empty log on model - static member empty = + static member Empty = { EmailAddress = ""; Password = ""; ReturnTo = None } +/// View model to manage chapters +[] +type ManageChaptersModel = { + /// The post ID for the chapters being edited + Id: string + + /// The title of the post for which chapters are being edited + Title: string + + /// The chapters for the post + Chapters: Chapter list +} with + + /// Create a model from a post and its episode's 
chapters + static member Create (post: Post) = + { Id = string post.Id + Title = post.Title + Chapters = post.Episode.Value.Chapters.Value } + + /// View model to manage permalinks [] -type ManagePermalinksModel = - { /// The ID for the entity being edited - Id : string - - /// The type of entity being edited ("page" or "post") - Entity : string - - /// The current title of the page or post - CurrentTitle : string - - /// The current permalink of the page or post - CurrentPermalink : string - - /// The prior permalinks for the page or post - Prior : string[] - } +type ManagePermalinksModel = { + /// The ID for the entity being edited + Id: string + + /// The type of entity being edited ("page" or "post") + Entity: string + + /// The current title of the page or post + CurrentTitle: string + + /// The current permalink of the page or post + CurrentPermalink: string + + /// The prior permalinks for the page or post + Prior: string array +} with /// Create a permalink model from a page - static member fromPage (pg : Page) = - { Id = PageId.toString pg.Id - Entity = "page" - CurrentTitle = pg.Title - CurrentPermalink = Permalink.toString pg.Permalink - Prior = pg.PriorPermalinks |> List.map Permalink.toString |> Array.ofList - } + static member FromPage (page: Page) = + { Id = string page.Id + Entity = "page" + CurrentTitle = page.Title + CurrentPermalink = string page.Permalink + Prior = page.PriorPermalinks |> List.map string |> Array.ofList } /// Create a permalink model from a post - static member fromPost (post : Post) = - { Id = PostId.toString post.Id - Entity = "post" - CurrentTitle = post.Title - CurrentPermalink = Permalink.toString post.Permalink - Prior = post.PriorPermalinks |> List.map Permalink.toString |> Array.ofList - } + static member FromPost (post: Post) = + { Id = string post.Id + Entity = "post" + CurrentTitle = post.Title + CurrentPermalink = string post.Permalink + Prior = post.PriorPermalinks |> List.map string |> Array.ofList } /// View model 
to manage revisions [] -type ManageRevisionsModel = - { /// The ID for the entity being edited - Id : string - - /// The type of entity being edited ("page" or "post") - Entity : string - - /// The current title of the page or post - CurrentTitle : string - - /// The revisions for the page or post - Revisions : DisplayRevision[] - } +type ManageRevisionsModel = { + /// The ID for the entity being edited + Id: string + + /// The type of entity being edited ("page" or "post") + Entity: string + + /// The current title of the page or post + CurrentTitle: string + + /// The revisions for the page or post + Revisions: Revision list +} with /// Create a revision model from a page - static member fromPage webLog (pg : Page) = - { Id = PageId.toString pg.Id - Entity = "page" - CurrentTitle = pg.Title - Revisions = pg.Revisions |> List.map (DisplayRevision.fromRevision webLog) |> Array.ofList - } + static member FromPage (page: Page) = + { Id = string page.Id + Entity = "page" + CurrentTitle = page.Title + Revisions = page.Revisions } /// Create a revision model from a post - static member fromPost webLog (post : Post) = - { Id = PostId.toString post.Id - Entity = "post" - CurrentTitle = post.Title - Revisions = post.Revisions |> List.map (DisplayRevision.fromRevision webLog) |> Array.ofList - } + static member FromPost (post: Post) = + { Id = string post.Id + Entity = "post" + CurrentTitle = post.Title + Revisions = post.Revisions } /// View model for posts in a list [] -type PostListItem = - { /// The ID of the post - Id : string - - /// The ID of the user who authored the post - AuthorId : string - - /// The status of the post - Status : string - - /// The title of the post - Title : string - - /// The permalink for the post - Permalink : string - - /// When this post was published - PublishedOn : Nullable - - /// When this post was last updated - UpdatedOn : DateTime - - /// The text of the post - Text : string - - /// The IDs of the categories for this post - 
CategoryIds : string list - - /// Tags for the post - Tags : string list - - /// The podcast episode information for this post - Episode : Episode option - - /// Metadata for the post - Metadata : MetaItem list - } +type PostListItem = { + /// The ID of the post + Id: string + + /// The ID of the user who authored the post + AuthorId: string + + /// The status of the post + Status: string + + /// The title of the post + Title: string + + /// The permalink for the post + Permalink: string + + /// When this post was published + PublishedOn: Nullable + + /// When this post was last updated + UpdatedOn: DateTime + + /// The text of the post + Text: string + + /// The IDs of the categories for this post + CategoryIds: string list + + /// Tags for the post + Tags: string list + + /// The podcast episode information for this post + Episode: Episode option + + /// Metadata for the post + Metadata: MetaItem list +} with /// Create a post list item from a post - static member fromPost (webLog : WebLog) (post : Post) = - let _, extra = WebLog.hostAndPath webLog - let inTZ = WebLog.localTime webLog - { Id = PostId.toString post.Id - AuthorId = WebLogUserId.toString post.AuthorId - Status = PostStatus.toString post.Status - Title = post.Title - Permalink = Permalink.toString post.Permalink - PublishedOn = post.PublishedOn |> Option.map inTZ |> Option.toNullable - UpdatedOn = inTZ post.UpdatedOn - Text = addBaseToRelativeUrls extra post.Text - CategoryIds = post.CategoryIds |> List.map CategoryId.toString - Tags = post.Tags - Episode = post.Episode - Metadata = post.Metadata - } + static member FromPost (webLog: WebLog) (post: Post) = + { Id = string post.Id + AuthorId = string post.AuthorId + Status = string post.Status + Title = post.Title + Permalink = string post.Permalink + PublishedOn = post.PublishedOn |> Option.map webLog.LocalTime |> Option.toNullable + UpdatedOn = webLog.LocalTime post.UpdatedOn + Text = addBaseToRelativeUrls webLog.ExtraPath post.Text + CategoryIds = 
post.CategoryIds |> List.map string + Tags = post.Tags + Episode = post.Episode + Metadata = post.Metadata } /// View model for displaying posts -type PostDisplay = - { /// The posts to be displayed - Posts : PostListItem[] - - /// Author ID -> name lookup - Authors : MetaItem list - - /// A subtitle for the page - Subtitle : string option - - /// The link to view newer (more recent) posts - NewerLink : string option - - /// The name of the next newer post (single-post only) - NewerName : string option - - /// The link to view older (less recent) posts - OlderLink : string option - - /// The name of the next older post (single-post only) - OlderName : string option - } +type PostDisplay = { + /// The posts to be displayed + Posts: PostListItem array + + /// Author ID -> name lookup + Authors: MetaItem list + + /// A subtitle for the page + Subtitle: string option + + /// The link to view newer (more recent) posts + NewerLink: string option + + /// The name of the next newer post (single-post only) + NewerName: string option + + /// The link to view older (less recent) posts + OlderLink: string option + + /// The name of the next older post (single-post only) + OlderName: string option +} /// View model for editing web log settings [] -type SettingsModel = - { /// The name of the web log - Name : string +type SettingsModel = { + /// The name of the web log + Name: string - /// The slug of the web log - Slug : string - - /// The subtitle of the web log - Subtitle : string + /// The slug of the web log + Slug: string + + /// The subtitle of the web log + Subtitle: string - /// The default page - DefaultPage : string + /// The default page + DefaultPage: string - /// How many posts should appear on index pages - PostsPerPage : int + /// How many posts should appear on index pages + PostsPerPage: int - /// The time zone in which dates/times should be displayed - TimeZone : string - - /// The theme to use to display the web log - ThemeId : string - - /// Whether to 
automatically load htmx - AutoHtmx : bool - - /// The default location for uploads - Uploads : string - } + /// The time zone in which dates/times should be displayed + TimeZone: string + + /// The theme to use to display the web log + ThemeId: string + + /// Whether to automatically load htmx + AutoHtmx: bool + + /// The default location for uploads + Uploads: string +} with /// Create a settings model from a web log - static member fromWebLog (webLog : WebLog) = - { Name = webLog.Name - Slug = webLog.Slug - Subtitle = defaultArg webLog.Subtitle "" - DefaultPage = webLog.DefaultPage - PostsPerPage = webLog.PostsPerPage - TimeZone = webLog.TimeZone - ThemeId = ThemeId.toString webLog.ThemeId - AutoHtmx = webLog.AutoHtmx - Uploads = UploadDestination.toString webLog.Uploads - } + static member FromWebLog(webLog: WebLog) = + { Name = webLog.Name + Slug = webLog.Slug + Subtitle = defaultArg webLog.Subtitle "" + DefaultPage = webLog.DefaultPage + PostsPerPage = webLog.PostsPerPage + TimeZone = webLog.TimeZone + ThemeId = string webLog.ThemeId + AutoHtmx = webLog.AutoHtmx + Uploads = string webLog.Uploads } /// Update a web log with settings from the form - member this.update (webLog : WebLog) = + member this.Update(webLog: WebLog) = { webLog with Name = this.Name Slug = this.Slug @@ -1162,53 +1184,49 @@ type SettingsModel = TimeZone = this.TimeZone ThemeId = ThemeId this.ThemeId AutoHtmx = this.AutoHtmx - Uploads = UploadDestination.parse this.Uploads - } + Uploads = UploadDestination.Parse this.Uploads } /// View model for uploading a file [] -type UploadFileModel = - { /// The upload destination - Destination : string - } +type UploadFileModel = { + /// The upload destination + Destination : string +} /// View model for uploading a theme [] -type UploadThemeModel = - { /// Whether the uploaded theme should overwrite an existing theme - DoOverwrite : bool - } +type UploadThemeModel = { + /// Whether the uploaded theme should overwrite an existing theme + DoOverwrite : 
bool +} /// A message displayed to the user [] -type UserMessage = - { /// The level of the message - Level : string - - /// The message - Message : string - - /// Further details about the message - Detail : string option - } - -/// Functions to support user messages -module UserMessage = +type UserMessage = { + /// The level of the message + Level: string + + /// The message + Message: string + + /// Further details about the message + Detail: string option +} with /// An empty user message (use one of the others for pre-filled level) - let empty = { Level = ""; Message = ""; Detail = None } + static member Empty = { Level = ""; Message = ""; Detail = None } /// A blank success message - let success = { empty with Level = "success" } + static member Success = { UserMessage.Empty with Level = "success" } /// A blank informational message - let info = { empty with Level = "primary" } + static member Info = { UserMessage.Empty with Level = "primary" } /// A blank warning message - let warning = { empty with Level = "warning" } + static member Warning = { UserMessage.Empty with Level = "warning" } /// A blank error message - let error = { empty with Level = "danger" } + static member Error = { UserMessage.Empty with Level = "danger" } diff --git a/src/MyWebLog.Tests/Data/CategoryDataTests.fs b/src/MyWebLog.Tests/Data/CategoryDataTests.fs new file mode 100644 index 0000000..d09fafe --- /dev/null +++ b/src/MyWebLog.Tests/Data/CategoryDataTests.fs @@ -0,0 +1,150 @@ +/// +/// Integration tests for implementations +/// +module CategoryDataTests + +open Expecto +open MyWebLog +open MyWebLog.Data + +/// The ID of the root web log +let rootId = WebLogId "uSitJEuD3UyzWC9jgOHc8g" + +/// The ID of the Favorites category +let private favoritesId = CategoryId "S5JflPsJ9EG7gA2LD4m92A" + +let ``Add succeeds`` (data: IData) = task { + let category = + { Category.Empty with Id = CategoryId "added-cat"; WebLogId = WebLogId "test"; Name = "Added"; Slug = "added" } + do! 
data.Category.Add category + let! stored = data.Category.FindById (CategoryId "added-cat") (WebLogId "test") + Expect.isSome stored "The category should have been added" +} + +let ``CountAll succeeds when categories exist`` (data: IData) = task { + let! count = data.Category.CountAll rootId + Expect.equal count 3 "There should have been 3 categories" +} + +let ``CountAll succeeds when categories do not exist`` (data: IData) = task { + let! count = data.Category.CountAll WebLogId.Empty + Expect.equal count 0 "There should have been no categories" +} + +let ``CountTopLevel succeeds when top-level categories exist`` (data: IData) = task { + let! count = data.Category.CountTopLevel rootId + Expect.equal count 2 "There should have been 2 top-level categories" +} + +let ``CountTopLevel succeeds when no top-level categories exist`` (data: IData) = task { + let! count = data.Category.CountTopLevel WebLogId.Empty + Expect.equal count 0 "There should have been no top-level categories" +} + +let ``FindAllForView succeeds`` (data: IData) = task { + let! all = data.Category.FindAllForView rootId + Expect.equal all.Length 3 "There should have been 3 categories returned" + Expect.equal all[0].Name "Favorites" "The first category is incorrect" + Expect.equal all[0].PostCount 1 "There should be one post in this category" + Expect.equal all[1].Name "Spitball" "The second category is incorrect" + Expect.equal all[1].PostCount 2 "There should be two posts in this category" + Expect.equal all[2].Name "Moonshot" "The third category is incorrect" + Expect.equal all[2].PostCount 1 "There should be one post in this category" +} + +let ``FindById succeeds when a category is found`` (data: IData) = task { + let! 
cat = data.Category.FindById favoritesId rootId + Expect.isSome cat "There should have been a category returned" + Expect.equal cat.Value.Name "Favorites" "The category retrieved is incorrect" + Expect.equal cat.Value.Slug "favorites" "The slug is incorrect" + Expect.equal cat.Value.Description (Some "Favorite posts") "The description is incorrect" + Expect.isNone cat.Value.ParentId "There should have been no parent ID" +} + +let ``FindById succeeds when a category is not found`` (data: IData) = task { + let! cat = data.Category.FindById CategoryId.Empty rootId + Expect.isNone cat "There should not have been a category returned" +} + +let ``FindByWebLog succeeds when categories exist`` (data: IData) = task { + let! cats = data.Category.FindByWebLog rootId + Expect.equal cats.Length 3 "There should be 3 categories" + Expect.exists cats (fun it -> it.Name = "Favorites") "Favorites category not found" + Expect.exists cats (fun it -> it.Name = "Spitball") "Spitball category not found" + Expect.exists cats (fun it -> it.Name = "Moonshot") "Moonshot category not found" +} + +let ``FindByWebLog succeeds when no categories exist`` (data: IData) = task { + let! cats = data.Category.FindByWebLog WebLogId.Empty + Expect.isEmpty cats "There should have been no categories returned" +} + +let ``Update succeeds`` (data: IData) = task { + match! data.Category.FindById favoritesId rootId with + | Some cat -> + do! data.Category.Update { cat with Name = "My Favorites"; Slug = "my-favorites"; Description = None } + match! 
data.Category.FindById favoritesId rootId with + | Some updated -> + Expect.equal updated.Name "My Favorites" "Name not updated properly" + Expect.equal updated.Slug "my-favorites" "Slug not updated properly" + Expect.isNone updated.Description "Description should have been removed" + | None -> Expect.isTrue false "The updated favorites category could not be retrieved" + | None -> Expect.isTrue false "The favorites category could not be retrieved" +} + +let ``Delete succeeds when the category is deleted (no posts)`` (data: IData) = task { + let! result = data.Category.Delete (CategoryId "added-cat") (WebLogId "test") + Expect.equal result CategoryDeleted "The category should have been deleted" + let! cat = data.Category.FindById (CategoryId "added-cat") (WebLogId "test") + Expect.isNone cat "The deleted category should not still exist" +} + +let ``Delete succeeds when the category does not exist`` (data: IData) = task { + let! result = data.Category.Delete CategoryId.Empty (WebLogId "none") + Expect.equal result CategoryNotFound "The category should not have been found" +} + +let ``Delete succeeds when reassigning parent category to None`` (data: IData) = task { + let moonshotId = CategoryId "ScVpyu1e7UiP7bDdge3ZEw" + let spitballId = CategoryId "jw6N69YtTEWVHAO33jHU-w" + let! result = data.Category.Delete spitballId rootId + Expect.equal result ReassignedChildCategories "Child categories should have been reassigned" + match! data.Category.FindById moonshotId rootId with + | Some cat -> Expect.isNone cat.ParentId "Parent ID should have been cleared" + | None -> Expect.isTrue false "Unable to find former child category" +} + +let ``Delete succeeds when reassigning parent category to Some`` (data: IData) = task { + do! data.Category.Add { Category.Empty with Id = CategoryId "a"; WebLogId = WebLogId "test"; Name = "A" } + do! 
data.Category.Add + { Category.Empty with + Id = CategoryId "b" + WebLogId = WebLogId "test" + Name = "B" + ParentId = Some (CategoryId "a") } + do! data.Category.Add + { Category.Empty with + Id = CategoryId "c" + WebLogId = WebLogId "test" + Name = "C" + ParentId = Some (CategoryId "b") } + let! result = data.Category.Delete (CategoryId "b") (WebLogId "test") + Expect.equal result ReassignedChildCategories "Child categories should have been reassigned" + match! data.Category.FindById (CategoryId "c") (WebLogId "test") with + | Some cat -> Expect.equal cat.ParentId (Some (CategoryId "a")) "Parent category ID not reassigned properly" + | None -> Expect.isTrue false "Expected former child category not found" +} + +let ``Delete succeeds and removes category from posts`` (data: IData) = task { + let moonshotId = CategoryId "ScVpyu1e7UiP7bDdge3ZEw" + let postId = PostId "RCsCU2puYEmkpzotoi8p4g" + match! data.Post.FindById postId rootId with + | Some post -> + Expect.equal post.CategoryIds [ moonshotId ] "Post category IDs are not as expected" + let! result = data.Category.Delete moonshotId rootId + Expect.equal result CategoryDeleted "The category should have been deleted (no children)" + match! 
data.Post.FindById postId rootId with + | Some p -> Expect.isEmpty p.CategoryIds "Category ID was not removed" + | None -> Expect.isTrue false "The expected updated post was not found" + | None -> Expect.isTrue false "The expected test post was not found" +} diff --git a/src/MyWebLog.Tests/Data/ConvertersTests.fs b/src/MyWebLog.Tests/Data/ConvertersTests.fs new file mode 100644 index 0000000..3f9f053 --- /dev/null +++ b/src/MyWebLog.Tests/Data/ConvertersTests.fs @@ -0,0 +1,296 @@ +module ConvertersTests + +open Expecto +open Microsoft.FSharpLu.Json +open MyWebLog +open MyWebLog.Converters.Json +open Newtonsoft.Json + +/// Unit tests for the CategoryIdConverter type +let categoryIdConverterTests = testList "CategoryIdConverter" [ + let opts = JsonSerializerSettings() + opts.Converters.Add(CategoryIdConverter()) + test "succeeds when serializing" { + let after = JsonConvert.SerializeObject(CategoryId "test-cat-id", opts) + Expect.equal after "\"test-cat-id\"" "Category ID serialized incorrectly" + } + test "succeeds when deserializing" { + let after = JsonConvert.DeserializeObject("\"test-cat-id\"", opts) + Expect.equal after (CategoryId "test-cat-id") "Category ID not serialized incorrectly" + } +] + +/// Unit tests for the CommentIdConverter type +let commentIdConverterTests = testList "CommentIdConverter" [ + let opts = JsonSerializerSettings() + opts.Converters.Add(CommentIdConverter()) + test "succeeds when serializing" { + let after = JsonConvert.SerializeObject(CommentId "test-id", opts) + Expect.equal after "\"test-id\"" "Comment ID serialized incorrectly" + } + test "succeeds when deserializing" { + let after = JsonConvert.DeserializeObject("\"my-test\"", opts) + Expect.equal after (CommentId "my-test") "Comment ID deserialized incorrectly" + } +] + +/// Unit tests for the CommentStatusConverter type +let commentStatusConverterTests = testList "CommentStatusConverter" [ + let opts = JsonSerializerSettings() + opts.Converters.Add(CommentStatusConverter()) + 
test "succeeds when serializing" { + let after = JsonConvert.SerializeObject(Approved, opts) + Expect.equal after "\"Approved\"" "Comment status serialized incorrectly" + } + test "succeeds when deserializing" { + let after = JsonConvert.DeserializeObject("\"Spam\"", opts) + Expect.equal after Spam "Comment status deserialized incorrectly" + } +] + +/// Unit tests for the CustomFeedIdConverter type +let customFeedIdConverterTests = testList "CustomFeedIdConverter" [ + let opts = JsonSerializerSettings() + opts.Converters.Add(CustomFeedIdConverter()) + test "succeeds when serializing" { + let after = JsonConvert.SerializeObject(CustomFeedId "my-feed", opts) + Expect.equal after "\"my-feed\"" "Custom feed ID serialized incorrectly" + } + test "succeeds when deserializing" { + let after = JsonConvert.DeserializeObject("\"feed-me\"", opts) + Expect.equal after (CustomFeedId "feed-me") "Custom feed ID deserialized incorrectly" + } +] + +/// Unit tests for the CustomFeedSourceConverter type +let customFeedSourceConverterTests = testList "CustomFeedSourceConverter" [ + let opts = JsonSerializerSettings() + opts.Converters.Add(CustomFeedSourceConverter()) + test "succeeds when serializing" { + let after = JsonConvert.SerializeObject(Category (CategoryId "abc-123"), opts) + Expect.equal after "\"category:abc-123\"" "Custom feed source serialized incorrectly" + } + test "succeeds when deserializing" { + let after = JsonConvert.DeserializeObject("\"tag:testing\"", opts) + Expect.equal after (Tag "testing") "Custom feed source deserialized incorrectly" + } +] + +/// Unit tests for the ExplicitRating type +let explicitRatingConverterTests = testList "ExplicitRatingConverter" [ + let opts = JsonSerializerSettings() + opts.Converters.Add(ExplicitRatingConverter()) + test "succeeds when serializing" { + let after = JsonConvert.SerializeObject(Yes, opts) + Expect.equal after "\"yes\"" "Explicit rating serialized incorrectly" + } + test "succeeds when deserializing" { + let after = 
JsonConvert.DeserializeObject("\"clean\"", opts) + Expect.equal after Clean "Explicit rating deserialized incorrectly" + } +] + +/// Unit tests for the MarkupText type +let markupTextConverterTests = testList "MarkupTextConverter" [ + let opts = JsonSerializerSettings() + opts.Converters.Add(MarkupTextConverter()) + test "succeeds when serializing" { + let after = JsonConvert.SerializeObject(Html "
<p>
test
</p>
", opts) + Expect.equal after "\"HTML:
<p>
test
</p>
\"" "Markup text serialized incorrectly" + } + test "succeeds when deserializing" { + let after = JsonConvert.DeserializeObject("\"Markdown: #### test\"", opts) + Expect.equal after (Markdown "#### test") "Markup text deserialized incorrectly" + } +] + +/// Unit tests for the PermalinkConverter type +let permalinkConverterTests = testList "PermalinkConverter" [ + let opts = JsonSerializerSettings() + opts.Converters.Add(PermalinkConverter()) + test "succeeds when serializing" { + let after = JsonConvert.SerializeObject(Permalink "2022/test", opts) + Expect.equal after "\"2022/test\"" "Permalink serialized incorrectly" + } + test "succeeds when deserializing" { + let after = JsonConvert.DeserializeObject("\"2023/unit.html\"", opts) + Expect.equal after (Permalink "2023/unit.html") "Permalink deserialized incorrectly" + } +] + +/// Unit tests for the PageIdConverter type +let pageIdConverterTests = testList "PageIdConverter" [ + let opts = JsonSerializerSettings() + opts.Converters.Add(PageIdConverter()) + test "succeeds when serializing" { + let after = JsonConvert.SerializeObject(PageId "test-page", opts) + Expect.equal after "\"test-page\"" "Page ID serialized incorrectly" + } + test "succeeds when deserializing" { + let after = JsonConvert.DeserializeObject("\"page-test\"", opts) + Expect.equal after (PageId "page-test") "Page ID deserialized incorrectly" + } +] + +/// Unit tests for the PodcastMedium type +let podcastMediumConverterTests = testList "PodcastMediumConverter" [ + let opts = JsonSerializerSettings() + opts.Converters.Add(PodcastMediumConverter()) + test "succeeds when serializing" { + let after = JsonConvert.SerializeObject(Audiobook, opts) + Expect.equal after "\"audiobook\"" "Podcast medium serialized incorrectly" + } + test "succeeds when deserializing" { + let after = JsonConvert.DeserializeObject("\"newsletter\"", opts) + Expect.equal after Newsletter "Podcast medium deserialized incorrectly" + } +] + +/// Unit tests for the PostIdConverter 
type +let postIdConverterTests = testList "PostIdConverter" [ + let opts = JsonSerializerSettings() + opts.Converters.Add(PostIdConverter()) + test "succeeds when serializing" { + let after = JsonConvert.SerializeObject(PostId "test-post", opts) + Expect.equal after "\"test-post\"" "Post ID serialized incorrectly" + } + test "succeeds when deserializing" { + let after = JsonConvert.DeserializeObject("\"post-test\"", opts) + Expect.equal after (PostId "post-test") "Post ID deserialized incorrectly" + } +] + +/// Unit tests for the TagMapIdConverter type +let tagMapIdConverterTests = testList "TagMapIdConverter" [ + let opts = JsonSerializerSettings() + opts.Converters.Add(TagMapIdConverter()) + test "succeeds when serializing" { + let after = JsonConvert.SerializeObject(TagMapId "test-map", opts) + Expect.equal after "\"test-map\"" "Tag map ID serialized incorrectly" + } + test "succeeds when deserializing" { + let after = JsonConvert.DeserializeObject("\"map-test\"", opts) + Expect.equal after (TagMapId "map-test") "Tag map ID deserialized incorrectly" + } +] + +/// Unit tests for the ThemeAssetIdConverter type +let themeAssetIdConverterTests = testList "ThemeAssetIdConverter" [ + let opts = JsonSerializerSettings() + opts.Converters.Add(ThemeAssetIdConverter()) + test "succeeds when serializing" { + let after = JsonConvert.SerializeObject(ThemeAssetId (ThemeId "test", "unit.jpg"), opts) + Expect.equal after "\"test/unit.jpg\"" "Theme asset ID serialized incorrectly" + } + test "succeeds when deserializing" { + let after = JsonConvert.DeserializeObject("\"theme/test.png\"", opts) + Expect.equal after (ThemeAssetId (ThemeId "theme", "test.png")) "Theme asset ID deserialized incorrectly" + } +] + +/// Unit tests for the ThemeIdConverter type +let themeIdConverterTests = testList "ThemeIdConverter" [ + let opts = JsonSerializerSettings() + opts.Converters.Add(ThemeIdConverter()) + test "succeeds when serializing" { + let after = JsonConvert.SerializeObject(ThemeId 
"test-theme", opts) + Expect.equal after "\"test-theme\"" "Theme ID serialized incorrectly" + } + test "succeeds when deserializing" { + let after = JsonConvert.DeserializeObject("\"theme-test\"", opts) + Expect.equal after (ThemeId "theme-test") "Theme ID deserialized incorrectly" + } +] + +/// Unit tests for the UploadIdConverter type +let uploadIdConverterTests = testList "UploadIdConverter" [ + let opts = JsonSerializerSettings() + opts.Converters.Add(UploadIdConverter()) + test "succeeds when serializing" { + let after = JsonConvert.SerializeObject(UploadId "test-up", opts) + Expect.equal after "\"test-up\"" "Upload ID serialized incorrectly" + } + test "succeeds when deserializing" { + let after = JsonConvert.DeserializeObject("\"up-test\"", opts) + Expect.equal after (UploadId "up-test") "Upload ID deserialized incorrectly" + } +] + +/// Unit tests for the WebLogIdConverter type +let webLogIdConverterTests = testList "WebLogIdConverter" [ + let opts = JsonSerializerSettings() + opts.Converters.Add(WebLogIdConverter()) + test "succeeds when serializing" { + let after = JsonConvert.SerializeObject(WebLogId "test-web", opts) + Expect.equal after "\"test-web\"" "Web log ID serialized incorrectly" + } + test "succeeds when deserializing" { + let after = JsonConvert.DeserializeObject("\"web-test\"", opts) + Expect.equal after (WebLogId "web-test") "Web log ID deserialized incorrectly" + } +] + +/// Unit tests for the WebLogUserIdConverter type +let webLogUserIdConverterTests = testList "WebLogUserIdConverter" [ + let opts = JsonSerializerSettings() + opts.Converters.Add(WebLogUserIdConverter()) + test "succeeds when serializing" { + let after = JsonConvert.SerializeObject(WebLogUserId "test-user", opts) + Expect.equal after "\"test-user\"" "Web log user ID serialized incorrectly" + } + test "succeeds when deserializing" { + let after = JsonConvert.DeserializeObject("\"user-test\"", opts) + Expect.equal after (WebLogUserId "user-test") "Web log user ID deserialized 
incorrectly" + } +] + +open NodaTime.Serialization.JsonNet + +/// Unit tests for the Json.configure function +let configureTests = test "Json.configure succeeds" { + let has typ (converter: JsonConverter) = converter.GetType() = typ + let ser = configure (JsonSerializer.Create()) + Expect.hasCountOf ser.Converters 1u (has typeof) "Category ID converter not found" + Expect.hasCountOf ser.Converters 1u (has typeof) "Comment ID converter not found" + Expect.hasCountOf ser.Converters 1u (has typeof) "Comment status converter not found" + Expect.hasCountOf ser.Converters 1u (has typeof) "Custom feed ID converter not found" + Expect.hasCountOf ser.Converters 1u (has typeof) "Custom feed source converter not found" + Expect.hasCountOf ser.Converters 1u (has typeof) "Explicit rating converter not found" + Expect.hasCountOf ser.Converters 1u (has typeof) "Markup text converter not found" + Expect.hasCountOf ser.Converters 1u (has typeof) "Permalink converter not found" + Expect.hasCountOf ser.Converters 1u (has typeof) "Page ID converter not found" + Expect.hasCountOf ser.Converters 1u (has typeof) "Podcast medium converter not found" + Expect.hasCountOf ser.Converters 1u (has typeof) "Post ID converter not found" + Expect.hasCountOf ser.Converters 1u (has typeof) "Tag map ID converter not found" + Expect.hasCountOf ser.Converters 1u (has typeof) "Theme asset ID converter not found" + Expect.hasCountOf ser.Converters 1u (has typeof) "Theme ID converter not found" + Expect.hasCountOf ser.Converters 1u (has typeof) "Upload ID converter not found" + Expect.hasCountOf ser.Converters 1u (has typeof) "Web log ID converter not found" + Expect.hasCountOf ser.Converters 1u (has typeof) "Web log user ID converter not found" + Expect.hasCountOf ser.Converters 1u (has typeof) "F# type converter not found" + Expect.hasCountOf ser.Converters 1u (has (NodaConverters.InstantConverter.GetType())) "NodaTime converter not found" + Expect.equal ser.NullValueHandling NullValueHandling.Ignore 
"Null handling set incorrectly" + Expect.equal ser.MissingMemberHandling MissingMemberHandling.Ignore "Missing member handling set incorrectly" +} + +/// All tests for the Data.Converters file +let all = testList "Converters" [ + categoryIdConverterTests + commentIdConverterTests + commentStatusConverterTests + customFeedIdConverterTests + customFeedSourceConverterTests + explicitRatingConverterTests + markupTextConverterTests + permalinkConverterTests + pageIdConverterTests + podcastMediumConverterTests + postIdConverterTests + tagMapIdConverterTests + themeAssetIdConverterTests + themeIdConverterTests + uploadIdConverterTests + webLogIdConverterTests + webLogUserIdConverterTests + configureTests +] diff --git a/src/MyWebLog.Tests/Data/PageDataTests.fs b/src/MyWebLog.Tests/Data/PageDataTests.fs new file mode 100644 index 0000000..4d71130 --- /dev/null +++ b/src/MyWebLog.Tests/Data/PageDataTests.fs @@ -0,0 +1,267 @@ +/// +/// Integration tests for implementations +/// +module PageDataTests + +open System +open Expecto +open MyWebLog +open MyWebLog.Data +open NodaTime + +/// The ID of the root web log +let private rootId = CategoryDataTests.rootId + +/// The ID of the "A cool page" page +let coolPageId = PageId "hgc_BLEZ50SoAWLuPNISvA" + +/// The published and updated time of the "A cool page" page +let private coolPagePublished = Instant.FromDateTimeOffset(DateTimeOffset.Parse "2024-01-20T22:14:28Z") + +/// The ID of the "Yet Another Page" page +let private otherPageId = PageId "KouRjvSmm0Wz6TMD8xf67A" + +let ``Add succeeds`` (data: IData) = task { + let page = + { Id = PageId "added-page" + WebLogId = WebLogId "test" + AuthorId = WebLogUserId "the-author" + Title = "A New Page" + Permalink = Permalink "2024/the-page.htm" + PublishedOn = Noda.epoch + Duration.FromDays 3 + UpdatedOn = Noda.epoch + Duration.FromDays 3 + Duration.FromMinutes 2L + IsInPageList = true + Template = Some "new-page-template" + Text = "
<h1>
A new page
</h1>
" + Metadata = [ { Name = "Meta Item"; Value = "Meta Value" } ] + PriorPermalinks = [ Permalink "2024/the-new-page.htm" ] + Revisions = [ { AsOf = Noda.epoch + Duration.FromDays 3; Text = Html "
<h1>
A new page
</h1>
" } ] } + do! data.Page.Add page + let! stored = data.Page.FindFullById (PageId "added-page") (WebLogId "test") + Expect.isSome stored "The page should have been added" + let pg = stored.Value + Expect.equal pg.Id page.Id "ID not saved properly" + Expect.equal pg.WebLogId page.WebLogId "Web log ID not saved properly" + Expect.equal pg.AuthorId page.AuthorId "Author ID not saved properly" + Expect.equal pg.Title page.Title "Title not saved properly" + Expect.equal pg.Permalink page.Permalink "Permalink not saved properly" + Expect.equal pg.PublishedOn page.PublishedOn "Published On not saved properly" + Expect.equal pg.UpdatedOn page.UpdatedOn "Updated On not saved properly" + Expect.equal pg.IsInPageList page.IsInPageList "Is in page list flag not saved properly" + Expect.equal pg.Template page.Template "Template not saved properly" + Expect.equal pg.Text page.Text "Text not saved properly" + Expect.equal pg.Metadata page.Metadata "Metadata not saved properly" + Expect.equal pg.PriorPermalinks page.PriorPermalinks "Prior permalinks not saved properly" + Expect.equal pg.Revisions page.Revisions "Revisions not saved properly" +} + +let ``All succeeds`` (data: IData) = task { + let! pages = data.Page.All rootId + Expect.hasLength pages 2 "There should have been 2 pages retrieved" + pages |> List.iteri (fun idx pg -> + Expect.equal pg.Text "" $"Page {idx} should have had no text" + Expect.isEmpty pg.Metadata $"Page {idx} should have had no metadata" + Expect.isEmpty pg.Revisions $"Page {idx} should have had no revisions" + Expect.isEmpty pg.PriorPermalinks $"Page {idx} should have had no prior permalinks") + let! others = data.Page.All (WebLogId "not-there") + Expect.isEmpty others "There should not be pages retrieved" +} + +let ``CountAll succeeds`` (data: IData) = task { + let! pages = data.Page.CountAll rootId + Expect.equal pages 2 "There should have been 2 pages counted" +} + +let ``CountListed succeeds`` (data: IData) = task { + let! 
pages = data.Page.CountListed rootId + Expect.equal pages 1 "There should have been 1 page in the page list" +} + +let ``FindById succeeds when a page is found`` (data: IData) = task { + let! page = data.Page.FindById coolPageId rootId + Expect.isSome page "A page should have been returned" + let pg = page.Value + Expect.equal pg.Id coolPageId "The wrong page was retrieved" + Expect.equal pg.WebLogId rootId "The page's web log did not match the called parameter" + Expect.equal pg.AuthorId (WebLogUserId "5EM2rimH9kONpmd2zQkiVA") "Author ID is incorrect" + Expect.equal pg.Title "Page Title" "Title is incorrect" + Expect.equal pg.Permalink (Permalink "a-cool-page.html") "Permalink is incorrect" + Expect.equal pg.PublishedOn coolPagePublished "Published On is incorrect" + Expect.equal pg.UpdatedOn coolPagePublished "Updated On is incorrect" + Expect.isFalse pg.IsInPageList "Is in page list flag should not have been set" + Expect.equal pg.Text "
<h1>
A Cool Page
</h1>
\n
<p>
It really is cool!
</p>
\n" "Text is incorrect" + Expect.equal + pg.Metadata [ { Name = "Cool"; Value = "true" }; { Name = "Warm"; Value = "false" } ] "Metadata is incorrect" + Expect.isEmpty pg.Revisions "Revisions should not have been retrieved" + Expect.isEmpty pg.PriorPermalinks "Prior permalinks should not have been retrieved" +} + +let ``FindById succeeds when a page is not found (incorrect weblog)`` (data: IData) = task { + let! page = data.Page.FindById coolPageId (WebLogId "wrong") + Expect.isNone page "The page should not have been retrieved" +} + +let ``FindById succeeds when a page is not found (bad page ID)`` (data: IData) = task { + let! page = data.Page.FindById (PageId "missing") rootId + Expect.isNone page "The page should not have been retrieved" +} + +let ``FindByPermalink succeeds when a page is found`` (data: IData) = task { + let! page = data.Page.FindByPermalink (Permalink "a-cool-page.html") rootId + Expect.isSome page "A page should have been returned" + let pg = page.Value + Expect.equal pg.Id coolPageId "The wrong page was retrieved" + Expect.isEmpty pg.Revisions "Revisions should not have been retrieved" + Expect.isEmpty pg.PriorPermalinks "Prior permalinks should not have been retrieved" +} + +let ``FindByPermalink succeeds when a page is not found (incorrect weblog)`` (data: IData) = task { + let! page = data.Page.FindByPermalink (Permalink "a-cool-page.html") (WebLogId "wrong") + Expect.isNone page "The page should not have been retrieved" +} + +let ``FindByPermalink succeeds when a page is not found (no such permalink)`` (data: IData) = task { + let! page = data.Page.FindByPermalink (Permalink "1970/no-www-then.html") rootId + Expect.isNone page "The page should not have been retrieved" +} + +let ``FindCurrentPermalink succeeds when a page is found`` (data: IData) = task { + let! 
link = data.Page.FindCurrentPermalink [ Permalink "a-cool-pg.html"; Permalink "a-cool-pg.html/" ] rootId + Expect.isSome link "A permalink should have been returned" + Expect.equal link (Some (Permalink "a-cool-page.html")) "The wrong permalink was retrieved" +} + +let ``FindCurrentPermalink succeeds when a page is not found`` (data: IData) = task { + let! link = data.Page.FindCurrentPermalink [ Permalink "blah/"; Permalink "blah" ] rootId + Expect.isNone link "A permalink should not have been returned" +} + +let ``FindFullById succeeds when a page is found`` (data: IData) = task { + let! page = data.Page.FindFullById coolPageId rootId + Expect.isSome page "A page should have been returned" + let pg = page.Value + Expect.equal pg.Id coolPageId "The wrong page was retrieved" + Expect.equal pg.WebLogId rootId "The page's web log did not match the called parameter" + Expect.equal + pg.Revisions + [ { AsOf = coolPagePublished; Text = Markdown "# A Cool Page\n\nIt really is cool!" } ] + "Revisions are incorrect" + Expect.equal pg.PriorPermalinks [ Permalink "a-cool-pg.html" ] "Prior permalinks are incorrect" +} + +let ``FindFullById succeeds when a page is not found`` (data: IData) = task { + let! page = data.Page.FindFullById (PageId "not-there") rootId + Expect.isNone page "A page should not have been retrieved" +} + +let ``FindFullByWebLog succeeds when pages are found`` (data: IData) = task { + let! pages = data.Page.FindFullByWebLog rootId + Expect.hasLength pages 2 "There should have been 2 pages returned" + pages |> List.iter (fun pg -> + Expect.contains [ coolPageId; otherPageId ] pg.Id $"Page ID {pg.Id} unexpected" + if pg.Id = coolPageId then + Expect.isNonEmpty pg.Metadata "Metadata should have been retrieved" + Expect.isNonEmpty pg.PriorPermalinks "Prior permalinks should have been retrieved" + Expect.isNonEmpty pg.Revisions "Revisions should have been retrieved") +} + +let ``FindFullByWebLog succeeds when pages are not found`` (data: IData) = task { + let! 
pages = data.Page.FindFullByWebLog (WebLogId "does-not-exist") + Expect.isEmpty pages "No pages should have been retrieved" +} + +let ``FindListed succeeds when pages are found`` (data: IData) = task { + let! pages = data.Page.FindListed rootId + Expect.hasLength pages 1 "There should have been 1 page returned" + Expect.equal pages[0].Id otherPageId "An unexpected page was returned" + Expect.equal pages[0].Text "" "Text should not have been returned" + Expect.isEmpty pages[0].PriorPermalinks "Prior permalinks should not have been retrieved" + Expect.isEmpty pages[0].Revisions "Revisions should not have been retrieved" +} + +let ``FindListed succeeds when pages are not found`` (data: IData) = task { + let! pages = data.Page.FindListed (WebLogId "none") + Expect.isEmpty pages "No pages should have been retrieved" +} + +let ``FindPageOfPages succeeds when pages are found`` (data: IData) = task { + let! pages = data.Page.FindPageOfPages rootId 1 + Expect.hasLength pages 2 "There should have been 2 page returned" + Expect.equal pages[0].Id coolPageId "Pages not sorted correctly" + pages |> List.iteri (fun idx pg -> + Expect.notEqual pg.Text "" $"Text for page {idx} should have been retrieved" + Expect.isEmpty pg.Metadata $"Metadata for page {idx} should not have been retrieved" + Expect.isEmpty pg.PriorPermalinks $"Prior permalinks for page {idx} should not have been retrieved" + Expect.isEmpty pg.Revisions $"Revisions for page {idx} should not have been retrieved") +} + +let ``FindPageOfPages succeeds when pages are not found`` (data: IData) = task { + let! pages = data.Page.FindPageOfPages rootId 2 + Expect.isEmpty pages "No pages should have been retrieved" +} + +let ``Update succeeds when the page exists`` (data: IData) = task { + let! page = data.Page.FindFullById coolPageId rootId + Expect.isSome page "A page should have been returned" + do! 
data.Page.Update + { page.Value with + Title = "This Is Neat" + Permalink = Permalink "neat-page.html" + UpdatedOn = page.Value.PublishedOn + Duration.FromHours 5 + IsInPageList = true + Text = "
<p>
I have been updated" + Metadata = [ List.head page.Value.Metadata ] + PriorPermalinks = [ Permalink "a-cool-page.html" ] + Revisions = + { AsOf = page.Value.PublishedOn + Duration.FromHours 5; Text = Html "
<p>
I have been updated" } + :: page.Value.Revisions } + let! updated = data.Page.FindFullById coolPageId rootId + Expect.isSome updated "The updated page should have been returned" + let pg = updated.Value + Expect.equal pg.Title "This Is Neat" "Title is incorrect" + Expect.equal pg.Permalink (Permalink "neat-page.html") "Permalink is incorrect" + Expect.equal pg.PublishedOn coolPagePublished "Published On is incorrect" + Expect.equal pg.UpdatedOn (coolPagePublished + Duration.FromHours 5) "Updated On is incorrect" + Expect.isTrue pg.IsInPageList "Is in page list flag should have been set" + Expect.equal pg.Text "

<p>I have been updated" "Text is incorrect" + Expect.equal pg.Metadata [ { Name = "Cool"; Value = "true" } ] "Metadata is incorrect" + Expect.equal pg.PriorPermalinks [ Permalink "a-cool-page.html" ] "Prior permalinks are incorrect" + Expect.equal + pg.Revisions + [ { AsOf = coolPagePublished + Duration.FromHours 5; Text = Html "

I have been updated" } + { AsOf = coolPagePublished; Text = Markdown "# A Cool Page\n\nIt really is cool!" } ] + "Revisions are incorrect" +} + +let ``Update succeeds when the page does not exist`` (data: IData) = task { + let pageId = PageId "missing-page" + do! data.Page.Update { Page.Empty with Id = pageId; WebLogId = rootId } + let! page = data.Page.FindById pageId rootId + Expect.isNone page "A page should not have been retrieved" +} + +let ``UpdatePriorPermalinks succeeds when the page exists`` (data: IData) = task { + let links = [ Permalink "link-1.html"; Permalink "link-1.aspx"; Permalink "link-3.php" ] + let! found = data.Page.UpdatePriorPermalinks otherPageId rootId links + Expect.isTrue found "The permalinks should have been updated" + let! page = data.Page.FindFullById otherPageId rootId + Expect.isSome page "The page should have been found" + Expect.equal page.Value.PriorPermalinks links "The prior permalinks were not correct" +} + +let ``UpdatePriorPermalinks succeeds when the page does not exist`` (data: IData) = task { + let! found = + data.Page.UpdatePriorPermalinks (PageId "no-page") WebLogId.Empty + [ Permalink "link-1.html"; Permalink "link-1.aspx"; Permalink "link-3.php" ] + Expect.isFalse found "The permalinks should not have been updated" +} + +let ``Delete succeeds when a page is deleted`` (data: IData) = task { + let! deleted = data.Page.Delete coolPageId rootId + Expect.isTrue deleted "The page should have been deleted" +} + +let ``Delete succeeds when a page is not deleted`` (data: IData) = task { + let! deleted = data.Page.Delete coolPageId rootId // this was deleted above + Expect.isFalse deleted "A page should not have been deleted" +} diff --git a/src/MyWebLog.Tests/Data/PostDataTests.fs b/src/MyWebLog.Tests/Data/PostDataTests.fs new file mode 100644 index 0000000..8fdffdb --- /dev/null +++ b/src/MyWebLog.Tests/Data/PostDataTests.fs @@ -0,0 +1,431 @@ +///

+/// Integration tests for implementations +/// +module PostDataTests + +open System +open Expecto +open MyWebLog +open MyWebLog.Data +open NodaTime + +/// The ID of the root web log +let private rootId = CategoryDataTests.rootId + +/// The ID of podcast episode 1 +let private episode1 = PostId "osxMfWGlAkyugUbJ1-xD1g" + +/// The published instant for episode 1 +let private episode1Published = Instant.FromDateTimeOffset(DateTimeOffset.Parse "2024-01-20T22:24:01Z") + +/// The ID of podcast episode 2 +let episode2 = PostId "l4_Eh4aFO06SqqJjOymNzA" + +/// The ID of "Something May Happen" post +let private something = PostId "QweKbWQiOkqqrjEdgP9wwg" + +/// The published instant for "Something May Happen" post +let private somethingPublished = Instant.FromDateTimeOffset(DateTimeOffset.Parse "2024-01-20T22:32:59Z") + +/// The ID of "An Incomplete Thought" post +let private incomplete = PostId "VweKbWQiOkqqrjEdgP9wwg" + +/// The ID of "Test Post 1" post +let private testPost1 = PostId "RCsCU2puYEmkpzotoi8p4g" + +/// The published instant for "Test Post 1" post +let private testPost1Published = Instant.FromDateTimeOffset(DateTimeOffset.Parse "2024-01-20T22:17:29Z") + +/// The category IDs for "Spitball" (parent) and "Moonshot" +let private testCatIds = [ CategoryId "jw6N69YtTEWVHAO33jHU-w"; CategoryId "ScVpyu1e7UiP7bDdge3ZEw" ] + +/// Ensure that a list of posts has text for each post +let private ensureHasText (posts: Post list) = + for post in posts do Expect.isNotEmpty post.Text $"Text should not be blank (post ID {post.Id})" + +/// Ensure that a list of posts has no revisions or prior permalinks +let private ensureEmpty posts = + for post in posts do + Expect.isEmpty post.Revisions $"There should have been no revisions (post ID {post.Id})" + Expect.isEmpty post.PriorPermalinks $"There should have been no prior permalinks (post ID {post.Id})" + +let ``Add succeeds`` (data: IData) = task { + let post = + { Id = PostId "a-new-post" + WebLogId = WebLogId "test" + AuthorId 
= WebLogUserId "test-author" + Status = Published + Title = "A New Test Post" + Permalink = Permalink "2020/test-post.html" + PublishedOn = Some (Noda.epoch + Duration.FromMinutes 1L) + UpdatedOn = Noda.epoch + Duration.FromMinutes 3L + Template = Some "fancy" + Text = "

<p>Test text here" + CategoryIds = [ CategoryId "a"; CategoryId "b" ] + Tags = [ "x"; "y"; "zed" ] + Episode = Some { Episode.Empty with Media = "test-ep.mp3" } + Metadata = [ { Name = "Meta"; Value = "Data" } ] + PriorPermalinks = [ Permalink "2020/test-post-a.html" ] + Revisions = [ { AsOf = Noda.epoch + Duration.FromMinutes 1L; Text = Html "

Test text here" } ] } + do! data.Post.Add post + let! stored = data.Post.FindFullById post.Id post.WebLogId + Expect.isSome stored "The added post should have been retrieved" + let it = stored.Value + Expect.equal it.Id post.Id "ID not saved properly" + Expect.equal it.WebLogId post.WebLogId "Web log ID not saved properly" + Expect.equal it.AuthorId post.AuthorId "Author ID not saved properly" + Expect.equal it.Status post.Status "Status not saved properly" + Expect.equal it.Title post.Title "Title not saved properly" + Expect.equal it.Permalink post.Permalink "Permalink not saved properly" + Expect.equal it.PublishedOn post.PublishedOn "Published On not saved properly" + Expect.equal it.UpdatedOn post.UpdatedOn "Updated On not saved properly" + Expect.equal it.Template post.Template "Template not saved properly" + Expect.equal it.Text post.Text "Text not saved properly" + Expect.equal it.CategoryIds post.CategoryIds "Category IDs not saved properly" + Expect.equal it.Tags post.Tags "Tags not saved properly" + Expect.equal it.Episode post.Episode "Episode not saved properly" + Expect.equal it.Metadata post.Metadata "Metadata items not saved properly" + Expect.equal it.PriorPermalinks post.PriorPermalinks "Prior permalinks not saved properly" + Expect.equal it.Revisions post.Revisions "Revisions not saved properly" +} + +let ``CountByStatus succeeds`` (data: IData) = task { + let! count = data.Post.CountByStatus Published rootId + Expect.equal count 4 "There should be 4 published posts" +} + +let ``FindById succeeds when a post is found`` (data: IData) = task { + let! 
post = data.Post.FindById episode1 rootId + Expect.isSome post "There should have been a post returned" + let it = post.Value + Expect.equal it.Id episode1 "An incorrect post was retrieved" + Expect.equal it.WebLogId rootId "The post belongs to an incorrect web log" + Expect.equal it.AuthorId (WebLogUserId "5EM2rimH9kONpmd2zQkiVA") "Author ID is incorrect" + Expect.equal it.Status Published "Status is incorrect" + Expect.equal it.Title "Episode 1" "Title is incorrect" + Expect.equal it.Permalink (Permalink "2024/episode-1.html") "Permalink is incorrect" + Expect.equal it.PublishedOn (Some episode1Published) "Published On is incorrect" + Expect.equal it.UpdatedOn episode1Published "Updated On is incorrect" + Expect.equal it.Text "

It's the launch of my new podcast - y'all come listen!" "Text is incorrect" + Expect.equal it.CategoryIds [ CategoryId "S5JflPsJ9EG7gA2LD4m92A" ] "Category IDs are incorrect" + Expect.equal it.Tags [ "general"; "podcast" ] "Tags are incorrect" + Expect.isSome it.Episode "There should be an episode associated with this post" + let ep = it.Episode.Value + Expect.equal ep.Media "episode-1.mp3" "Episode media is incorrect" + Expect.equal ep.Length 124302L "Episode length is incorrect" + Expect.equal + ep.Duration (Some (Duration.FromMinutes 12L + Duration.FromSeconds 22L)) "Episode duration is incorrect" + Expect.equal ep.ImageUrl (Some "images/ep1-cover.png") "Episode image URL is incorrect" + Expect.equal ep.Subtitle (Some "An introduction to this podcast") "Episode subtitle is incorrect" + Expect.equal ep.Explicit (Some Clean) "Episode explicit rating is incorrect" + Expect.equal ep.ChapterFile (Some "uploads/chapters.json") "Episode chapter file is incorrect" + Expect.equal ep.TranscriptUrl (Some "uploads/transcript.srt") "Episode transcript URL is incorrect" + Expect.equal ep.TranscriptType (Some "application/srt") "Episode transcript type is incorrect" + Expect.equal ep.TranscriptLang (Some "en") "Episode transcript language is incorrect" + Expect.equal ep.TranscriptCaptions (Some true) "Episode transcript caption flag is incorrect" + Expect.equal ep.SeasonNumber (Some 1) "Episode season number is incorrect" + Expect.equal ep.SeasonDescription (Some "The First Season") "Episode season description is incorrect" + Expect.equal ep.EpisodeNumber (Some 1.) "Episode number is incorrect" + Expect.equal ep.EpisodeDescription (Some "The first episode ever!") "Episode description is incorrect" + Expect.equal + it.Metadata + [ { Name = "Density"; Value = "Non-existent" }; { Name = "Intensity"; Value = "Low" } ] + "Metadata is incorrect" + ensureEmpty [ it ] +} + +let ``FindById succeeds when a post is not found (incorrect weblog)`` (data: IData) = task { + let! 
post = data.Post.FindById episode1 (WebLogId "wrong") + Expect.isNone post "The post should not have been retrieved" +} + +let ``FindById succeeds when a post is not found (bad post ID)`` (data: IData) = task { + let! post = data.Post.FindById (PostId "absent") rootId + Expect.isNone post "The post should not have been retrieved" +} + +let ``FindByPermalink succeeds when a post is found`` (data: IData) = task { + let! post = data.Post.FindByPermalink (Permalink "2024/episode-1.html") rootId + Expect.isSome post "A post should have been returned" + let it = post.Value + Expect.equal it.Id episode1 "The wrong post was retrieved" + ensureEmpty [ it ] +} + +let ``FindByPermalink succeeds when a post is not found (incorrect weblog)`` (data: IData) = task { + let! post = data.Post.FindByPermalink (Permalink "2024/episode-1.html") (WebLogId "incorrect") + Expect.isNone post "The post should not have been retrieved" +} + +let ``FindByPermalink succeeds when a post is not found (no such permalink)`` (data: IData) = task { + let! post = data.Post.FindByPermalink (Permalink "404") rootId + Expect.isNone post "The post should not have been retrieved" +} + +let ``FindCurrentPermalink succeeds when a post is found`` (data: IData) = task { + let! link = data.Post.FindCurrentPermalink [ Permalink "2024/ep-1.html"; Permalink "2024/ep-1.html/" ] rootId + Expect.isSome link "A permalink should have been returned" + Expect.equal link (Some (Permalink "2024/episode-1.html")) "The wrong permalink was retrieved" +} + +let ``FindCurrentPermalink succeeds when a post is not found`` (data: IData) = task { + let! link = data.Post.FindCurrentPermalink [ Permalink "oops/"; Permalink "oops" ] rootId + Expect.isNone link "A permalink should not have been returned" +} + +let ``FindFullById succeeds when a post is found`` (data: IData) = task { + let! 
post = data.Post.FindFullById episode1 rootId + Expect.isSome post "A post should have been returned" + let it = post.Value + Expect.equal it.Id episode1 "The wrong post was retrieved" + Expect.equal it.WebLogId rootId "The post's web log did not match the called parameter" + Expect.equal + it.Revisions + [ { AsOf = episode1Published; Text = Html "

It's the launch of my new podcast - y'all come listen!" } ] + "Revisions are incorrect" + Expect.equal it.PriorPermalinks [ Permalink "2024/ep-1.html" ] "Prior permalinks are incorrect" +} + +let ``FindFullById succeeds when a post is not found`` (data: IData) = task { + let! post = data.Post.FindFullById (PostId "no-post") rootId + Expect.isNone post "A page should not have been retrieved" +} + +let ``FindFullByWebLog succeeds when posts are found`` (data: IData) = task { + let! posts = data.Post.FindFullByWebLog rootId + Expect.hasLength posts 5 "There should have been 5 posts returned" + let allPosts = [ testPost1; episode1; episode2; something; incomplete ] + posts |> List.iter (fun it -> + Expect.contains allPosts it.Id $"Post ID {it.Id} unexpected" + if it.Id = episode1 then + Expect.isNonEmpty it.Metadata "Metadata should have been retrieved" + Expect.isNonEmpty it.PriorPermalinks "Prior permalinks should have been retrieved" + Expect.isNonEmpty it.Revisions "Revisions should have been retrieved") +} + +let ``FindFullByWebLog succeeds when posts are not found`` (data: IData) = task { + let! posts = data.Post.FindFullByWebLog (WebLogId "nonexistent") + Expect.isEmpty posts "No posts should have been retrieved" +} + +let ``FindPageOfCategorizedPosts succeeds when posts are found`` (data: IData) = task { + let! posts = data.Post.FindPageOfCategorizedPosts rootId testCatIds 1 1 + Expect.hasLength posts 2 "There should be 2 posts returned" + Expect.equal posts[0].Id something "The wrong post was returned for page 1" + ensureEmpty posts + let! posts = data.Post.FindPageOfCategorizedPosts rootId testCatIds 2 1 + Expect.hasLength posts 1 "There should be 1 post returned" + Expect.equal posts[0].Id testPost1 "The wrong post was returned for page 2" + ensureEmpty posts +} + +let ``FindPageOfCategorizedPosts succeeds when finding a too-high page number`` (data: IData) = task { + let! 
posts = data.Post.FindPageOfCategorizedPosts rootId testCatIds 17 2 + Expect.hasLength posts 0 "There should have been no posts returned (not enough posts)" +} + +let ``FindPageOfCategorizedPosts succeeds when a category has no posts`` (data: IData) = task { + let! posts = data.Post.FindPageOfCategorizedPosts rootId [ CategoryId "nope" ] 1 1 + Expect.hasLength posts 0 "There should have been no posts returned (none match)" +} + +let ``FindPageOfPosts succeeds when posts are found`` (data: IData) = task { + let ensureNoText (posts: Post list) = + for post in posts do Expect.equal post.Text "" $"There should be no text (post ID {post.Id})" + let! posts = data.Post.FindPageOfPosts rootId 1 2 + Expect.hasLength posts 3 "There should have been 3 posts returned for page 1" + Expect.equal posts[0].Id incomplete "Page 1, post 1 is incorrect" + Expect.equal posts[1].Id something "Page 1, post 2 is incorrect" + Expect.equal posts[2].Id episode2 "Page 1, post 3 is incorrect" + ensureNoText posts + ensureEmpty posts + let! posts = data.Post.FindPageOfPosts rootId 2 2 + Expect.hasLength posts 3 "There should have been 3 posts returned for page 2" + Expect.equal posts[0].Id episode2 "Page 2, post 1 is incorrect" + Expect.equal posts[1].Id episode1 "Page 2, post 2 is incorrect" + Expect.equal posts[2].Id testPost1 "Page 2, post 3 is incorrect" + ensureNoText posts + ensureEmpty posts + let! posts = data.Post.FindPageOfPosts rootId 3 2 + Expect.hasLength posts 1 "There should have been 1 post returned for page 3" + Expect.equal posts[0].Id testPost1 "Page 3, post 1 is incorrect" + ensureNoText posts + ensureEmpty posts +} + +let ``FindPageOfPosts succeeds when finding a too-high page number`` (data: IData) = task { + let! posts = data.Post.FindPageOfPosts rootId 88 3 + Expect.isEmpty posts "There should have been no posts returned (not enough posts)" +} + +let ``FindPageOfPosts succeeds when there are no posts`` (data: IData) = task { + let! 
posts = data.Post.FindPageOfPosts (WebLogId "no-posts") 1 25 + Expect.isEmpty posts "There should have been no posts returned (no posts)" +} + +let ``FindPageOfPublishedPosts succeeds when posts are found`` (data: IData) = task { + let! posts = data.Post.FindPageOfPublishedPosts rootId 1 3 + Expect.hasLength posts 4 "There should have been 4 posts returned for page 1" + Expect.equal posts[0].Id something "Page 1, post 1 is incorrect" + Expect.equal posts[1].Id episode2 "Page 1, post 2 is incorrect" + Expect.equal posts[2].Id episode1 "Page 1, post 3 is incorrect" + Expect.equal posts[3].Id testPost1 "Page 1, post 4 is incorrect" + ensureHasText posts + ensureEmpty posts + let! posts = data.Post.FindPageOfPublishedPosts rootId 2 2 + Expect.hasLength posts 2 "There should have been 2 posts returned for page 2" + Expect.equal posts[0].Id episode1 "Page 2, post 1 is incorrect" + Expect.equal posts[1].Id testPost1 "Page 2, post 2 is incorrect" + ensureHasText posts + ensureEmpty posts +} + +let ``FindPageOfPublishedPosts succeeds when finding a too-high page number`` (data: IData) = task { + let! posts = data.Post.FindPageOfPublishedPosts rootId 7 22 + Expect.isEmpty posts "There should have been no posts returned (not enough posts)" +} + +let ``FindPageOfPublishedPosts succeeds when there are no posts`` (data: IData) = task { + let! posts = data.Post.FindPageOfPublishedPosts (WebLogId "empty") 1 8 + Expect.isEmpty posts "There should have been no posts returned (no posts)" +} + +let ``FindPageOfTaggedPosts succeeds when posts are found`` (data: IData) = task { + let! posts = data.Post.FindPageOfTaggedPosts rootId "f#" 1 1 + Expect.hasLength posts 2 "There should have been 2 posts returned" + Expect.equal posts[0].Id something "Page 1, post 1 is incorrect" + Expect.equal posts[1].Id testPost1 "Page 1, post 2 is incorrect" + ensureHasText posts + ensureEmpty posts + let! 
posts = data.Post.FindPageOfTaggedPosts rootId "f#" 2 1 + Expect.hasLength posts 1 "There should have been 1 posts returned" + Expect.equal posts[0].Id testPost1 "Page 2, post 1 is incorrect" + ensureHasText posts + ensureEmpty posts +} + +let ``FindPageOfTaggedPosts succeeds when posts are found (excluding drafts)`` (data: IData) = task { + let! posts = data.Post.FindPageOfTaggedPosts rootId "speculation" 1 10 + Expect.hasLength posts 1 "There should have been 1 post returned" + Expect.equal posts[0].Id something "Post 1 is incorrect" + ensureHasText posts + ensureEmpty posts +} + +let ``FindPageOfTaggedPosts succeeds when finding a too-high page number`` (data: IData) = task { + let! posts = data.Post.FindPageOfTaggedPosts rootId "f#" 436 18 + Expect.isEmpty posts "There should have been no posts returned (not enough posts)" +} + +let ``FindPageOfTaggedPosts succeeds when there are no posts`` (data: IData) = task { + let! posts = data.Post.FindPageOfTaggedPosts rootId "non-existent-tag" 1 8 + Expect.isEmpty posts "There should have been no posts returned (no posts)" +} + +let ``FindSurroundingPosts succeeds when there is no next newer post`` (data: IData) = task { + let! older, newer = data.Post.FindSurroundingPosts rootId somethingPublished + Expect.isSome older "There should have been an older post" + Expect.equal older.Value.Id episode2 "The next older post is incorrect" + ensureHasText [ older.Value ] + ensureEmpty [ older.Value ] + Expect.isNone newer "There should not have been a newer post" +} + +let ``FindSurroundingPosts succeeds when there is no next older post`` (data: IData) = task { + let! 
older, newer = data.Post.FindSurroundingPosts rootId testPost1Published + Expect.isNone older "There should not have been an older post" + Expect.isSome newer "There should have been a newer post" + Expect.equal newer.Value.Id episode1 "The next newer post is incorrect" + ensureHasText [ newer.Value ] + ensureEmpty [ newer.Value ] +} + +let ``FindSurroundingPosts succeeds when older and newer exist`` (data: IData) = task { + let! older, newer = data.Post.FindSurroundingPosts rootId episode1Published + Expect.isSome older "There should have been an older post" + Expect.equal older.Value.Id testPost1 "The next older post is incorrect" + Expect.isSome newer "There should have been a newer post" + Expect.equal newer.Value.Id episode2 "The next newer post is incorrect" + ensureHasText [ older.Value; newer.Value ] + ensureEmpty [ older.Value; newer.Value ] +} + +let ``Update succeeds when the post exists`` (data: IData) = task { + let! before = data.Post.FindFullById (PostId "a-new-post") (WebLogId "test") + Expect.isSome before "The post to be updated should have been found" + do! data.Post.Update + { before.Value with + AuthorId = WebLogUserId "someone-else" + Status = Draft + Title = "An Updated Test Post" + Permalink = Permalink "2021/updated-post.html" + PublishedOn = None + UpdatedOn = Noda.epoch + Duration.FromDays 4 + Template = Some "other" + Text = "

<p>Updated text here" + CategoryIds = [ CategoryId "c"; CategoryId "d"; CategoryId "e" ] + Tags = [ "alpha"; "beta"; "nu"; "zeta" ] + Episode = None + Metadata = [ { Name = "Howdy"; Value = "Pardner" } ] + PriorPermalinks = Permalink "2020/test-post.html" :: before.Value.PriorPermalinks + Revisions = + { AsOf = Noda.epoch + Duration.FromDays 4; Text = Html "

Updated text here" } + :: before.Value.Revisions } + let! after = data.Post.FindFullById (PostId "a-new-post") (WebLogId "test") + Expect.isSome after "The updated post should have been found" + let post = after.Value + Expect.equal post.AuthorId (WebLogUserId "someone-else") "Updated author is incorrect" + Expect.equal post.Status Draft "Updated status is incorrect" + Expect.equal post.Title "An Updated Test Post" "Updated title is incorrect" + Expect.equal post.Permalink (Permalink "2021/updated-post.html") "Updated permalink is incorrect" + Expect.isNone post.PublishedOn "Updated post should not have had a published-on date/time" + Expect.equal post.UpdatedOn (Noda.epoch + Duration.FromDays 4) "Updated updated-on date/time is incorrect" + Expect.equal post.Template (Some "other") "Updated template is incorrect" + Expect.equal post.Text "

<p>Updated text here" "Updated text is incorrect" + Expect.equal + post.CategoryIds [ CategoryId "c"; CategoryId "d"; CategoryId "e" ] "Updated category IDs are incorrect" + Expect.equal post.Tags [ "alpha"; "beta"; "nu"; "zeta" ] "Updated tags are incorrect" + Expect.isNone post.Episode "Update episode is incorrect" + Expect.equal post.Metadata [ { Name = "Howdy"; Value = "Pardner" } ] "Updated metadata is incorrect" + Expect.equal + post.PriorPermalinks + [ Permalink "2020/test-post.html"; Permalink "2020/test-post-a.html" ] + "Updated prior permalinks are incorrect" + Expect.equal + post.Revisions + [ { AsOf = Noda.epoch + Duration.FromDays 4; Text = Html "

<p>Updated text here" } + { AsOf = Noda.epoch + Duration.FromMinutes 1L; Text = Html "

Test text here" } ] + "Updated revisions are incorrect" +} + +let ``Update succeeds when the post does not exist`` (data: IData) = task { + let postId = PostId "lost-post" + do! data.Post.Update { Post.Empty with Id = postId; WebLogId = rootId } + let! post = data.Post.FindById postId rootId + Expect.isNone post "A post should not have been retrieved" +} + +let ``UpdatePriorPermalinks succeeds when the post exists`` (data: IData) = task { + let links = [ Permalink "2024/ep-1.html"; Permalink "2023/ep-1.html" ] + let! found = data.Post.UpdatePriorPermalinks episode1 rootId links + Expect.isTrue found "The permalinks should have been updated" + let! post = data.Post.FindFullById episode1 rootId + Expect.isSome post "The post should have been found" + Expect.equal post.Value.PriorPermalinks links "The prior permalinks were not correct" +} + +let ``UpdatePriorPermalinks succeeds when the post does not exist`` (data: IData) = task { + let! found = + data.Post.UpdatePriorPermalinks (PostId "silence") WebLogId.Empty [ Permalink "a.html"; Permalink "b.html" ] + Expect.isFalse found "The permalinks should not have been updated" +} + +let ``Delete succeeds when a post is deleted`` (data: IData) = task { + let! deleted = data.Post.Delete episode2 rootId + Expect.isTrue deleted "The post should have been deleted" +} + +let ``Delete succeeds when a post is not deleted`` (data: IData) = task { + let! 
deleted = data.Post.Delete episode2 rootId // this was deleted above + Expect.isFalse deleted "A post should not have been deleted" +} diff --git a/src/MyWebLog.Tests/Data/PostgresDataTests.fs b/src/MyWebLog.Tests/Data/PostgresDataTests.fs new file mode 100644 index 0000000..690559f --- /dev/null +++ b/src/MyWebLog.Tests/Data/PostgresDataTests.fs @@ -0,0 +1,722 @@ +module PostgresDataTests + +open BitBadger.Documents.Postgres +open Expecto +open Microsoft.Extensions.Logging.Abstractions +open MyWebLog +open MyWebLog.Converters +open MyWebLog.Data +open Newtonsoft.Json +open Npgsql +open ThrowawayDb.Postgres + +/// JSON serializer +let private ser = Json.configure (JsonSerializer.CreateDefault()) + +/// The throwaway database (deleted when disposed) +let mutable private db: ThrowawayDatabase option = None + +/// Create a PostgresData instance for testing +let private mkData () = + PostgresData(NullLogger(), ser) :> IData + +/// The host for the PostgreSQL test database (defaults to localhost) +let private testHost = + RethinkDbDataTests.env "PG_HOST" "localhost" + +/// The database name for the PostgreSQL test database (defaults to postgres) +let private testDb = + RethinkDbDataTests.env "PG_DB" "postgres" + +/// The user ID for the PostgreSQL test database (defaults to postgres) +let private testUser = + RethinkDbDataTests.env "PG_USER" "postgres" + +/// The password for the PostgreSQL test database (defaults to postgres) +let private testPw = + RethinkDbDataTests.env "PG_PW" "postgres" + +/// Create a fresh environment from the root backup +let private freshEnvironment () = task { + if Option.isSome db then db.Value.Dispose() + db <- Some (ThrowawayDatabase.Create $"Host={testHost};Database={testDb};User ID={testUser};Password={testPw}") + let source = NpgsqlDataSourceBuilder db.Value.ConnectionString + let _ = source.UseNodaTime() + Configuration.useDataSource (source.Build()) + let env = mkData () + do! 
env.StartUp() + // This exercises Restore for all implementations; all tests are dependent on it working as expected + do! Maintenance.Backup.restoreBackup "root-weblog.json" None false false env +} + +/// Set up the environment for the PostgreSQL tests +let private environmentSetUp = testTask "creating database" { + do! freshEnvironment () +} + +/// Integration tests for the Category implementation in PostgreSQL +let private categoryTests = testList "Category" [ + testTask "Add succeeds" { + do! CategoryDataTests.``Add succeeds`` (mkData ()) + } + testList "CountAll" [ + testTask "succeeds when categories exist" { + do! CategoryDataTests.``CountAll succeeds when categories exist`` (mkData ()) + } + testTask "succeeds when categories do not exist" { + do! CategoryDataTests.``CountAll succeeds when categories do not exist`` (mkData ()) + } + ] + testList "CountTopLevel" [ + testTask "succeeds when top-level categories exist" { + do! CategoryDataTests.``CountTopLevel succeeds when top-level categories exist`` (mkData ()) + } + testTask "succeeds when no top-level categories exist" { + do! CategoryDataTests.``CountTopLevel succeeds when no top-level categories exist`` (mkData ()) + } + ] + testTask "FindAllForView succeeds" { + do! CategoryDataTests.``FindAllForView succeeds`` (mkData ()) + } + testList "FindById" [ + testTask "succeeds when a category is found" { + do! CategoryDataTests.``FindById succeeds when a category is found`` (mkData ()) + } + testTask "succeeds when a category is not found" { + do! CategoryDataTests.``FindById succeeds when a category is not found`` (mkData ()) + } + ] + testList "FindByWebLog" [ + testTask "succeeds when categories exist" { + do! CategoryDataTests.``FindByWebLog succeeds when categories exist`` (mkData ()) + } + testTask "succeeds when no categories exist" { + do! CategoryDataTests.``FindByWebLog succeeds when no categories exist`` (mkData ()) + } + ] + testTask "Update succeeds" { + do! 
CategoryDataTests.``Update succeeds`` (mkData ()) + } + testList "Delete" [ + testTask "succeeds when the category is deleted (no posts)" { + do! CategoryDataTests.``Delete succeeds when the category is deleted (no posts)`` (mkData ()) + } + testTask "succeeds when the category does not exist" { + do! CategoryDataTests.``Delete succeeds when the category does not exist`` (mkData ()) + } + testTask "succeeds when reassigning parent category to None" { + do! CategoryDataTests.``Delete succeeds when reassigning parent category to None`` (mkData ()) + } + testTask "succeeds when reassigning parent category to Some" { + do! CategoryDataTests.``Delete succeeds when reassigning parent category to Some`` (mkData ()) + } + testTask "succeeds and removes category from posts" { + do! CategoryDataTests.``Delete succeeds and removes category from posts`` (mkData ()) + } + ] +] + +/// Integration tests for the Page implementation in PostgreSQL +let private pageTests = testList "Page" [ + testTask "Add succeeds" { + do! PageDataTests.``Add succeeds`` (mkData ()) + } + testTask "All succeeds" { + do! PageDataTests.``All succeeds`` (mkData ()) + } + testTask "CountAll succeeds" { + do! PageDataTests.``CountAll succeeds`` (mkData ()) + } + testTask "CountListed succeeds" { + do! PageDataTests.``CountListed succeeds`` (mkData ()) + } + testList "FindById" [ + testTask "succeeds when a page is found" { + do! PageDataTests.``FindById succeeds when a page is found`` (mkData ()) + } + testTask "succeeds when a page is not found (incorrect weblog)" { + do! PageDataTests.``FindById succeeds when a page is not found (incorrect weblog)`` (mkData ()) + } + testTask "succeeds when a page is not found (bad page ID)" { + do! PageDataTests.``FindById succeeds when a page is not found (bad page ID)`` (mkData ()) + } + ] + testList "FindByPermalink" [ + testTask "succeeds when a page is found" { + do! 
PageDataTests.``FindByPermalink succeeds when a page is found`` (mkData ()) + } + testTask "succeeds when a page is not found (incorrect weblog)" { + do! PageDataTests.``FindByPermalink succeeds when a page is not found (incorrect weblog)`` (mkData ()) + } + testTask "succeeds when a page is not found (no such permalink)" { + do! PageDataTests.``FindByPermalink succeeds when a page is not found (no such permalink)`` (mkData ()) + } + ] + testList "FindCurrentPermalink" [ + testTask "succeeds when a page is found" { + do! PageDataTests.``FindCurrentPermalink succeeds when a page is found`` (mkData ()) + } + testTask "succeeds when a page is not found" { + do! PageDataTests.``FindCurrentPermalink succeeds when a page is not found`` (mkData ()) + } + ] + testList "FindFullById" [ + testTask "succeeds when a page is found" { + do! PageDataTests.``FindFullById succeeds when a page is found`` (mkData ()) + } + testTask "succeeds when a page is not found" { + do! PageDataTests.``FindFullById succeeds when a page is not found`` (mkData ()) + } + ] + testList "FindFullByWebLog" [ + testTask "succeeds when pages are found" { + do! PageDataTests.``FindFullByWebLog succeeds when pages are found`` (mkData ()) + } + testTask "succeeds when a pages are not found" { + do! PageDataTests.``FindFullByWebLog succeeds when pages are not found`` (mkData ()) + } + ] + testList "FindListed" [ + testTask "succeeds when pages are found" { + do! PageDataTests.``FindListed succeeds when pages are found`` (mkData ()) + } + testTask "succeeds when a pages are not found" { + do! PageDataTests.``FindListed succeeds when pages are not found`` (mkData ()) + } + ] + testList "FindPageOfPages" [ + testTask "succeeds when pages are found" { + do! PageDataTests.``FindPageOfPages succeeds when pages are found`` (mkData ()) + } + testTask "succeeds when a pages are not found" { + do! 
PageDataTests.``FindPageOfPages succeeds when pages are not found`` (mkData ()) + } + ] + testList "Update" [ + testTask "succeeds when the page exists" { + do! PageDataTests.``Update succeeds when the page exists`` (mkData ()) + } + testTask "succeeds when the page does not exist" { + do! PageDataTests.``Update succeeds when the page does not exist`` (mkData ()) + } + ] + testList "UpdatePriorPermalinks" [ + testTask "succeeds when the page exists" { + do! PageDataTests.``UpdatePriorPermalinks succeeds when the page exists`` (mkData ()) + } + testTask "succeeds when the page does not exist" { + do! PageDataTests.``UpdatePriorPermalinks succeeds when the page does not exist`` (mkData ()) + } + ] + testList "Delete" [ + testTask "succeeds when a page is deleted" { + do! PageDataTests.``Delete succeeds when a page is deleted`` (mkData ()) + let! revisions = + Custom.scalar + "SELECT COUNT(*) AS it FROM page_revision WHERE page_id = @id" + [ idParam PageDataTests.coolPageId ] + toCount + Expect.equal revisions 0 "All revisions for the page should have been deleted" + } + testTask "succeeds when a page is not deleted" { + do! PageDataTests.``Delete succeeds when a page is not deleted`` (mkData ()) + } + ] +] + +/// Integration tests for the Post implementation in PostgreSQL +let private postTests = testList "Post" [ + testTask "Add succeeds" { + // We'll need the root website categories restored for these tests + do! freshEnvironment () + do! PostDataTests.``Add succeeds`` (mkData ()) + } + testTask "CountByStatus succeeds" { + do! PostDataTests.``CountByStatus succeeds`` (mkData ()) + } + testList "FindById" [ + testTask "succeeds when a post is found" { + do! PostDataTests.``FindById succeeds when a post is found`` (mkData ()) + } + testTask "succeeds when a post is not found (incorrect weblog)" { + do! PostDataTests.``FindById succeeds when a post is not found (incorrect weblog)`` (mkData ()) + } + testTask "succeeds when a post is not found (bad post ID)" { + do! 
PostDataTests.``FindById succeeds when a post is not found (bad post ID)`` (mkData ()) + } + ] + testList "FindByPermalink" [ + testTask "succeeds when a post is found" { + do! PostDataTests.``FindByPermalink succeeds when a post is found`` (mkData ()) + } + testTask "succeeds when a post is not found (incorrect weblog)" { + do! PostDataTests.``FindByPermalink succeeds when a post is not found (incorrect weblog)`` (mkData ()) + } + testTask "succeeds when a post is not found (no such permalink)" { + do! PostDataTests.``FindByPermalink succeeds when a post is not found (no such permalink)`` (mkData ()) + } + ] + testList "FindCurrentPermalink" [ + testTask "succeeds when a post is found" { + do! PostDataTests.``FindCurrentPermalink succeeds when a post is found`` (mkData ()) + } + testTask "succeeds when a post is not found" { + do! PostDataTests.``FindCurrentPermalink succeeds when a post is not found`` (mkData ()) + } + ] + testList "FindFullById" [ + testTask "succeeds when a post is found" { + do! PostDataTests.``FindFullById succeeds when a post is found`` (mkData ()) + } + testTask "succeeds when a post is not found" { + do! PostDataTests.``FindFullById succeeds when a post is not found`` (mkData ()) + } + ] + testList "FindFullByWebLog" [ + testTask "succeeds when posts are found" { + do! PostDataTests.``FindFullByWebLog succeeds when posts are found`` (mkData ()) + } + testTask "succeeds when a posts are not found" { + do! PostDataTests.``FindFullByWebLog succeeds when posts are not found`` (mkData ()) + } + ] + testList "FindPageOfCategorizedPosts" [ + testTask "succeeds when posts are found" { + do! PostDataTests.``FindPageOfCategorizedPosts succeeds when posts are found`` (mkData ()) + } + testTask "succeeds when finding a too-high page number" { + do! PostDataTests.``FindPageOfCategorizedPosts succeeds when finding a too-high page number`` (mkData ()) + } + testTask "succeeds when a category has no posts" { + do! 
PostDataTests.``FindPageOfCategorizedPosts succeeds when a category has no posts`` (mkData ()) + } + ] + testList "FindPageOfPosts" [ + testTask "succeeds when posts are found" { + do! PostDataTests.``FindPageOfPosts succeeds when posts are found`` (mkData ()) + } + testTask "succeeds when finding a too-high page number" { + do! PostDataTests.``FindPageOfPosts succeeds when finding a too-high page number`` (mkData ()) + } + testTask "succeeds when there are no posts" { + do! PostDataTests.``FindPageOfPosts succeeds when there are no posts`` (mkData ()) + } + ] + testList "FindPageOfPublishedPosts" [ + testTask "succeeds when posts are found" { + do! PostDataTests.``FindPageOfPublishedPosts succeeds when posts are found`` (mkData ()) + } + testTask "succeeds when finding a too-high page number" { + do! PostDataTests.``FindPageOfPublishedPosts succeeds when finding a too-high page number`` (mkData ()) + } + testTask "succeeds when there are no posts" { + do! PostDataTests.``FindPageOfPublishedPosts succeeds when there are no posts`` (mkData ()) + } + ] + testList "FindPageOfTaggedPosts" [ + testTask "succeeds when posts are found" { + do! PostDataTests.``FindPageOfTaggedPosts succeeds when posts are found`` (mkData ()) + } + testTask "succeeds when posts are found (excluding drafts)" { + do! PostDataTests.``FindPageOfTaggedPosts succeeds when posts are found (excluding drafts)`` (mkData ()) + } + testTask "succeeds when finding a too-high page number" { + do! PostDataTests.``FindPageOfTaggedPosts succeeds when finding a too-high page number`` (mkData ()) + } + testTask "succeeds when there are no posts" { + do! PostDataTests.``FindPageOfTaggedPosts succeeds when there are no posts`` (mkData ()) + } + ] + testList "FindSurroundingPosts" [ + testTask "succeeds when there is no next newer post" { + do! PostDataTests.``FindSurroundingPosts succeeds when there is no next newer post`` (mkData ()) + } + testTask "succeeds when there is no next older post" { + do! 
PostDataTests.``FindSurroundingPosts succeeds when there is no next older post`` (mkData ()) + } + testTask "succeeds when older and newer exist" { + do! PostDataTests.``FindSurroundingPosts succeeds when older and newer exist`` (mkData ()) + } + ] + testList "Update" [ + testTask "succeeds when the post exists" { + do! PostDataTests.``Update succeeds when the post exists`` (mkData ()) + } + testTask "succeeds when the post does not exist" { + do! PostDataTests.``Update succeeds when the post does not exist`` (mkData ()) + } + ] + testList "UpdatePriorPermalinks" [ + testTask "succeeds when the post exists" { + do! PostDataTests.``UpdatePriorPermalinks succeeds when the post exists`` (mkData ()) + } + testTask "succeeds when the post does not exist" { + do! PostDataTests.``UpdatePriorPermalinks succeeds when the post does not exist`` (mkData ()) + } + ] + testList "Delete" [ + testTask "succeeds when a post is deleted" { + do! PostDataTests.``Delete succeeds when a post is deleted`` (mkData ()) + let! revisions = + Custom.scalar + "SELECT COUNT(*) AS it FROM post_revision WHERE post_id = @id" + [ idParam PostDataTests.episode2 ] + toCount + Expect.equal revisions 0 "All revisions for the post should have been deleted" + } + testTask "succeeds when a post is not deleted" { + do! PostDataTests.``Delete succeeds when a post is not deleted`` (mkData ()) + } + ] +] + +let private tagMapTests = testList "TagMap" [ + testList "FindById" [ + testTask "succeeds when a tag mapping is found" { + do! TagMapDataTests.``FindById succeeds when a tag mapping is found`` (mkData ()) + } + testTask "succeeds when a tag mapping is not found (incorrect weblog)" { + do! TagMapDataTests.``FindById succeeds when a tag mapping is not found (incorrect weblog)`` (mkData ()) + } + testTask "succeeds when a tag mapping is not found (bad tag map ID)" { + do! 
TagMapDataTests.``FindById succeeds when a tag mapping is not found (bad tag map ID)`` (mkData ()) + } + ] + testList "FindByUrlValue" [ + testTask "succeeds when a tag mapping is found" { + do! TagMapDataTests.``FindByUrlValue succeeds when a tag mapping is found`` (mkData ()) + } + testTask "succeeds when a tag mapping is not found (incorrect weblog)" { + do! TagMapDataTests.``FindByUrlValue succeeds when a tag mapping is not found (incorrect weblog)`` + (mkData ()) + } + testTask "succeeds when a tag mapping is not found (no such value)" { + do! TagMapDataTests.``FindByUrlValue succeeds when a tag mapping is not found (no such value)`` (mkData ()) + } + ] + testList "FindByWebLog" [ + testTask "succeeds when tag mappings are found" { + do! TagMapDataTests.``FindByWebLog succeeds when tag mappings are found`` (mkData ()) + } + testTask "succeeds when no tag mappings are found" { + do! TagMapDataTests.``FindByWebLog succeeds when no tag mappings are found`` (mkData ()) + } + ] + testList "FindMappingForTags" [ + testTask "succeeds when mappings exist" { + do! TagMapDataTests.``FindMappingForTags succeeds when mappings exist`` (mkData ()) + } + testTask "succeeds when no mappings exist" { + do! TagMapDataTests.``FindMappingForTags succeeds when no mappings exist`` (mkData ()) + } + ] + testList "Save" [ + testTask "succeeds when adding a tag mapping" { + do! TagMapDataTests.``Save succeeds when adding a tag mapping`` (mkData ()) + } + testTask "succeeds when updating a tag mapping" { + do! TagMapDataTests.``Save succeeds when updating a tag mapping`` (mkData ()) + } + ] + testList "Delete" [ + testTask "succeeds when a tag mapping is deleted" { + do! TagMapDataTests.``Delete succeeds when a tag mapping is deleted`` (mkData ()) + } + testTask "succeeds when a tag mapping is not deleted" { + do! TagMapDataTests.``Delete succeeds when a tag mapping is not deleted`` (mkData ()) + } + ] +] + +let private themeTests = testList "Theme" [ + testTask "All succeeds" { + do! 
ThemeDataTests.``All succeeds`` (mkData ()) + } + testList "Exists" [ + testTask "succeeds when the theme exists" { + do! ThemeDataTests.``Exists succeeds when the theme exists`` (mkData ()) + } + testTask "succeeds when the theme does not exist" { + do! ThemeDataTests.``Exists succeeds when the theme does not exist`` (mkData ()) + } + ] + testList "FindById" [ + testTask "succeeds when the theme exists" { + do! ThemeDataTests.``FindById succeeds when the theme exists`` (mkData ()) + } + testTask "succeeds when the theme does not exist" { + do! ThemeDataTests.``FindById succeeds when the theme does not exist`` (mkData ()) + } + ] + testList "FindByIdWithoutText" [ + testTask "succeeds when the theme exists" { + do! ThemeDataTests.``FindByIdWithoutText succeeds when the theme exists`` (mkData ()) + } + testTask "succeeds when the theme does not exist" { + do! ThemeDataTests.``FindByIdWithoutText succeeds when the theme does not exist`` (mkData ()) + } + ] + testList "Save" [ + testTask "succeeds when adding a theme" { + do! ThemeDataTests.``Save succeeds when adding a theme`` (mkData ()) + } + testTask "succeeds when updating a theme" { + do! ThemeDataTests.``Save succeeds when updating a theme`` (mkData ()) + } + ] + testList "Delete" [ + testTask "succeeds when a theme is deleted" { + do! ThemeDataTests.``Delete succeeds when a theme is deleted`` (mkData ()) + } + testTask "succeeds when a theme is not deleted" { + do! ThemeDataTests.``Delete succeeds when a theme is not deleted`` (mkData ()) + } + ] +] + +let private themeAssetTests = testList "ThemeAsset" [ + testList "Save" [ + testTask "succeeds when adding an asset" { + do! ThemeDataTests.Asset.``Save succeeds when adding an asset`` (mkData ()) + } + testTask "succeeds when updating an asset" { + do! ThemeDataTests.Asset.``Save succeeds when updating an asset`` (mkData ()) + } + ] + testTask "All succeeds" { + do! 
ThemeDataTests.Asset.``All succeeds`` (mkData ()) + } + testList "FindById" [ + testTask "succeeds when an asset is found" { + do! ThemeDataTests.Asset.``FindById succeeds when an asset is found`` (mkData ()) + } + testTask "succeeds when an asset is not found" { + do! ThemeDataTests.Asset.``FindById succeeds when an asset is not found`` (mkData ()) + } + ] + testList "FindByTheme" [ + testTask "succeeds when assets exist" { + do! ThemeDataTests.Asset.``FindByTheme succeeds when assets exist`` (mkData ()) + } + testTask "succeeds when assets do not exist" { + do! ThemeDataTests.Asset.``FindByTheme succeeds when assets do not exist`` (mkData ()) + } + ] + testList "FindByThemeWithData" [ + testTask "succeeds when assets exist" { + do! ThemeDataTests.Asset.``FindByThemeWithData succeeds when assets exist`` (mkData ()) + } + testTask "succeeds when assets do not exist" { + do! ThemeDataTests.Asset.``FindByThemeWithData succeeds when assets do not exist`` (mkData ()) + } + ] + testList "DeleteByTheme" [ + testTask "succeeds when assets are deleted" { + do! ThemeDataTests.Asset.``DeleteByTheme succeeds when assets are deleted`` (mkData ()) + } + testTask "succeeds when no assets are deleted" { + do! ThemeDataTests.Asset.``DeleteByTheme succeeds when no assets are deleted`` (mkData ()) + } + ] +] + +let private uploadTests = testList "Upload" [ + testTask "Add succeeds" { + do! UploadDataTests.``Add succeeds`` (mkData ()) + } + testList "FindByPath" [ + testTask "succeeds when an upload is found" { + do! UploadDataTests.``FindByPath succeeds when an upload is found`` (mkData ()) + } + testTask "succeeds when an upload is not found (incorrect weblog)" { + do! UploadDataTests.``FindByPath succeeds when an upload is not found (incorrect weblog)`` (mkData ()) + } + testTask "succeeds when an upload is not found (bad path)" { + do! 
UploadDataTests.``FindByPath succeeds when an upload is not found (bad path)`` (mkData ()) + } + ] + testList "FindByWebLog" [ + testTask "succeeds when uploads exist" { + do! UploadDataTests.``FindByWebLog succeeds when uploads exist`` (mkData ()) + } + testTask "succeeds when no uploads exist" { + do! UploadDataTests.``FindByWebLog succeeds when no uploads exist`` (mkData ()) + } + ] + testList "FindByWebLogWithData" [ + testTask "succeeds when uploads exist" { + do! UploadDataTests.``FindByWebLogWithData succeeds when uploads exist`` (mkData ()) + } + testTask "succeeds when no uploads exist" { + do! UploadDataTests.``FindByWebLogWithData succeeds when no uploads exist`` (mkData ()) + } + ] + testList "Delete" [ + testTask "succeeds when an upload is deleted" { + do! UploadDataTests.``Delete succeeds when an upload is deleted`` (mkData ()) + } + testTask "succeeds when an upload is not deleted" { + do! UploadDataTests.``Delete succeeds when an upload is not deleted`` (mkData ()) + } + ] +] + +let private webLogUserTests = testList "WebLogUser" [ + testTask "Add succeeds" { + // This restore ensures all the posts and pages exist + do! freshEnvironment () + do! WebLogUserDataTests.``Add succeeds`` (mkData ()) + } + testList "FindByEmail" [ + testTask "succeeds when a user is found" { + do! WebLogUserDataTests.``FindByEmail succeeds when a user is found`` (mkData ()) + } + testTask "succeeds when a user is not found (incorrect weblog)" { + do! WebLogUserDataTests.``FindByEmail succeeds when a user is not found (incorrect weblog)`` (mkData ()) + } + testTask "succeeds when a user is not found (bad email)" { + do! WebLogUserDataTests.``FindByEmail succeeds when a user is not found (bad email)`` (mkData ()) + } + ] + testList "FindById" [ + testTask "succeeds when a user is found" { + do! WebLogUserDataTests.``FindById succeeds when a user is found`` (mkData ()) + } + testTask "succeeds when a user is not found (incorrect weblog)" { + do! 
WebLogUserDataTests.``FindById succeeds when a user is not found (incorrect weblog)`` (mkData ()) + } + testTask "succeeds when a user is not found (bad ID)" { + do! WebLogUserDataTests.``FindById succeeds when a user is not found (bad ID)`` (mkData ()) + } + ] + testList "FindByWebLog" [ + testTask "succeeds when users exist" { + do! WebLogUserDataTests.``FindByWebLog succeeds when users exist`` (mkData ()) + } + testTask "succeeds when no users exist" { + do! WebLogUserDataTests.``FindByWebLog succeeds when no users exist`` (mkData ()) + } + ] + testList "FindNames" [ + testTask "succeeds when users exist" { + do! WebLogUserDataTests.``FindNames succeeds when users exist`` (mkData ()) + } + testTask "succeeds when users do not exist" { + do! WebLogUserDataTests.``FindNames succeeds when users do not exist`` (mkData ()) + } + ] + testList "SetLastSeen" [ + testTask "succeeds when the user exists" { + do! WebLogUserDataTests.``SetLastSeen succeeds when the user exists`` (mkData ()) + } + testTask "succeeds when the user does not exist" { + do! WebLogUserDataTests.``SetLastSeen succeeds when the user does not exist`` (mkData ()) + } + ] + testList "Update" [ + testTask "succeeds when the user exists" { + do! WebLogUserDataTests.``Update succeeds when the user exists`` (mkData ()) + } + testTask "succeeds when the user does not exist" { + do! WebLogUserDataTests.``Update succeeds when the user does not exist`` (mkData ()) + } + ] + testList "Delete" [ + testTask "fails when the user is the author of a page" { + do! WebLogUserDataTests.``Delete fails when the user is the author of a page`` (mkData ()) + } + testTask "fails when the user is the author of a post" { + do! WebLogUserDataTests.``Delete fails when the user is the author of a post`` (mkData ()) + } + testTask "succeeds when the user is not an author" { + do! WebLogUserDataTests.``Delete succeeds when the user is not an author`` (mkData ()) + } + testTask "succeeds when the user does not exist" { + do! 
WebLogUserDataTests.``Delete succeeds when the user does not exist`` (mkData ()) + } + ] +] + +let private webLogTests = testList "WebLog" [ + testTask "Add succeeds" { + do! WebLogDataTests.``Add succeeds`` (mkData ()) + } + testTask "All succeeds" { + do! WebLogDataTests.``All succeeds`` (mkData ()) + } + testList "FindByHost" [ + testTask "succeeds when a web log is found" { + do! WebLogDataTests.``FindByHost succeeds when a web log is found`` (mkData ()) + } + testTask "succeeds when a web log is not found" { + do! WebLogDataTests.``FindByHost succeeds when a web log is not found`` (mkData ()) + } + ] + testList "FindById" [ + testTask "succeeds when a web log is found" { + do! WebLogDataTests.``FindById succeeds when a web log is found`` (mkData ()) + } + testTask "succeeds when a web log is not found" { + do! WebLogDataTests.``FindById succeeds when a web log is not found`` (mkData ()) + } + ] + testList "UpdateRedirectRules" [ + testTask "succeeds when the web log exists" { + do! WebLogDataTests.``UpdateRedirectRules succeeds when the web log exists`` (mkData ()) + } + testTask "succeeds when the web log does not exist" { + do! WebLogDataTests.``UpdateRedirectRules succeeds when the web log does not exist`` (mkData ()) + } + ] + testList "UpdateRssOptions" [ + testTask "succeeds when the web log exists" { + do! WebLogDataTests.``UpdateRssOptions succeeds when the web log exists`` (mkData ()) + } + testTask "succeeds when the web log does not exist" { + do! WebLogDataTests.``UpdateRssOptions succeeds when the web log does not exist`` (mkData ()) + } + ] + testList "UpdateSettings" [ + testTask "succeeds when the web log exists" { + do! WebLogDataTests.``UpdateSettings succeeds when the web log exists`` (mkData ()) + } + testTask "succeeds when the web log does not exist" { + do! WebLogDataTests.``UpdateSettings succeeds when the web log does not exist`` (mkData ()) + } + ] + testList "Delete" [ + testTask "succeeds when the web log exists" { + do! 
WebLogDataTests.``Delete succeeds when the web log exists`` (mkData ()) + let! revisions = + Custom.scalar + "SELECT (SELECT COUNT(*) FROM page_revision) + (SELECT COUNT(*) FROM post_revision) AS it" + [] + toCount + Expect.equal revisions 0 "All revisions should be deleted" + } + testTask "succeeds when the web log does not exist" { + do! WebLogDataTests.``Delete succeeds when the web log does not exist`` (mkData ()) + } + ] +] + +/// Drop the throwaway PostgreSQL database +let private environmentCleanUp = test "Clean Up" { + if db.IsSome then db.Value.Dispose() +} + +/// All PostgreSQL data tests +let all = + testList "PostgresData" + [ environmentSetUp + categoryTests + pageTests + postTests + tagMapTests + themeTests + themeAssetTests + uploadTests + webLogUserTests + webLogTests + environmentCleanUp ] + |> testSequenced diff --git a/src/MyWebLog.Tests/Data/RethinkDbDataTests.fs b/src/MyWebLog.Tests/Data/RethinkDbDataTests.fs new file mode 100644 index 0000000..19cf1cf --- /dev/null +++ b/src/MyWebLog.Tests/Data/RethinkDbDataTests.fs @@ -0,0 +1,704 @@ +module RethinkDbDataTests + +open System +open Expecto +open Microsoft.Extensions.Logging.Abstractions +open MyWebLog +open MyWebLog.Converters +open MyWebLog.Data +open RethinkDb.Driver.FSharp +open RethinkDb.Driver.Net + +/// Get an environment variable, using the given value as the default if it is not set +let env name value = + match Environment.GetEnvironmentVariable $"MWL_TEST_{name}" with + | null -> value + | it when it.Trim() = "" -> value + | it -> it + + +/// The data configuration for the test database +let private dataCfg = + DataConfig.FromUri (env "RETHINK_URI" "rethinkdb://172.17.0.2/mwl_test") + +/// The active data instance to use for testing +let mutable private data: IData option = None + +/// Dispose the existing data +let private disposeData () = task { + if data.IsSome then + let conn = (data.Value :?> RethinkDbData).Conn + do! 
rethink { dbDrop dataCfg.Database; write; withRetryOnce; ignoreResult conn } + conn.Dispose() + data <- None +} + +/// Create a new data implementation instance +let private newData () = + let log = NullLogger() + let conn = dataCfg.CreateConnection log + RethinkDbData(conn, dataCfg, log) + +/// Create a fresh environment from the root backup +let private freshEnvironment () = task { + do! disposeData () + data <- Some (newData ()) + do! data.Value.StartUp() + // This exercises Restore for all implementations; all tests are dependent on it working as expected + do! Maintenance.Backup.restoreBackup "root-weblog.json" None false false data.Value +} + +/// Set up the environment for the RethinkDB tests +let private environmentSetUp = testTask "creating database" { + let _ = Json.configure Converter.Serializer + do! freshEnvironment () +} + +/// Integration tests for the Category implementation in RethinkDB +let private categoryTests = testList "Category" [ + testTask "Add succeeds" { + do! CategoryDataTests.``Add succeeds`` data.Value + } + testList "CountAll" [ + testTask "succeeds when categories exist" { + do! CategoryDataTests.``CountAll succeeds when categories exist`` data.Value + } + testTask "succeeds when categories do not exist" { + do! CategoryDataTests.``CountAll succeeds when categories do not exist`` data.Value + } + ] + testList "CountTopLevel" [ + testTask "succeeds when top-level categories exist" { + do! CategoryDataTests.``CountTopLevel succeeds when top-level categories exist`` data.Value + } + testTask "succeeds when no top-level categories exist" { + do! CategoryDataTests.``CountTopLevel succeeds when no top-level categories exist`` data.Value + } + ] + testTask "FindAllForView succeeds" { + do! CategoryDataTests.``FindAllForView succeeds`` data.Value + } + testList "FindById" [ + testTask "succeeds when a category is found" { + do! 
CategoryDataTests.``FindById succeeds when a category is found`` data.Value + } + testTask "succeeds when a category is not found" { + do! CategoryDataTests.``FindById succeeds when a category is not found`` data.Value + } + ] + testList "FindByWebLog" [ + testTask "succeeds when categories exist" { + do! CategoryDataTests.``FindByWebLog succeeds when categories exist`` data.Value + } + testTask "succeeds when no categories exist" { + do! CategoryDataTests.``FindByWebLog succeeds when no categories exist`` data.Value + } + ] + testTask "Update succeeds" { + do! CategoryDataTests.``Update succeeds`` data.Value + } + testList "Delete" [ + testTask "succeeds when the category is deleted (no posts)" { + do! CategoryDataTests.``Delete succeeds when the category is deleted (no posts)`` data.Value + } + testTask "succeeds when the category does not exist" { + do! CategoryDataTests.``Delete succeeds when the category does not exist`` data.Value + } + testTask "succeeds when reassigning parent category to None" { + do! CategoryDataTests.``Delete succeeds when reassigning parent category to None`` data.Value + } + testTask "succeeds when reassigning parent category to Some" { + do! CategoryDataTests.``Delete succeeds when reassigning parent category to Some`` data.Value + } + testTask "succeeds and removes category from posts" { + do! CategoryDataTests.``Delete succeeds and removes category from posts`` data.Value + } + ] +] + +/// Integration tests for the Page implementation in RethinkDB +let private pageTests = testList "Page" [ + testTask "Add succeeds" { + do! PageDataTests.``Add succeeds`` data.Value + } + testTask "All succeeds" { + do! PageDataTests.``All succeeds`` data.Value + } + testTask "CountAll succeeds" { + do! PageDataTests.``CountAll succeeds`` data.Value + } + testTask "CountListed succeeds" { + do! PageDataTests.``CountListed succeeds`` data.Value + } + testList "FindById" [ + testTask "succeeds when a page is found" { + do! 
PageDataTests.``FindById succeeds when a page is found`` data.Value + } + testTask "succeeds when a page is not found (incorrect weblog)" { + do! PageDataTests.``FindById succeeds when a page is not found (incorrect weblog)`` data.Value + } + testTask "succeeds when a page is not found (bad page ID)" { + do! PageDataTests.``FindById succeeds when a page is not found (bad page ID)`` data.Value + } + ] + testList "FindByPermalink" [ + testTask "succeeds when a page is found" { + do! PageDataTests.``FindByPermalink succeeds when a page is found`` data.Value + } + testTask "succeeds when a page is not found (incorrect weblog)" { + do! PageDataTests.``FindByPermalink succeeds when a page is not found (incorrect weblog)`` data.Value + } + testTask "succeeds when a page is not found (no such permalink)" { + do! PageDataTests.``FindByPermalink succeeds when a page is not found (no such permalink)`` data.Value + } + ] + testList "FindCurrentPermalink" [ + testTask "succeeds when a page is found" { + do! PageDataTests.``FindCurrentPermalink succeeds when a page is found`` data.Value + } + testTask "succeeds when a page is not found" { + do! PageDataTests.``FindCurrentPermalink succeeds when a page is not found`` data.Value + } + ] + testList "FindFullById" [ + testTask "succeeds when a page is found" { + do! PageDataTests.``FindFullById succeeds when a page is found`` data.Value + } + testTask "succeeds when a page is not found" { + do! PageDataTests.``FindFullById succeeds when a page is not found`` data.Value + } + ] + testList "FindFullByWebLog" [ + testTask "succeeds when pages are found" { + do! PageDataTests.``FindFullByWebLog succeeds when pages are found`` data.Value + } + testTask "succeeds when a pages are not found" { + do! PageDataTests.``FindFullByWebLog succeeds when pages are not found`` data.Value + } + ] + testList "FindListed" [ + testTask "succeeds when pages are found" { + do! 
PageDataTests.``FindListed succeeds when pages are found`` data.Value + } + testTask "succeeds when a pages are not found" { + do! PageDataTests.``FindListed succeeds when pages are not found`` data.Value + } + ] + testList "FindPageOfPages" [ + testTask "succeeds when pages are found" { + do! PageDataTests.``FindPageOfPages succeeds when pages are found`` data.Value + } + testTask "succeeds when a pages are not found" { + do! PageDataTests.``FindPageOfPages succeeds when pages are not found`` data.Value + } + ] + testList "Update" [ + testTask "succeeds when the page exists" { + do! PageDataTests.``Update succeeds when the page exists`` data.Value + } + testTask "succeeds when the page does not exist" { + do! PageDataTests.``Update succeeds when the page does not exist`` data.Value + } + ] + testList "UpdatePriorPermalinks" [ + testTask "succeeds when the page exists" { + do! PageDataTests.``UpdatePriorPermalinks succeeds when the page exists`` data.Value + } + testTask "succeeds when the page does not exist" { + do! PageDataTests.``UpdatePriorPermalinks succeeds when the page does not exist`` data.Value + } + ] + testList "Delete" [ + testTask "succeeds when a page is deleted" { + do! PageDataTests.``Delete succeeds when a page is deleted`` data.Value + } + testTask "succeeds when a page is not deleted" { + do! PageDataTests.``Delete succeeds when a page is not deleted`` data.Value + } + ] +] + +/// Integration tests for the Post implementation in RethinkDB +let private postTests = testList "Post" [ + testTask "Add succeeds" { + // We'll need the root website categories restored for these tests + do! freshEnvironment () + do! PostDataTests.``Add succeeds`` data.Value + } + testTask "CountByStatus succeeds" { + do! PostDataTests.``CountByStatus succeeds`` data.Value + } + testList "FindById" [ + testTask "succeeds when a post is found" { + do! 
PostDataTests.``FindById succeeds when a post is found`` data.Value + } + testTask "succeeds when a post is not found (incorrect weblog)" { + do! PostDataTests.``FindById succeeds when a post is not found (incorrect weblog)`` data.Value + } + testTask "succeeds when a post is not found (bad post ID)" { + do! PostDataTests.``FindById succeeds when a post is not found (bad post ID)`` data.Value + } + ] + testList "FindByPermalink" [ + testTask "succeeds when a post is found" { + do! PostDataTests.``FindByPermalink succeeds when a post is found`` data.Value + } + testTask "succeeds when a post is not found (incorrect weblog)" { + do! PostDataTests.``FindByPermalink succeeds when a post is not found (incorrect weblog)`` data.Value + } + testTask "succeeds when a post is not found (no such permalink)" { + do! PostDataTests.``FindByPermalink succeeds when a post is not found (no such permalink)`` data.Value + } + ] + testList "FindCurrentPermalink" [ + testTask "succeeds when a post is found" { + do! PostDataTests.``FindCurrentPermalink succeeds when a post is found`` data.Value + } + testTask "succeeds when a post is not found" { + do! PostDataTests.``FindCurrentPermalink succeeds when a post is not found`` data.Value + } + ] + testList "FindFullById" [ + testTask "succeeds when a post is found" { + do! PostDataTests.``FindFullById succeeds when a post is found`` data.Value + } + testTask "succeeds when a post is not found" { + do! PostDataTests.``FindFullById succeeds when a post is not found`` data.Value + } + ] + testList "FindFullByWebLog" [ + testTask "succeeds when posts are found" { + do! PostDataTests.``FindFullByWebLog succeeds when posts are found`` data.Value + } + testTask "succeeds when a posts are not found" { + do! PostDataTests.``FindFullByWebLog succeeds when posts are not found`` data.Value + } + ] + testList "FindPageOfCategorizedPosts" [ + testTask "succeeds when posts are found" { + do! 
PostDataTests.``FindPageOfCategorizedPosts succeeds when posts are found`` data.Value + } + testTask "succeeds when finding a too-high page number" { + do! PostDataTests.``FindPageOfCategorizedPosts succeeds when finding a too-high page number`` data.Value + } + testTask "succeeds when a category has no posts" { + do! PostDataTests.``FindPageOfCategorizedPosts succeeds when a category has no posts`` data.Value + } + ] + testList "FindPageOfPosts" [ + testTask "succeeds when posts are found" { + do! PostDataTests.``FindPageOfPosts succeeds when posts are found`` data.Value + } + testTask "succeeds when finding a too-high page number" { + do! PostDataTests.``FindPageOfPosts succeeds when finding a too-high page number`` data.Value + } + testTask "succeeds when there are no posts" { + do! PostDataTests.``FindPageOfPosts succeeds when there are no posts`` data.Value + } + ] + testList "FindPageOfPublishedPosts" [ + testTask "succeeds when posts are found" { + do! PostDataTests.``FindPageOfPublishedPosts succeeds when posts are found`` data.Value + } + testTask "succeeds when finding a too-high page number" { + do! PostDataTests.``FindPageOfPublishedPosts succeeds when finding a too-high page number`` data.Value + } + testTask "succeeds when there are no posts" { + do! PostDataTests.``FindPageOfPublishedPosts succeeds when there are no posts`` data.Value + } + ] + testList "FindPageOfTaggedPosts" [ + testTask "succeeds when posts are found" { + do! PostDataTests.``FindPageOfTaggedPosts succeeds when posts are found`` data.Value + } + testTask "succeeds when posts are found (excluding drafts)" { + do! PostDataTests.``FindPageOfTaggedPosts succeeds when posts are found (excluding drafts)`` data.Value + } + testTask "succeeds when finding a too-high page number" { + do! PostDataTests.``FindPageOfTaggedPosts succeeds when finding a too-high page number`` data.Value + } + testTask "succeeds when there are no posts" { + do! 
PostDataTests.``FindPageOfTaggedPosts succeeds when there are no posts`` data.Value + } + ] + testList "FindSurroundingPosts" [ + testTask "succeeds when there is no next newer post" { + do! PostDataTests.``FindSurroundingPosts succeeds when there is no next newer post`` data.Value + } + testTask "succeeds when there is no next older post" { + do! PostDataTests.``FindSurroundingPosts succeeds when there is no next older post`` data.Value + } + testTask "succeeds when older and newer exist" { + do! PostDataTests.``FindSurroundingPosts succeeds when older and newer exist`` data.Value + } + ] + testList "Update" [ + testTask "succeeds when the post exists" { + do! PostDataTests.``Update succeeds when the post exists`` data.Value + } + testTask "succeeds when the post does not exist" { + do! PostDataTests.``Update succeeds when the post does not exist`` data.Value + } + ] + testList "UpdatePriorPermalinks" [ + testTask "succeeds when the post exists" { + do! PostDataTests.``UpdatePriorPermalinks succeeds when the post exists`` data.Value + } + testTask "succeeds when the post does not exist" { + do! PostDataTests.``UpdatePriorPermalinks succeeds when the post does not exist`` data.Value + } + ] + testList "Delete" [ + testTask "succeeds when a post is deleted" { + do! PostDataTests.``Delete succeeds when a post is deleted`` data.Value + } + testTask "succeeds when a post is not deleted" { + do! PostDataTests.``Delete succeeds when a post is not deleted`` data.Value + } + ] +] + +let private tagMapTests = testList "TagMap" [ + testList "FindById" [ + testTask "succeeds when a tag mapping is found" { + do! TagMapDataTests.``FindById succeeds when a tag mapping is found`` data.Value + } + testTask "succeeds when a tag mapping is not found (incorrect weblog)" { + do! TagMapDataTests.``FindById succeeds when a tag mapping is not found (incorrect weblog)`` data.Value + } + testTask "succeeds when a tag mapping is not found (bad tag map ID)" { + do! 
TagMapDataTests.``FindById succeeds when a tag mapping is not found (bad tag map ID)`` data.Value + } + ] + testList "FindByUrlValue" [ + testTask "succeeds when a tag mapping is found" { + do! TagMapDataTests.``FindByUrlValue succeeds when a tag mapping is found`` data.Value + } + testTask "succeeds when a tag mapping is not found (incorrect weblog)" { + do! TagMapDataTests.``FindByUrlValue succeeds when a tag mapping is not found (incorrect weblog)`` + data.Value + } + testTask "succeeds when a tag mapping is not found (no such value)" { + do! TagMapDataTests.``FindByUrlValue succeeds when a tag mapping is not found (no such value)`` data.Value + } + ] + testList "FindByWebLog" [ + testTask "succeeds when tag mappings are found" { + do! TagMapDataTests.``FindByWebLog succeeds when tag mappings are found`` data.Value + } + testTask "succeeds when no tag mappings are found" { + do! TagMapDataTests.``FindByWebLog succeeds when no tag mappings are found`` data.Value + } + ] + testList "FindMappingForTags" [ + testTask "succeeds when mappings exist" { + do! TagMapDataTests.``FindMappingForTags succeeds when mappings exist`` data.Value + } + testTask "succeeds when no mappings exist" { + do! TagMapDataTests.``FindMappingForTags succeeds when no mappings exist`` data.Value + } + ] + testList "Save" [ + testTask "succeeds when adding a tag mapping" { + do! TagMapDataTests.``Save succeeds when adding a tag mapping`` data.Value + } + testTask "succeeds when updating a tag mapping" { + do! TagMapDataTests.``Save succeeds when updating a tag mapping`` data.Value + } + ] + testList "Delete" [ + testTask "succeeds when a tag mapping is deleted" { + do! TagMapDataTests.``Delete succeeds when a tag mapping is deleted`` data.Value + } + testTask "succeeds when a tag mapping is not deleted" { + do! TagMapDataTests.``Delete succeeds when a tag mapping is not deleted`` data.Value + } + ] +] + +let private themeTests = testList "Theme" [ + testTask "All succeeds" { + do! 
ThemeDataTests.``All succeeds`` data.Value + } + testList "Exists" [ + testTask "succeeds when the theme exists" { + do! ThemeDataTests.``Exists succeeds when the theme exists`` data.Value + } + testTask "succeeds when the theme does not exist" { + do! ThemeDataTests.``Exists succeeds when the theme does not exist`` data.Value + } + ] + testList "FindById" [ + testTask "succeeds when the theme exists" { + do! ThemeDataTests.``FindById succeeds when the theme exists`` data.Value + } + testTask "succeeds when the theme does not exist" { + do! ThemeDataTests.``FindById succeeds when the theme does not exist`` data.Value + } + ] + testList "FindByIdWithoutText" [ + testTask "succeeds when the theme exists" { + do! ThemeDataTests.``FindByIdWithoutText succeeds when the theme exists`` data.Value + } + testTask "succeeds when the theme does not exist" { + do! ThemeDataTests.``FindByIdWithoutText succeeds when the theme does not exist`` data.Value + } + ] + testList "Save" [ + testTask "succeeds when adding a theme" { + do! ThemeDataTests.``Save succeeds when adding a theme`` data.Value + } + testTask "succeeds when updating a theme" { + do! ThemeDataTests.``Save succeeds when updating a theme`` data.Value + } + ] + testList "Delete" [ + testTask "succeeds when a theme is deleted" { + do! ThemeDataTests.``Delete succeeds when a theme is deleted`` data.Value + } + testTask "succeeds when a theme is not deleted" { + do! ThemeDataTests.``Delete succeeds when a theme is not deleted`` data.Value + } + ] +] + +let private themeAssetTests = testList "ThemeAsset" [ + testList "Save" [ + testTask "succeeds when adding an asset" { + do! ThemeDataTests.Asset.``Save succeeds when adding an asset`` data.Value + } + testTask "succeeds when updating an asset" { + do! ThemeDataTests.Asset.``Save succeeds when updating an asset`` data.Value + } + ] + testTask "All succeeds" { + do! 
ThemeDataTests.Asset.``All succeeds`` data.Value + } + testList "FindById" [ + testTask "succeeds when an asset is found" { + do! ThemeDataTests.Asset.``FindById succeeds when an asset is found`` data.Value + } + testTask "succeeds when an asset is not found" { + do! ThemeDataTests.Asset.``FindById succeeds when an asset is not found`` data.Value + } + ] + testList "FindByTheme" [ + testTask "succeeds when assets exist" { + do! ThemeDataTests.Asset.``FindByTheme succeeds when assets exist`` data.Value + } + testTask "succeeds when assets do not exist" { + do! ThemeDataTests.Asset.``FindByTheme succeeds when assets do not exist`` data.Value + } + ] + testList "FindByThemeWithData" [ + testTask "succeeds when assets exist" { + do! ThemeDataTests.Asset.``FindByThemeWithData succeeds when assets exist`` data.Value + } + testTask "succeeds when assets do not exist" { + do! ThemeDataTests.Asset.``FindByThemeWithData succeeds when assets do not exist`` data.Value + } + ] + testList "DeleteByTheme" [ + testTask "succeeds when assets are deleted" { + do! ThemeDataTests.Asset.``DeleteByTheme succeeds when assets are deleted`` data.Value + } + testTask "succeeds when no assets are deleted" { + do! ThemeDataTests.Asset.``DeleteByTheme succeeds when no assets are deleted`` data.Value + } + ] +] + +let private uploadTests = testList "Upload" [ + testTask "Add succeeds" { + do! UploadDataTests.``Add succeeds`` data.Value + } + testList "FindByPath" [ + testTask "succeeds when an upload is found" { + do! UploadDataTests.``FindByPath succeeds when an upload is found`` data.Value + } + testTask "succeeds when an upload is not found (incorrect weblog)" { + do! UploadDataTests.``FindByPath succeeds when an upload is not found (incorrect weblog)`` data.Value + } + testTask "succeeds when an upload is not found (bad path)" { + do! 
UploadDataTests.``FindByPath succeeds when an upload is not found (bad path)`` data.Value + } + ] + testList "FindByWebLog" [ + testTask "succeeds when uploads exist" { + do! UploadDataTests.``FindByWebLog succeeds when uploads exist`` data.Value + } + testTask "succeeds when no uploads exist" { + do! UploadDataTests.``FindByWebLog succeeds when no uploads exist`` data.Value + } + ] + testList "FindByWebLogWithData" [ + testTask "succeeds when uploads exist" { + do! UploadDataTests.``FindByWebLogWithData succeeds when uploads exist`` data.Value + } + testTask "succeeds when no uploads exist" { + do! UploadDataTests.``FindByWebLogWithData succeeds when no uploads exist`` data.Value + } + ] + testList "Delete" [ + testTask "succeeds when an upload is deleted" { + do! UploadDataTests.``Delete succeeds when an upload is deleted`` data.Value + } + testTask "succeeds when an upload is not deleted" { + do! UploadDataTests.``Delete succeeds when an upload is not deleted`` data.Value + } + ] +] + +let private webLogUserTests = testList "WebLogUser" [ + testTask "Add succeeds" { + // This restore ensures all the posts and pages exist + do! freshEnvironment () + do! WebLogUserDataTests.``Add succeeds`` data.Value + } + testList "FindByEmail" [ + testTask "succeeds when a user is found" { + do! WebLogUserDataTests.``FindByEmail succeeds when a user is found`` data.Value + } + testTask "succeeds when a user is not found (incorrect weblog)" { + do! WebLogUserDataTests.``FindByEmail succeeds when a user is not found (incorrect weblog)`` data.Value + } + testTask "succeeds when a user is not found (bad email)" { + do! WebLogUserDataTests.``FindByEmail succeeds when a user is not found (bad email)`` data.Value + } + ] + testList "FindById" [ + testTask "succeeds when a user is found" { + do! WebLogUserDataTests.``FindById succeeds when a user is found`` data.Value + } + testTask "succeeds when a user is not found (incorrect weblog)" { + do! 
WebLogUserDataTests.``FindById succeeds when a user is not found (incorrect weblog)`` data.Value + } + testTask "succeeds when a user is not found (bad ID)" { + do! WebLogUserDataTests.``FindById succeeds when a user is not found (bad ID)`` data.Value + } + ] + testList "FindByWebLog" [ + testTask "succeeds when users exist" { + do! WebLogUserDataTests.``FindByWebLog succeeds when users exist`` data.Value + } + testTask "succeeds when no users exist" { + do! WebLogUserDataTests.``FindByWebLog succeeds when no users exist`` data.Value + } + ] + testList "FindNames" [ + testTask "succeeds when users exist" { + do! WebLogUserDataTests.``FindNames succeeds when users exist`` data.Value + } + testTask "succeeds when users do not exist" { + do! WebLogUserDataTests.``FindNames succeeds when users do not exist`` data.Value + } + ] + testList "SetLastSeen" [ + testTask "succeeds when the user exists" { + do! WebLogUserDataTests.``SetLastSeen succeeds when the user exists`` data.Value + } + testTask "succeeds when the user does not exist" { + do! WebLogUserDataTests.``SetLastSeen succeeds when the user does not exist`` data.Value + } + ] + testList "Update" [ + testTask "succeeds when the user exists" { + do! WebLogUserDataTests.``Update succeeds when the user exists`` data.Value + } + testTask "succeeds when the user does not exist" { + do! WebLogUserDataTests.``Update succeeds when the user does not exist`` data.Value + } + ] + testList "Delete" [ + testTask "fails when the user is the author of a page" { + do! WebLogUserDataTests.``Delete fails when the user is the author of a page`` data.Value + } + testTask "fails when the user is the author of a post" { + do! WebLogUserDataTests.``Delete fails when the user is the author of a post`` data.Value + } + testTask "succeeds when the user is not an author" { + do! WebLogUserDataTests.``Delete succeeds when the user is not an author`` data.Value + } + testTask "succeeds when the user does not exist" { + do! 
WebLogUserDataTests.``Delete succeeds when the user does not exist`` data.Value + } + ] +] + +let private webLogTests = testList "WebLog" [ + testTask "Add succeeds" { + do! WebLogDataTests.``Add succeeds`` data.Value + } + testTask "All succeeds" { + do! WebLogDataTests.``All succeeds`` data.Value + } + testList "FindByHost" [ + testTask "succeeds when a web log is found" { + do! WebLogDataTests.``FindByHost succeeds when a web log is found`` data.Value + } + testTask "succeeds when a web log is not found" { + do! WebLogDataTests.``FindByHost succeeds when a web log is not found`` data.Value + } + ] + testList "FindById" [ + testTask "succeeds when a web log is found" { + do! WebLogDataTests.``FindById succeeds when a web log is found`` data.Value + } + testTask "succeeds when a web log is not found" { + do! WebLogDataTests.``FindById succeeds when a web log is not found`` data.Value + } + ] + testList "UpdateRedirectRules" [ + testTask "succeeds when the web log exists" { + do! WebLogDataTests.``UpdateRedirectRules succeeds when the web log exists`` data.Value + } + testTask "succeeds when the web log does not exist" { + do! WebLogDataTests.``UpdateRedirectRules succeeds when the web log does not exist`` data.Value + } + ] + testList "UpdateRssOptions" [ + testTask "succeeds when the web log exists" { + do! WebLogDataTests.``UpdateRssOptions succeeds when the web log exists`` data.Value + } + testTask "succeeds when the web log does not exist" { + do! WebLogDataTests.``UpdateRssOptions succeeds when the web log does not exist`` data.Value + } + ] + testList "UpdateSettings" [ + testTask "succeeds when the web log exists" { + do! WebLogDataTests.``UpdateSettings succeeds when the web log exists`` data.Value + } + testTask "succeeds when the web log does not exist" { + do! WebLogDataTests.``UpdateSettings succeeds when the web log does not exist`` data.Value + } + ] + testList "Delete" [ + testTask "succeeds when the web log exists" { + do! 
WebLogDataTests.``Delete succeeds when the web log exists`` data.Value + } + testTask "succeeds when the web log does not exist" { + do! WebLogDataTests.``Delete succeeds when the web log does not exist`` data.Value + } + ] +] + +/// Drop the throwaway RethinkDB database +let private environmentCleanUp = testTask "Clean Up" { + do! disposeData () +} + +/// All RethinkDB data tests +let all = + testList "RethinkDbData" + [ environmentSetUp + categoryTests + pageTests + postTests + tagMapTests + themeTests + themeAssetTests + uploadTests + webLogUserTests + webLogTests + environmentCleanUp ] + |> testSequenced diff --git a/src/MyWebLog.Tests/Data/SQLiteDataTests.fs b/src/MyWebLog.Tests/Data/SQLiteDataTests.fs new file mode 100644 index 0000000..50e74bf --- /dev/null +++ b/src/MyWebLog.Tests/Data/SQLiteDataTests.fs @@ -0,0 +1,1054 @@ +module SQLiteDataTests + +open System.IO +open BitBadger.Documents.Sqlite +open Expecto +open Microsoft.Extensions.Logging.Abstractions +open MyWebLog +open MyWebLog.Converters +open MyWebLog.Data +open Newtonsoft.Json + +/// JSON serializer +let private ser = Json.configure (JsonSerializer.CreateDefault()) + +/// The test database name +let private dbName = + RethinkDbDataTests.env "SQLITE_DB" "test-db.db" + +/// Create a SQLiteData instance for testing +let private mkData () = + Configuration.useConnectionString $"Data Source=./{dbName}" + let conn = Configuration.dbConn () + SQLiteData(conn, NullLogger(), ser) :> IData + +// /// Create a SQLiteData instance for testing +// let private mkTraceData () = +// Sqlite.Configuration.useConnectionString $"Data Source=./{dbName}" +// let conn = Sqlite.Configuration.dbConn () +// let myLogger = +// LoggerFactory +// .Create(fun builder -> +// builder +// .AddSimpleConsole() +// .SetMinimumLevel(LogLevel.Trace) +// |> ignore) +// .CreateLogger() +// SQLiteData(conn, myLogger, ser) :> IData + +/// Dispose the connection associated with the SQLiteData instance +let private dispose (data: IData) = 
+ (data :?> SQLiteData).Conn.Dispose() + +/// Create a fresh environment from the root backup +let private freshEnvironment (data: IData option) = task { + let! env = task { + match data with + | Some d -> + return d + | None -> + let d = mkData () + // Thank you, kind Internet stranger... https://stackoverflow.com/a/548297 + do! (d :?> SQLiteData).Conn.customNonQuery + "PRAGMA writable_schema = 1; + DELETE FROM sqlite_master WHERE type IN ('table', 'index'); + PRAGMA writable_schema = 0; + VACUUM" [] + return d + } + do! env.StartUp() + // This exercises Restore for all implementations; all tests are dependent on it working as expected + do! Maintenance.Backup.restoreBackup "root-weblog.json" None false false env + return env +} + +/// Set up the environment for the SQLite tests +let private environmentSetUp = testList "Environment" [ + testTask "creating database" { + let data = mkData () + try do! freshEnvironment (Some data) + finally dispose data + } +] + +/// Integration tests for the Category implementation in SQLite +let private categoryTests = testList "Category" [ + testTask "Add succeeds" { + let data = mkData () + try do! CategoryDataTests.``Add succeeds`` data + finally dispose data + } + testList "CountAll" [ + testTask "succeeds when categories exist" { + let data = mkData () + try do! CategoryDataTests.``CountAll succeeds when categories exist`` data + finally dispose data + } + testTask "succeeds when categories do not exist" { + let data = mkData () + try do! CategoryDataTests.``CountAll succeeds when categories do not exist`` data + finally dispose data + } + ] + testList "CountTopLevel" [ + testTask "succeeds when top-level categories exist" { + let data = mkData () + try do! CategoryDataTests.``CountTopLevel succeeds when top-level categories exist`` data + finally dispose data + } + testTask "succeeds when no top-level categories exist" { + let data = mkData () + try do! 
CategoryDataTests.``CountTopLevel succeeds when no top-level categories exist`` data + finally dispose data + } + ] + testTask "FindAllForView succeeds" { + let data = mkData () + try do! CategoryDataTests.``FindAllForView succeeds`` data + finally dispose data + } + testList "FindById" [ + testTask "succeeds when a category is found" { + let data = mkData () + try do! CategoryDataTests.``FindById succeeds when a category is found`` data + finally dispose data + } + testTask "succeeds when a category is not found" { + let data = mkData () + try do! CategoryDataTests.``FindById succeeds when a category is not found`` data + finally dispose data + } + ] + testList "FindByWebLog" [ + testTask "succeeds when categories exist" { + let data = mkData () + try do! CategoryDataTests.``FindByWebLog succeeds when categories exist`` data + finally dispose data + } + testTask "succeeds when no categories exist" { + let data = mkData () + try do! CategoryDataTests.``FindByWebLog succeeds when no categories exist`` data + finally dispose data + } + ] + testTask "Update succeeds" { + let data = mkData () + try do! CategoryDataTests.``Update succeeds`` data + finally dispose data + } + testList "Delete" [ + testTask "succeeds when the category is deleted (no posts)" { + let data = mkData () + try do! CategoryDataTests.``Delete succeeds when the category is deleted (no posts)`` data + finally dispose data + } + testTask "succeeds when the category does not exist" { + let data = mkData () + try do! CategoryDataTests.``Delete succeeds when the category does not exist`` data + finally dispose data + } + testTask "succeeds when reassigning parent category to None" { + let data = mkData () + try do! CategoryDataTests.``Delete succeeds when reassigning parent category to None`` data + finally dispose data + } + testTask "succeeds when reassigning parent category to Some" { + let data = mkData () + try do! 
CategoryDataTests.``Delete succeeds when reassigning parent category to Some`` data + finally dispose data + } + testTask "succeeds and removes category from posts" { + let data = mkData () + try do! CategoryDataTests.``Delete succeeds and removes category from posts`` data + finally dispose data + } + ] +] + +/// Integration tests for the Page implementation in SQLite +let private pageTests = testList "Page" [ + testTask "Add succeeds" { + let data = mkData () + try do! PageDataTests.``Add succeeds`` data + finally dispose data + } + testTask "All succeeds" { + let data = mkData () + try do! PageDataTests.``All succeeds`` data + finally dispose data + } + testTask "CountAll succeeds" { + let data = mkData () + try do! PageDataTests.``CountAll succeeds`` data + finally dispose data + } + testTask "CountListed succeeds" { + let data = mkData () + try do! PageDataTests.``CountListed succeeds`` data + finally dispose data + } + testList "FindById" [ + testTask "succeeds when a page is found" { + let data = mkData () + try do! PageDataTests.``FindById succeeds when a page is found`` data + finally dispose data + } + testTask "succeeds when a page is not found (incorrect weblog)" { + let data = mkData () + try do! PageDataTests.``FindById succeeds when a page is not found (incorrect weblog)`` data + finally dispose data + } + testTask "succeeds when a page is not found (bad page ID)" { + let data = mkData () + try do! PageDataTests.``FindById succeeds when a page is not found (bad page ID)`` data + finally dispose data + } + ] + testList "FindByPermalink" [ + testTask "succeeds when a page is found" { + let data = mkData () + try do! PageDataTests.``FindByPermalink succeeds when a page is found`` data + finally dispose data + } + testTask "succeeds when a page is not found (incorrect weblog)" { + let data = mkData () + try do! 
PageDataTests.``FindByPermalink succeeds when a page is not found (incorrect weblog)`` data + finally dispose data + } + testTask "succeeds when a page is not found (no such permalink)" { + let data = mkData () + try do! PageDataTests.``FindByPermalink succeeds when a page is not found (no such permalink)`` data + finally dispose data + } + ] + testList "FindCurrentPermalink" [ + testTask "succeeds when a page is found" { + let data = mkData () + try do! PageDataTests.``FindCurrentPermalink succeeds when a page is found`` data + finally dispose data + } + testTask "succeeds when a page is not found" { + let data = mkData () + try do! PageDataTests.``FindCurrentPermalink succeeds when a page is not found`` data + finally dispose data + } + ] + testList "FindFullById" [ + testTask "succeeds when a page is found" { + let data = mkData () + try do! PageDataTests.``FindFullById succeeds when a page is found`` data + finally dispose data + } + testTask "succeeds when a page is not found" { + let data = mkData () + try do! PageDataTests.``FindFullById succeeds when a page is not found`` data + finally dispose data + } + ] + testList "FindFullByWebLog" [ + testTask "succeeds when pages are found" { + let data = mkData () + try do! PageDataTests.``FindFullByWebLog succeeds when pages are found`` data + finally dispose data + } + testTask "succeeds when a pages are not found" { + let data = mkData () + try do! PageDataTests.``FindFullByWebLog succeeds when pages are not found`` data + finally dispose data + } + ] + testList "FindListed" [ + testTask "succeeds when pages are found" { + let data = mkData () + try do! PageDataTests.``FindListed succeeds when pages are found`` data + finally dispose data + } + testTask "succeeds when a pages are not found" { + let data = mkData () + try do! 
PageDataTests.``FindListed succeeds when pages are not found`` data + finally dispose data + } + ] + testList "FindPageOfPages" [ + testTask "succeeds when pages are found" { + let data = mkData () + try do! PageDataTests.``FindPageOfPages succeeds when pages are found`` data + finally dispose data + } + testTask "succeeds when a pages are not found" { + let data = mkData () + try do! PageDataTests.``FindPageOfPages succeeds when pages are not found`` data + finally dispose data + } + ] + testList "Update" [ + testTask "succeeds when the page exists" { + let data = mkData () + try do! PageDataTests.``Update succeeds when the page exists`` data + finally dispose data + } + testTask "succeeds when the page does not exist" { + let data = mkData () + try do! PageDataTests.``Update succeeds when the page does not exist`` data + finally dispose data + } + ] + testList "UpdatePriorPermalinks" [ + testTask "succeeds when the page exists" { + let data = mkData () + try do! PageDataTests.``UpdatePriorPermalinks succeeds when the page exists`` data + finally dispose data + } + testTask "succeeds when the page does not exist" { + let data = mkData () + try do! PageDataTests.``UpdatePriorPermalinks succeeds when the page does not exist`` data + finally dispose data + } + ] + testList "Delete" [ + testTask "succeeds when a page is deleted" { + let data = mkData () + try + do! PageDataTests.``Delete succeeds when a page is deleted`` data + let! revisions = + (data :?> SQLiteData).Conn.customScalar + "SELECT COUNT(*) AS it FROM page_revision WHERE page_id = @id" + [ idParam PageDataTests.coolPageId ] + toCount + Expect.equal revisions 0L "All revisions for the page should have been deleted" + finally dispose data + } + testTask "succeeds when a page is not deleted" { + let data = mkData () + try do! 
PageDataTests.``Delete succeeds when a page is not deleted`` data + finally dispose data + } + ] +] + +/// Integration tests for the Post implementation in SQLite +let private postTests = testList "Post" [ + testTask "Add succeeds" { + // We'll need the root website categories restored for these tests + let! data = freshEnvironment None + try do! PostDataTests.``Add succeeds`` data + finally dispose data + } + testTask "CountPostsByStatus succeeds" { + let data = mkData () + try do! PostDataTests.``CountByStatus succeeds`` data + finally dispose data + } + testList "FindById" [ + testTask "succeeds when a post is found" { + let data = mkData () + try do! PostDataTests.``FindById succeeds when a post is found`` data + finally dispose data + } + testTask "succeeds when a post is not found (incorrect weblog)" { + let data = mkData () + try do! PostDataTests.``FindById succeeds when a post is not found (incorrect weblog)`` data + finally dispose data + } + testTask "succeeds when a post is not found (bad post ID)" { + let data = mkData () + try do! PostDataTests.``FindById succeeds when a post is not found (bad post ID)`` data + finally dispose data + } + ] + testList "FindByPermalink" [ + testTask "succeeds when a post is found" { + let data = mkData () + try do! PostDataTests.``FindByPermalink succeeds when a post is found`` data + finally dispose data + } + testTask "succeeds when a post is not found (incorrect weblog)" { + let data = mkData () + try do! PostDataTests.``FindByPermalink succeeds when a post is not found (incorrect weblog)`` data + finally dispose data + } + testTask "succeeds when a post is not found (no such permalink)" { + let data = mkData () + try do! PostDataTests.``FindByPermalink succeeds when a post is not found (no such permalink)`` data + finally dispose data + } + ] + testList "FindCurrentPermalink" [ + testTask "succeeds when a post is found" { + let data = mkData () + try do! 
PostDataTests.``FindCurrentPermalink succeeds when a post is found`` data + finally dispose data + } + testTask "succeeds when a post is not found" { + let data = mkData () + try do! PostDataTests.``FindCurrentPermalink succeeds when a post is not found`` data + finally dispose data + } + ] + testList "FindFullById" [ + testTask "succeeds when a post is found" { + let data = mkData () + try do! PostDataTests.``FindFullById succeeds when a post is found`` data + finally dispose data + } + testTask "succeeds when a post is not found" { + let data = mkData () + try do! PostDataTests.``FindFullById succeeds when a post is not found`` data + finally dispose data + } + ] + testList "FindFullByWebLog" [ + testTask "succeeds when posts are found" { + let data = mkData () + try do! PostDataTests.``FindFullByWebLog succeeds when posts are found`` data + finally dispose data + } + testTask "succeeds when a posts are not found" { + let data = mkData () + try do! PostDataTests.``FindFullByWebLog succeeds when posts are not found`` data + finally dispose data + } + ] + testList "FindPageOfCategorizedPosts" [ + testTask "succeeds when posts are found" { + let data = mkData () + try do! PostDataTests.``FindPageOfCategorizedPosts succeeds when posts are found`` data + finally dispose data + } + testTask "succeeds when finding a too-high page number" { + let data = mkData () + try do! PostDataTests.``FindPageOfCategorizedPosts succeeds when finding a too-high page number`` data + finally dispose data + } + testTask "succeeds when a category has no posts" { + let data = mkData () + try do! PostDataTests.``FindPageOfCategorizedPosts succeeds when a category has no posts`` data + finally dispose data + } + ] + testList "FindPageOfPosts" [ + testTask "succeeds when posts are found" { + let data = mkData () + try do! 
PostDataTests.``FindPageOfPosts succeeds when posts are found`` data + finally dispose data + } + testTask "succeeds when finding a too-high page number" { + let data = mkData () + try do! PostDataTests.``FindPageOfPosts succeeds when finding a too-high page number`` data + finally dispose data + } + testTask "succeeds when there are no posts" { + let data = mkData () + try do! PostDataTests.``FindPageOfPosts succeeds when there are no posts`` data + finally dispose data + } + ] + testList "FindPageOfPublishedPosts" [ + testTask "succeeds when posts are found" { + let data = mkData () + try do! PostDataTests.``FindPageOfPublishedPosts succeeds when posts are found`` data + finally dispose data + } + testTask "succeeds when finding a too-high page number" { + let data = mkData () + try do! PostDataTests.``FindPageOfPublishedPosts succeeds when finding a too-high page number`` data + finally dispose data + } + testTask "succeeds when there are no posts" { + let data = mkData () + try do! PostDataTests.``FindPageOfPublishedPosts succeeds when there are no posts`` data + finally dispose data + } + ] + testList "FindPageOfTaggedPosts" [ + testTask "succeeds when posts are found" { + let data = mkData () + try do! PostDataTests.``FindPageOfTaggedPosts succeeds when posts are found`` data + finally dispose data + } + testTask "succeeds when posts are found (excluding drafts)" { + let data = mkData () + try do! PostDataTests.``FindPageOfTaggedPosts succeeds when posts are found (excluding drafts)`` data + finally dispose data + } + testTask "succeeds when finding a too-high page number" { + let data = mkData () + try do! PostDataTests.``FindPageOfTaggedPosts succeeds when finding a too-high page number`` data + finally dispose data + } + testTask "succeeds when there are no posts" { + let data = mkData () + try do! 
PostDataTests.``FindPageOfTaggedPosts succeeds when there are no posts`` data + finally dispose data + } + ] + testList "FindSurroundingPosts" [ + testTask "succeeds when there is no next newer post" { + let data = mkData () + try do! PostDataTests.``FindSurroundingPosts succeeds when there is no next newer post`` data + finally dispose data + } + testTask "succeeds when there is no next older post" { + let data = mkData () + try do! PostDataTests.``FindSurroundingPosts succeeds when there is no next older post`` data + finally dispose data + } + testTask "succeeds when older and newer exist" { + let data = mkData () + try do! PostDataTests.``FindSurroundingPosts succeeds when older and newer exist`` data + finally dispose data + } + ] + testList "Update" [ + testTask "succeeds when the post exists" { + let data = mkData () + try do! PostDataTests.``Update succeeds when the post exists`` data + finally dispose data + } + testTask "succeeds when the post does not exist" { + let data = mkData () + try do! PostDataTests.``Update succeeds when the post does not exist`` data + finally dispose data + } + ] + testList "UpdatePriorPermalinks" [ + testTask "succeeds when the post exists" { + let data = mkData () + try do! PostDataTests.``UpdatePriorPermalinks succeeds when the post exists`` data + finally dispose data + } + testTask "succeeds when the post does not exist" { + let data = mkData () + try do! PostDataTests.``UpdatePriorPermalinks succeeds when the post does not exist`` data + finally dispose data + } + ] + testList "Delete" [ + testTask "succeeds when a post is deleted" { + let data = mkData () + try + do! PostDataTests.``Delete succeeds when a post is deleted`` data + let! 
revisions = + (data :?> SQLiteData).Conn.customScalar + "SELECT COUNT(*) AS it FROM post_revision WHERE post_id = @id" + [ idParam PostDataTests.episode2 ] + toCount + Expect.equal revisions 0L "All revisions for the post should have been deleted" + finally dispose data + } + testTask "succeeds when a post is not deleted" { + let data = mkData () + try do! PostDataTests.``Delete succeeds when a post is not deleted`` data + finally dispose data + } + ] +] + +let private tagMapTests = testList "TagMap" [ + testList "FindById" [ + testTask "succeeds when a tag mapping is found" { + let data = mkData () + try do! TagMapDataTests.``FindById succeeds when a tag mapping is found`` data + finally dispose data + } + testTask "succeeds when a tag mapping is not found (incorrect weblog)" { + let data = mkData () + try do! TagMapDataTests.``FindById succeeds when a tag mapping is not found (incorrect weblog)`` data + finally dispose data + } + testTask "succeeds when a tag mapping is not found (bad tag map ID)" { + let data = mkData () + try do! TagMapDataTests.``FindById succeeds when a tag mapping is not found (bad tag map ID)`` data + finally dispose data + } + ] + testList "FindByUrlValue" [ + testTask "succeeds when a tag mapping is found" { + let data = mkData () + try do! TagMapDataTests.``FindByUrlValue succeeds when a tag mapping is found`` data + finally dispose data + } + testTask "succeeds when a tag mapping is not found (incorrect weblog)" { + let data = mkData () + try do! TagMapDataTests.``FindByUrlValue succeeds when a tag mapping is not found (incorrect weblog)`` data + finally dispose data + } + testTask "succeeds when a tag mapping is not found (no such value)" { + let data = mkData () + try do! TagMapDataTests.``FindByUrlValue succeeds when a tag mapping is not found (no such value)`` data + finally dispose data + } + ] + testList "FindByWebLog" [ + testTask "succeeds when tag mappings are found" { + let data = mkData () + try do! 
TagMapDataTests.``FindByWebLog succeeds when tag mappings are found`` data + finally dispose data + } + testTask "succeeds when no tag mappings are found" { + let data = mkData () + try do! TagMapDataTests.``FindByWebLog succeeds when no tag mappings are found`` data + finally dispose data + } + ] + testList "FindMappingForTags" [ + testTask "succeeds when mappings exist" { + let data = mkData () + try do! TagMapDataTests.``FindMappingForTags succeeds when mappings exist`` data + finally dispose data + } + testTask "succeeds when no mappings exist" { + let data = mkData () + try do! TagMapDataTests.``FindMappingForTags succeeds when no mappings exist`` data + finally dispose data + } + ] + testList "Save" [ + testTask "succeeds when adding a tag mapping" { + let data = mkData () + try do! TagMapDataTests.``Save succeeds when adding a tag mapping`` data + finally dispose data + } + testTask "succeeds when updating a tag mapping" { + let data = mkData () + try do! TagMapDataTests.``Save succeeds when updating a tag mapping`` data + finally dispose data + } + ] + testList "Delete" [ + testTask "succeeds when a tag mapping is deleted" { + let data = mkData () + try do! TagMapDataTests.``Delete succeeds when a tag mapping is deleted`` data + finally dispose data + } + testTask "succeeds when a tag mapping is not deleted" { + let data = mkData () + try do! TagMapDataTests.``Delete succeeds when a tag mapping is not deleted`` data + finally dispose data + } + ] +] + +let private themeTests = testList "Theme" [ + testTask "All succeeds" { + let data = mkData () + try do! ThemeDataTests.``All succeeds`` data + finally dispose data + } + testList "Exists" [ + testTask "succeeds when the theme exists" { + let data = mkData () + try do! ThemeDataTests.``Exists succeeds when the theme exists`` data + finally dispose data + } + testTask "succeeds when the theme does not exist" { + let data = mkData () + try do! 
ThemeDataTests.``Exists succeeds when the theme does not exist`` data + finally dispose data + } + ] + testList "FindById" [ + testTask "succeeds when the theme exists" { + let data = mkData () + try do! ThemeDataTests.``FindById succeeds when the theme exists`` data + finally dispose data + } + testTask "succeeds when the theme does not exist" { + let data = mkData () + try do! ThemeDataTests.``FindById succeeds when the theme does not exist`` data + finally dispose data + } + ] + testList "FindByIdWithoutText" [ + testTask "succeeds when the theme exists" { + let data = mkData () + try do! ThemeDataTests.``FindByIdWithoutText succeeds when the theme exists`` data + finally dispose data + } + testTask "succeeds when the theme does not exist" { + let data = mkData () + try do! ThemeDataTests.``FindByIdWithoutText succeeds when the theme does not exist`` data + finally dispose data + } + ] + testList "Save" [ + testTask "succeeds when adding a theme" { + let data = mkData () + try do! ThemeDataTests.``Save succeeds when adding a theme`` data + finally dispose data + } + testTask "succeeds when updating a theme" { + let data = mkData () + try do! ThemeDataTests.``Save succeeds when updating a theme`` data + finally dispose data + } + ] + testList "Delete" [ + testTask "succeeds when a theme is deleted" { + let data = mkData () + try do! ThemeDataTests.``Delete succeeds when a theme is deleted`` data + finally dispose data + } + testTask "succeeds when a theme is not deleted" { + let data = mkData () + try do! ThemeDataTests.``Delete succeeds when a theme is not deleted`` data + finally dispose data + } + ] +] + +let private themeAssetTests = testList "ThemeAsset" [ + testList "Save" [ + testTask "succeeds when adding an asset" { + let data = mkData () + try do! ThemeDataTests.Asset.``Save succeeds when adding an asset`` data + finally dispose data + } + testTask "succeeds when updating an asset" { + let data = mkData () + try do! 
ThemeDataTests.Asset.``Save succeeds when updating an asset`` data + finally dispose data + } + ] + testTask "All succeeds" { + let data = mkData () + try do! ThemeDataTests.Asset.``All succeeds`` data + finally dispose data + } + testList "FindById" [ + testTask "succeeds when an asset is found" { + let data = mkData () + try do! ThemeDataTests.Asset.``FindById succeeds when an asset is found`` data + finally dispose data + } + testTask "succeeds when an asset is not found" { + let data = mkData () + try do! ThemeDataTests.Asset.``FindById succeeds when an asset is not found`` data + finally dispose data + } + ] + testList "FindByTheme" [ + testTask "succeeds when assets exist" { + let data = mkData () + try do! ThemeDataTests.Asset.``FindByTheme succeeds when assets exist`` data + finally dispose data + } + testTask "succeeds when assets do not exist" { + let data = mkData () + try do! ThemeDataTests.Asset.``FindByTheme succeeds when assets do not exist`` data + finally dispose data + } + ] + testList "FindByThemeWithData" [ + testTask "succeeds when assets exist" { + let data = mkData () + try do! ThemeDataTests.Asset.``FindByThemeWithData succeeds when assets exist`` data + finally dispose data + } + testTask "succeeds when assets do not exist" { + let data = mkData () + try do! ThemeDataTests.Asset.``FindByThemeWithData succeeds when assets do not exist`` data + finally dispose data + } + ] + testList "DeleteByTheme" [ + testTask "succeeds when assets are deleted" { + let data = mkData () + try do! ThemeDataTests.Asset.``DeleteByTheme succeeds when assets are deleted`` data + finally dispose data + } + testTask "succeeds when no assets are deleted" { + let data = mkData () + try do! ThemeDataTests.Asset.``DeleteByTheme succeeds when no assets are deleted`` data + finally dispose data + } + ] +] + +let private uploadTests = testList "Upload" [ + testTask "Add succeeds" { + let data = mkData () + try do! 
UploadDataTests.``Add succeeds`` data + finally dispose data + } + testList "FindByPath" [ + testTask "succeeds when an upload is found" { + let data = mkData () + try do! UploadDataTests.``FindByPath succeeds when an upload is found`` data + finally dispose data + } + testTask "succeeds when an upload is not found (incorrect weblog)" { + let data = mkData () + try do! UploadDataTests.``FindByPath succeeds when an upload is not found (incorrect weblog)`` data + finally dispose data + } + testTask "succeeds when an upload is not found (bad path)" { + let data = mkData () + try do! UploadDataTests.``FindByPath succeeds when an upload is not found (bad path)`` data + finally dispose data + } + ] + testList "FindByWebLog" [ + testTask "succeeds when uploads exist" { + let data = mkData () + try do! UploadDataTests.``FindByWebLog succeeds when uploads exist`` data + finally dispose data + } + testTask "succeeds when no uploads exist" { + let data = mkData () + try do! UploadDataTests.``FindByWebLog succeeds when no uploads exist`` data + finally dispose data + } + ] + testList "FindByWebLogWithData" [ + testTask "succeeds when uploads exist" { + let data = mkData () + try do! UploadDataTests.``FindByWebLogWithData succeeds when uploads exist`` data + finally dispose data + } + testTask "succeeds when no uploads exist" { + let data = mkData () + try do! UploadDataTests.``FindByWebLogWithData succeeds when no uploads exist`` data + finally dispose data + } + ] + testList "Delete" [ + testTask "succeeds when an upload is deleted" { + let data = mkData () + try do! UploadDataTests.``Delete succeeds when an upload is deleted`` data + finally dispose data + } + testTask "succeeds when an upload is not deleted" { + let data = mkData () + try do! 
UploadDataTests.``Delete succeeds when an upload is not deleted`` data + finally dispose data + } + ] +] + +let private webLogUserTests = testList "WebLogUser" [ + testTask "Add succeeds" { + // This restore ensures all the posts and pages exist + let! data = freshEnvironment None + try do! WebLogUserDataTests.``Add succeeds`` data + finally dispose data + } + testList "FindByEmail" [ + testTask "succeeds when a user is found" { + let data = mkData () + try do! WebLogUserDataTests.``FindByEmail succeeds when a user is found`` data + finally dispose data + } + testTask "succeeds when a user is not found (incorrect weblog)" { + let data = mkData () + try do! WebLogUserDataTests.``FindByEmail succeeds when a user is not found (incorrect weblog)`` data + finally dispose data + } + testTask "succeeds when a user is not found (bad email)" { + let data = mkData () + try do! WebLogUserDataTests.``FindByEmail succeeds when a user is not found (bad email)`` data + finally dispose data + } + ] + testList "FindById" [ + testTask "succeeds when a user is found" { + let data = mkData () + try do! WebLogUserDataTests.``FindById succeeds when a user is found`` data + finally dispose data + } + testTask "succeeds when a user is not found (incorrect weblog)" { + let data = mkData () + try do! WebLogUserDataTests.``FindById succeeds when a user is not found (incorrect weblog)`` data + finally dispose data + } + testTask "succeeds when a user is not found (bad ID)" { + let data = mkData () + try do! WebLogUserDataTests.``FindById succeeds when a user is not found (bad ID)`` data + finally dispose data + } + ] + testList "FindByWebLog" [ + testTask "succeeds when users exist" { + let data = mkData () + try do! WebLogUserDataTests.``FindByWebLog succeeds when users exist`` data + finally dispose data + } + testTask "succeeds when no users exist" { + let data = mkData () + try do! 
WebLogUserDataTests.``FindByWebLog succeeds when no users exist`` data + finally dispose data + } + ] + testList "FindNames" [ + testTask "succeeds when users exist" { + let data = mkData () + try do! WebLogUserDataTests.``FindNames succeeds when users exist`` data + finally dispose data + } + testTask "succeeds when users do not exist" { + let data = mkData () + try do! WebLogUserDataTests.``FindNames succeeds when users do not exist`` data + finally dispose data + } + ] + testList "SetLastSeen" [ + testTask "succeeds when the user exists" { + let data = mkData () + try do! WebLogUserDataTests.``SetLastSeen succeeds when the user exists`` data + finally dispose data + } + testTask "succeeds when the user does not exist" { + let data = mkData () + try do! WebLogUserDataTests.``SetLastSeen succeeds when the user does not exist`` data + finally dispose data + } + ] + testList "Update" [ + testTask "succeeds when the user exists" { + let data = mkData () + try do! WebLogUserDataTests.``Update succeeds when the user exists`` data + finally dispose data + } + testTask "succeeds when the user does not exist" { + let data = mkData () + try do! WebLogUserDataTests.``Update succeeds when the user does not exist`` data + finally dispose data + } + ] + testList "Delete" [ + testTask "fails when the user is the author of a page" { + let data = mkData () + try do! WebLogUserDataTests.``Delete fails when the user is the author of a page`` data + finally dispose data + } + testTask "fails when the user is the author of a post" { + let data = mkData () + try do! WebLogUserDataTests.``Delete fails when the user is the author of a post`` data + finally dispose data + } + testTask "succeeds when the user is not an author" { + let data = mkData () + try do! WebLogUserDataTests.``Delete succeeds when the user is not an author`` data + finally dispose data + } + testTask "succeeds when the user does not exist" { + let data = mkData () + try do! 
WebLogUserDataTests.``Delete succeeds when the user does not exist`` data + finally dispose data + } + ] +] + +let private webLogTests = testList "WebLog" [ + testTask "Add succeeds" { + let data = mkData () + try do! WebLogDataTests.``Add succeeds`` data + finally dispose data + } + testTask "All succeeds" { + let data = mkData () + try do! WebLogDataTests.``All succeeds`` data + finally dispose data + } + testList "FindByHost" [ + testTask "succeeds when a web log is found" { + let data = mkData () + try do! WebLogDataTests.``FindByHost succeeds when a web log is found`` data + finally dispose data + } + testTask "succeeds when a web log is not found" { + let data = mkData () + try do! WebLogDataTests.``FindByHost succeeds when a web log is not found`` data + finally dispose data + } + ] + testList "FindById" [ + testTask "succeeds when a web log is found" { + let data = mkData () + try do! WebLogDataTests.``FindById succeeds when a web log is found`` data + finally dispose data + } + testTask "succeeds when a web log is not found" { + let data = mkData () + try do! WebLogDataTests.``FindById succeeds when a web log is not found`` data + finally dispose data + } + ] + testList "UpdateRedirectRules" [ + testTask "succeeds when the web log exists" { + let data = mkData () + try do! WebLogDataTests.``UpdateRedirectRules succeeds when the web log exists`` data + finally dispose data + } + testTask "succeeds when the web log does not exist" { + let data = mkData () + try do! WebLogDataTests.``UpdateRedirectRules succeeds when the web log does not exist`` data + finally dispose data + } + ] + testList "UpdateRssOptions" [ + testTask "succeeds when the web log exists" { + let data = mkData () + try do! WebLogDataTests.``UpdateRssOptions succeeds when the web log exists`` data + finally dispose data + } + testTask "succeeds when the web log does not exist" { + let data = mkData () + try do! 
WebLogDataTests.``UpdateRssOptions succeeds when the web log does not exist`` data + finally dispose data + } + ] + testList "UpdateSettings" [ + testTask "succeeds when the web log exists" { + let data = mkData () + try do! WebLogDataTests.``UpdateSettings succeeds when the web log exists`` data + finally dispose data + } + testTask "succeeds when the web log does not exist" { + let data = mkData () + try do! WebLogDataTests.``UpdateSettings succeeds when the web log does not exist`` data + finally dispose data + } + ] + testList "Delete" [ + testTask "succeeds when the web log exists" { + let data = mkData () + try + do! WebLogDataTests.``Delete succeeds when the web log exists`` data + let! revisions = + (data :?> SQLiteData).Conn.customScalar + "SELECT (SELECT COUNT(*) FROM page_revision) + (SELECT COUNT(*) FROM post_revision) AS it" + [] + toCount + Expect.equal revisions 0L "All revisions should be deleted" + finally dispose data + } + testTask "succeeds when the web log does not exist" { + let data = mkData () + try do! WebLogDataTests.``Delete succeeds when the web log does not exist`` data + finally dispose data + } + ] +] + +/// Delete the SQLite database +let private environmentCleanUp = test "Clean Up" { + File.Delete dbName + Expect.isFalse (File.Exists dbName) "The test SQLite database should have been deleted" +} + +/// All SQLite data tests +let all = + testList "SQLiteData" + [ environmentSetUp + categoryTests + pageTests + postTests + tagMapTests + themeTests + themeAssetTests + uploadTests + webLogUserTests + webLogTests + environmentCleanUp ] + |> testSequenced diff --git a/src/MyWebLog.Tests/Data/TagMapDataTests.fs b/src/MyWebLog.Tests/Data/TagMapDataTests.fs new file mode 100644 index 0000000..d075b22 --- /dev/null +++ b/src/MyWebLog.Tests/Data/TagMapDataTests.fs @@ -0,0 +1,112 @@ +///

+/// Integration tests for implementations +/// +module TagMapDataTests + +open Expecto +open MyWebLog +open MyWebLog.Data + +/// The ID of the root web log +let private rootId = CategoryDataTests.rootId + +/// The ID of the f# tag +let private fSharpId = TagMapId "Icm027noqE-rPHKZA98vAw" + +/// The ID of the ghoti tag +let private fishId = TagMapId "GdryXh-S0kGsNBs2RIacGA" + +let ``FindById succeeds when a tag mapping is found`` (data: IData) = task { + let! tagMap = data.TagMap.FindById fSharpId rootId + Expect.isSome tagMap "There should have been a tag mapping returned" + let tag = tagMap.Value + Expect.equal tag.Id fSharpId "ID is incorrect" + Expect.equal tag.WebLogId rootId "Web log ID is incorrect" + Expect.equal tag.Tag "f#" "Tag is incorrect" + Expect.equal tag.UrlValue "f-sharp" "URL value is incorrect" +} + +let ``FindById succeeds when a tag mapping is not found (incorrect weblog)`` (data: IData) = task { + let! tagMap = data.TagMap.FindById fSharpId (WebLogId "wrong") + Expect.isNone tagMap "There should not have been a tag mapping returned" +} + +let ``FindById succeeds when a tag mapping is not found (bad tag map ID)`` (data: IData) = task { + let! tagMap = data.TagMap.FindById (TagMapId "out") rootId + Expect.isNone tagMap "There should not have been a tag mapping returned" +} + +let ``FindByUrlValue succeeds when a tag mapping is found`` (data: IData) = task { + let! tagMap = data.TagMap.FindByUrlValue "f-sharp" rootId + Expect.isSome tagMap "There should have been a tag mapping returned" + Expect.equal tagMap.Value.Id fSharpId "ID is incorrect" +} + +let ``FindByUrlValue succeeds when a tag mapping is not found (incorrect weblog)`` (data: IData) = task { + let! tagMap = data.TagMap.FindByUrlValue "f-sharp" (WebLogId "incorrect") + Expect.isNone tagMap "There should not have been a tag mapping returned" +} + +let ``FindByUrlValue succeeds when a tag mapping is not found (no such value)`` (data: IData) = task { + let! 
tagMap = data.TagMap.FindByUrlValue "c-sharp" rootId + Expect.isNone tagMap "There should not have been a tag mapping returned" +} + +let ``FindByWebLog succeeds when tag mappings are found`` (data: IData) = task { + let! mappings = data.TagMap.FindByWebLog rootId + Expect.hasLength mappings 2 "There should have been 2 tag mappings returned" + for mapping in mappings do + Expect.contains [ fSharpId; fishId ] mapping.Id $"Unexpected mapping ID ({mapping.Id})" + Expect.equal mapping.WebLogId rootId "Web log ID is incorrect" + Expect.isNotEmpty mapping.Tag "Tag should not have been blank" + Expect.isNotEmpty mapping.UrlValue "URL value should not have been blank" +} + +let ``FindByWebLog succeeds when no tag mappings are found`` (data: IData) = task { + let! mappings = data.TagMap.FindByWebLog (WebLogId "no-maps") + Expect.isEmpty mappings "There should have been no tag mappings returned" +} + +let ``FindMappingForTags succeeds when mappings exist`` (data: IData) = task { + let! mappings = data.TagMap.FindMappingForTags [ "f#"; "testing"; "unit" ] rootId + Expect.hasLength mappings 1 "There should have been one mapping returned" + Expect.equal mappings[0].Id fSharpId "The wrong mapping was returned" +} + +let ``FindMappingForTags succeeds when no mappings exist`` (data: IData) = task { + let! mappings = data.TagMap.FindMappingForTags [ "c#"; "turkey"; "ham" ] rootId + Expect.isEmpty mappings "There should have been no tag mappings returned" +} + +let ``Save succeeds when adding a tag mapping`` (data: IData) = task { + let mapId = TagMapId "test" + do! data.TagMap.Save { Id = mapId; WebLogId = rootId; Tag = "c#"; UrlValue = "c-sharp" } + let! 
mapping = data.TagMap.FindById mapId rootId + Expect.isSome mapping "The mapping should have been retrieved" + let tag = mapping.Value + Expect.equal tag.Id mapId "ID is incorrect" + Expect.equal tag.WebLogId rootId "Web log ID is incorrect" + Expect.equal tag.Tag "c#" "Tag is incorrect" + Expect.equal tag.UrlValue "c-sharp" "URL value is incorrect" +} + +let ``Save succeeds when updating a tag mapping`` (data: IData) = task { + do! data.TagMap.Save { Id = fishId; WebLogId = rootId; Tag = "halibut"; UrlValue = "mackerel" } + let! mapping = data.TagMap.FindById fishId rootId + Expect.isSome mapping "The mapping should have been retrieved" + let tag = mapping.Value + Expect.equal tag.Id fishId "ID is incorrect" + Expect.equal tag.WebLogId rootId "Web log ID is incorrect" + Expect.equal tag.Tag "halibut" "Tag is incorrect" + Expect.equal tag.UrlValue "mackerel" "URL value is incorrect" +} + +let ``Delete succeeds when a tag mapping is deleted`` (data: IData) = task { + let! deleted = data.TagMap.Delete fSharpId rootId + Expect.isTrue deleted "The tag mapping should have been deleted" +} + +let ``Delete succeeds when a tag mapping is not deleted`` (data: IData) = task { + let! 
deleted = data.TagMap.Delete fSharpId rootId // this was deleted above + Expect.isFalse deleted "A tag mapping should not have been deleted" +} diff --git a/src/MyWebLog.Tests/Data/ThemeDataTests.fs b/src/MyWebLog.Tests/Data/ThemeDataTests.fs new file mode 100644 index 0000000..5233187 --- /dev/null +++ b/src/MyWebLog.Tests/Data/ThemeDataTests.fs @@ -0,0 +1,234 @@ +/// +/// Integration tests for implementations +/// +module ThemeDataTests + +open System.IO +open Expecto +open MyWebLog +open MyWebLog.Data +open NodaTime + +/// The ID of the default theme (restored from root-weblog.json) +let private defaultId = ThemeId "default" + +/// The ID of the test theme loaded and manipulated by these tests +let private testId = ThemeId "test-theme" + +/// The dark version of the myWebLog logo +let private darkFile = File.ReadAllBytes "../admin-theme/wwwroot/logo-dark.png" + +/// The light version of the myWebLog logo +let private lightFile = File.ReadAllBytes "../admin-theme/wwwroot/logo-light.png" + +/// Ensure that theme templates do not have any text +let private ensureNoText theme = + for template in theme.Templates do + Expect.equal template.Text "" $"Text for template {template.Name} should have been blank" + +let ``All succeeds`` (data: IData) = task { + let! themes = data.Theme.All() + Expect.hasLength themes 1 "There should have been one theme returned" + Expect.equal themes[0].Id defaultId "ID was incorrect" + Expect.equal themes[0].Name "myWebLog Default Theme" "Name was incorrect" + Expect.equal themes[0].Version "2.1.0" "Version was incorrect" + ensureNoText themes[0] +} + +let ``Exists succeeds when the theme exists`` (data: IData) = task { + let! exists = data.Theme.Exists defaultId + Expect.isTrue exists "The \"default\" theme should have existed" +} + +let ``Exists succeeds when the theme does not exist`` (data: IData) = task { + let! 
exists = data.Theme.Exists (ThemeId "fancy") + Expect.isFalse exists "The \"fancy\" theme should not have existed" +} + +let ``FindById succeeds when the theme exists`` (data: IData) = task { + let! theme = data.Theme.FindById defaultId + Expect.isSome theme "The theme should have been found" + let it = theme.Value + Expect.equal it.Id defaultId "ID was incorrect" + Expect.equal it.Name "myWebLog Default Theme" "Name was incorrect" + Expect.equal it.Version "2.1.0" "Version was incorrect" + for template in it.Templates do + Expect.isNotEmpty template.Text $"Text for template {template.Name} should not have been blank" +} + +let ``FindById succeeds when the theme does not exist`` (data: IData) = task { + let! theme = data.Theme.FindById (ThemeId "missing") + Expect.isNone theme "There should not have been a theme found" +} + +let ``FindByIdWithoutText succeeds when the theme exists`` (data: IData) = task { + let! theme = data.Theme.FindByIdWithoutText defaultId + Expect.isSome theme "The theme should have been found" + let it = theme.Value + Expect.equal it.Id defaultId "ID was incorrect" + ensureNoText it +} + +let ``FindByIdWithoutText succeeds when the theme does not exist`` (data: IData) = task { + let! theme = data.Theme.FindByIdWithoutText (ThemeId "ornate") + Expect.isNone theme "There should not have been a theme found" +} + +let ``Save succeeds when adding a theme`` (data: IData) = task { + do! data.Theme.Save + { Id = testId + Name = "Test Theme" + Version = "evergreen" + Templates = + [ { Name = "index"; Text = "

{{ values_here }}

" } + { Name = "single-post"; Text = "

{{ the_post }}" } ] } + let! saved = data.Theme.FindById testId + Expect.isSome saved "There should have been a theme returned" + let it = saved.Value + Expect.equal it.Id testId "ID was incorrect" + Expect.equal it.Name "Test Theme" "Name was incorrect" + Expect.equal it.Version "evergreen" "Version was incorrect" + Expect.hasLength it.Templates 2 "There should have been 2 templates" + Expect.equal it.Templates[0].Name "index" "Template 0 name incorrect" + Expect.equal it.Templates[0].Text "

{{ values_here }}

" "Template 0 text incorrect" + Expect.equal it.Templates[1].Name "single-post" "Template 1 name incorrect" + Expect.equal it.Templates[1].Text "

{{ the_post }}" "Template 1 text incorrect" +} + +let ``Save succeeds when updating a theme`` (data: IData) = task { + do! data.Theme.Save + { Id = testId + Name = "Updated Theme" + Version = "still evergreen" + Templates = + [ { Name = "index"; Text = "

{{ values_there }}

" } + { Name = "layout"; Text = "" } + { Name = "single-post"; Text = "

{{ the_post }}" } ] } + let! updated = data.Theme.FindById testId + Expect.isSome updated "The updated theme should have been returned" + let it = updated.Value + Expect.equal it.Id testId "ID was incorrect" + Expect.equal it.Name "Updated Theme" "Name was incorrect" + Expect.equal it.Version "still evergreen" "Version was incorrect" + Expect.hasLength it.Templates 3 "There should have been 3 templates" + Expect.equal it.Templates[0].Name "index" "Template 0 name incorrect" + Expect.equal it.Templates[0].Text "

{{ values_there }}

" "Template 0 text incorrect" + Expect.equal it.Templates[1].Name "layout" "Template 1 name incorrect" + Expect.equal it.Templates[1].Text "" "Template 1 text incorrect" + Expect.equal it.Templates[2].Name "single-post" "Template 2 name incorrect" + Expect.equal it.Templates[2].Text "

{{ the_post }}" "Template 2 text incorrect" +} + +let ``Delete succeeds when a theme is deleted`` (data: IData) = task { + // Delete should also delete assets associated with the theme + do! data.ThemeAsset.Save { Id = ThemeAssetId (testId, "logo-dark.png"); UpdatedOn = Noda.epoch; Data = darkFile } + do! data.ThemeAsset.Save { Id = ThemeAssetId (testId, "logo-light.png"); UpdatedOn = Noda.epoch; Data = lightFile } + let! deleted = data.Theme.Delete testId + Expect.isTrue deleted "The theme should have been deleted" + let! assets = data.ThemeAsset.FindByTheme testId + Expect.isEmpty assets "The theme's assets should have been deleted" +} + +let ``Delete succeeds when a theme is not deleted`` (data: IData) = task { + let! deleted = data.Theme.Delete (ThemeId "test-theme") // already deleted above + Expect.isFalse deleted "The theme should not have been deleted" +} + +///

+/// Integration tests for implementations +/// +module Asset = + + /// The theme ID for which assets will be tested + let private assetThemeId = ThemeId "asset-test" + + /// The asset ID for the dark logo + let private darkId = ThemeAssetId (assetThemeId, "logo-dark.png") + + /// The asset ID for the light logo + let private lightId = ThemeAssetId (assetThemeId, "logo-light.png") + + let ``Save succeeds when adding an asset`` (data: IData) = task { + do! data.Theme.Save { Theme.Empty with Id = assetThemeId } + do! data.ThemeAsset.Save { Id = lightId; UpdatedOn = Noda.epoch + Duration.FromDays 18; Data = lightFile } + let! asset = data.ThemeAsset.FindById lightId + Expect.isSome asset "The asset should have been found" + let it = asset.Value + Expect.equal it.Id lightId "ID was incorrect" + Expect.equal it.UpdatedOn (Noda.epoch + Duration.FromDays 18) "Updated on was incorrect" + Expect.equal it.Data lightFile "Data was incorrect" + } + + let ``Save succeeds when updating an asset`` (data: IData) = task { + do! data.ThemeAsset.Save { Id = lightId; UpdatedOn = Noda.epoch + Duration.FromDays 20; Data = darkFile } + let! asset = data.ThemeAsset.FindById lightId + Expect.isSome asset "The asset should have been found" + let it = asset.Value + Expect.equal it.Id lightId "ID was incorrect" + Expect.equal it.UpdatedOn (Noda.epoch + Duration.FromDays 20) "Updated on was incorrect" + Expect.equal it.Data darkFile "Data was incorrect" + } + + let ``All succeeds`` (data: IData) = task { + let! all = data.ThemeAsset.All() + Expect.hasLength all 2 "There should have been 2 assets retrieved" + for asset in all do + Expect.contains + [ ThemeAssetId (defaultId, "style.css"); lightId ] asset.Id $"Unexpected asset found ({asset.Id})" + Expect.isEmpty asset.Data $"Asset {asset.Id} should not have had data" + } + + let ``FindById succeeds when an asset is found`` (data: IData) = task { + let! 
asset = data.ThemeAsset.FindById lightId + Expect.isSome asset "The asset should have been found" + let it = asset.Value + Expect.equal it.Id lightId "ID was incorrect" + Expect.equal it.UpdatedOn (Noda.epoch + Duration.FromDays 20) "Updated on was incorrect" + Expect.equal it.Data darkFile "Data was incorrect" + } + + let ``FindById succeeds when an asset is not found`` (data: IData) = task { + let! asset = data.ThemeAsset.FindById (ThemeAssetId (assetThemeId, "404.jpg")) + Expect.isNone asset "There should not have been an asset returned" + } + + let ``FindByTheme succeeds when assets exist`` (data: IData) = task { + do! data.ThemeAsset.Save { Id = darkId; UpdatedOn = Noda.epoch; Data = darkFile } + do! data.ThemeAsset.Save { Id = lightId; UpdatedOn = Noda.epoch; Data = lightFile } + let! assets = data.ThemeAsset.FindByTheme assetThemeId + Expect.hasLength assets 2 "There should have been 2 assets returned" + for asset in assets do + Expect.contains [ darkId; lightId ] asset.Id $"Unexpected asset found ({asset.Id})" + Expect.equal asset.UpdatedOn Noda.epoch $"Updated on was incorrect ({asset.Id})" + Expect.isEmpty asset.Data $"Data should not have been retrieved ({asset.Id})" + } + + let ``FindByTheme succeeds when assets do not exist`` (data: IData) = task { + let! assets = data.ThemeAsset.FindByTheme (ThemeId "no-assets-here") + Expect.isEmpty assets "There should have been no assets returned" + } + + let ``FindByThemeWithData succeeds when assets exist`` (data: IData) = task { + let! 
assets = data.ThemeAsset.FindByThemeWithData assetThemeId + Expect.hasLength assets 2 "There should have been 2 assets returned" + let darkLogo = assets |> List.find (fun it -> it.Id = darkId) + Expect.equal darkLogo.Data darkFile "The dark asset's data is incorrect" + let lightLogo = assets |> List.find (fun it -> it.Id = lightId) + Expect.equal lightLogo.Data lightFile "The light asset's data is incorrect" + } + + let ``FindByThemeWithData succeeds when assets do not exist`` (data: IData) = task { + let! assets = data.ThemeAsset.FindByThemeWithData (ThemeId "still-no-assets") + Expect.isEmpty assets "There should have been no assets returned" + } + + let ``DeleteByTheme succeeds when assets are deleted`` (data: IData) = task { + do! data.ThemeAsset.DeleteByTheme assetThemeId + let! assets = data.ThemeAsset.FindByTheme assetThemeId + Expect.isEmpty assets "There should be no assets remaining" + } + + let ``DeleteByTheme succeeds when no assets are deleted`` (data: IData) = task { + do! data.ThemeAsset.DeleteByTheme assetThemeId // already deleted above + Expect.isTrue true "The above did not raise an exception; that's the test" + } diff --git a/src/MyWebLog.Tests/Data/UploadDataTests.fs b/src/MyWebLog.Tests/Data/UploadDataTests.fs new file mode 100644 index 0000000..636a9f6 --- /dev/null +++ b/src/MyWebLog.Tests/Data/UploadDataTests.fs @@ -0,0 +1,95 @@ +/// +/// Integration tests for implementations +/// +module UploadDataTests + +open System +open System.IO +open Expecto +open MyWebLog +open MyWebLog.Data +open NodaTime + +/// The ID of the root web log +let private rootId = CategoryDataTests.rootId + +/// The ID of the favicon upload +let private faviconId = UploadId "XweKbWQiOkqqrjEdgP9wwg" + +let ``Add succeeds`` (data: IData) = task { + let file = File.ReadAllBytes "../admin-theme/wwwroot/logo-dark.png" + do! 
data.Upload.Add + { Id = UploadId "new-upload" + WebLogId = rootId + UpdatedOn = Noda.epoch + Duration.FromDays 30 + Path = Permalink "1970/01/logo-dark.png" + Data = file } + let! added = data.Upload.FindByPath "1970/01/logo-dark.png" rootId + Expect.isSome added "There should have been an upload returned" + let upload = added.Value + Expect.equal upload.Id (UploadId "new-upload") "ID is incorrect" + Expect.equal upload.WebLogId rootId "Web log ID is incorrect" + Expect.equal upload.UpdatedOn (Noda.epoch + Duration.FromDays 30) "Updated on is incorrect" + Expect.equal upload.Path (Permalink "1970/01/logo-dark.png") "Path is incorrect" + Expect.equal upload.Data file "Data is incorrect" +} + +let ``FindByPath succeeds when an upload is found`` (data: IData) = task { + let! upload = data.Upload.FindByPath "2022/06/favicon.ico" rootId + Expect.isSome upload "There should have been an upload returned" + let it = upload.Value + Expect.equal it.Id faviconId "ID is incorrect" + Expect.equal it.WebLogId rootId "Web log ID is incorrect" + Expect.equal + it.UpdatedOn (Instant.FromDateTimeOffset(DateTimeOffset.Parse "2022-06-23T21:15:40Z")) "Updated on is incorrect" + Expect.equal it.Path (Permalink "2022/06/favicon.ico") "Path is incorrect" + Expect.isNonEmpty it.Data "Data should have been retrieved" +} + +let ``FindByPath succeeds when an upload is not found (incorrect weblog)`` (data: IData) = task { + let! upload = data.Upload.FindByPath "2022/06/favicon.ico" (WebLogId "wrong") + Expect.isNone upload "There should not have been an upload returned" +} + +let ``FindByPath succeeds when an upload is not found (bad path)`` (data: IData) = task { + let! upload = data.Upload.FindByPath "2022/07/favicon.ico" rootId + Expect.isNone upload "There should not have been an upload returned" +} + +let ``FindByWebLog succeeds when uploads exist`` (data: IData) = task { + let! 
uploads = data.Upload.FindByWebLog rootId + Expect.hasLength uploads 2 "There should have been 2 uploads returned" + for upload in uploads do + Expect.contains [ faviconId; UploadId "new-upload" ] upload.Id $"Unexpected upload returned ({upload.Id})" + Expect.isEmpty upload.Data $"Upload should not have had its data ({upload.Id})" +} + +let ``FindByWebLog succeeds when no uploads exist`` (data: IData) = task { + let! uploads = data.Upload.FindByWebLog (WebLogId "nothing") + Expect.isEmpty uploads "There should have been no uploads returned" +} + +let ``FindByWebLogWithData succeeds when uploads exist`` (data: IData) = task { + let! uploads = data.Upload.FindByWebLogWithData rootId + Expect.hasLength uploads 2 "There should have been 2 uploads returned" + for upload in uploads do + Expect.contains [ faviconId; UploadId "new-upload" ] upload.Id $"Unexpected upload returned ({upload.Id})" + Expect.isNonEmpty upload.Data $"Upload should have had its data ({upload.Id})" +} + +let ``FindByWebLogWithData succeeds when no uploads exist`` (data: IData) = task { + let! uploads = data.Upload.FindByWebLogWithData (WebLogId "data-nope") + Expect.isEmpty uploads "There should have been no uploads returned" +} + +let ``Delete succeeds when an upload is deleted`` (data: IData) = task { + match! data.Upload.Delete faviconId rootId with + | Ok path -> Expect.equal path "2022/06/favicon.ico" "The path of the deleted upload was incorrect" + | Error it -> Expect.isTrue false $"Upload deletion should have succeeded (message {it})" +} + +let ``Delete succeeds when an upload is not deleted`` (data: IData) = task { + match! 
data.Upload.Delete faviconId rootId with + | Ok it -> Expect.isTrue false $"Upload deletion should not have succeeded (path {it})" + | Error msg -> Expect.equal msg $"Upload ID {faviconId} not found" "Error message was incorrect" +} diff --git a/src/MyWebLog.Tests/Data/UtilsTests.fs b/src/MyWebLog.Tests/Data/UtilsTests.fs new file mode 100644 index 0000000..9346e87 --- /dev/null +++ b/src/MyWebLog.Tests/Data/UtilsTests.fs @@ -0,0 +1,96 @@ +module UtilsTests + +open Expecto +open MyWebLog +open MyWebLog.Data +open NodaTime + +/// Unit tests for the orderByHierarchy function +let orderByHierarchyTests = test "orderByHierarchy succeeds" { + let rawCats = + [ { Category.Empty with Id = CategoryId "a"; Name = "Audio"; Slug = "audio"; ParentId = Some (CategoryId "p") } + { Category.Empty with + Id = CategoryId "b" + Name = "Breaking" + Description = Some "Breaking News" + Slug = "breaking" + ParentId = Some (CategoryId "n") } + { Category.Empty with Id = CategoryId "l"; Name = "Local"; Slug = "local"; ParentId = Some (CategoryId "b") } + { Category.Empty with Id = CategoryId "n"; Name = "News"; Slug = "news" } + { Category.Empty with Id = CategoryId "p"; Name = "Podcast"; Slug = "podcast" } + { Category.Empty with Id = CategoryId "v"; Name = "Video"; Slug = "vid"; ParentId = Some (CategoryId "p") } ] + let cats = Utils.orderByHierarchy rawCats None None [] |> List.ofSeq + Expect.equal cats.Length 6 "There should have been 6 categories" + Expect.equal cats[0].Id "n" "The first top-level category should have been News" + Expect.equal cats[0].Slug "news" "Slug for News not filled properly" + Expect.isEmpty cats[0].ParentNames "Parent names for News not filled properly" + Expect.equal cats[1].Id "b" "Breaking should have been just below News" + Expect.equal cats[1].Slug "news/breaking" "Slug for Breaking not filled properly" + Expect.equal cats[1].Name "Breaking" "Name not filled properly" + Expect.equal cats[1].Description (Some "Breaking News") "Description not filled 
properly" + Expect.equal cats[1].ParentNames [| "News" |] "Parent names for Breaking not filled properly" + Expect.equal cats[2].Id "l" "Local should have been just below Breaking" + Expect.equal cats[2].Slug "news/breaking/local" "Slug for Local not filled properly" + Expect.equal cats[2].ParentNames [| "News"; "Breaking" |] "Parent names for Local not filled properly" + Expect.equal cats[3].Id "p" "Podcast should have been the next top-level category" + Expect.equal cats[3].Slug "podcast" "Slug for Podcast not filled properly" + Expect.isEmpty cats[3].ParentNames "Parent names for Podcast not filled properly" + Expect.equal cats[4].Id "a" "Audio should have been just below Podcast" + Expect.equal cats[4].Slug "podcast/audio" "Slug for Audio not filled properly" + Expect.equal cats[4].ParentNames [| "Podcast" |] "Parent names for Audio not filled properly" + Expect.equal cats[5].Id "v" "Video should have been below Audio" + Expect.equal cats[5].Slug "podcast/vid" "Slug for Video not filled properly" + Expect.equal cats[5].ParentNames [| "Podcast" |] "Parent names for Video not filled properly" + Expect.hasCountOf cats 6u (fun it -> it.PostCount = 0) "All post counts should have been 0" +} + +/// Unit tests for the diffLists function +let diffListsTests = testList "diffLists" [ + test "succeeds with identical lists" { + let removed, added = Utils.diffLists [ 1; 2; 3 ] [ 1; 2; 3 ] id + Expect.isEmpty removed "There should have been no removed items returned" + Expect.isEmpty added "There should have been no added items returned" + } + test "succeeds with differing lists" { + let removed, added = Utils.diffLists [ 1; 2; 3 ] [ 3; 4; 5 ] string + Expect.equal removed [ 1; 2 ] "Removed items incorrect" + Expect.equal added [ 4; 5 ] "Added items incorrect" + } +] + +/// Unit tests for the diffRevisions function +let diffRevisionsTests = testList "diffRevisions" [ + test "succeeds with identical lists" { + let oldItems = + [ { AsOf = Noda.epoch + Duration.FromDays 3; Text 
= Html "

test" } + { AsOf = Noda.epoch; Text = Html "

test test" } ] + let newItems = + [ { AsOf = Noda.epoch; Text = Html "

test test" } + { AsOf = Noda.epoch + Duration.FromDays 3; Text = Html "

test" } ] + let removed, added = Utils.diffRevisions oldItems newItems + Expect.isEmpty removed "There should have been no removed items returned" + Expect.isEmpty added "There should have been no added items returned" + } + test "succeeds with differing lists" { + let oldItems = + [ { AsOf = Noda.epoch + Duration.FromDays 3; Text = Html "

test" } + { AsOf = Noda.epoch + Duration.FromDays 2; Text = Html "

tests" } + { AsOf = Noda.epoch; Text = Html "

test test" } ] + let newItems = + [ { AsOf = Noda.epoch + Duration.FromDays 4; Text = Html "

tests" } + { AsOf = Noda.epoch + Duration.FromDays 3; Text = Html "

test" } + { AsOf = Noda.epoch; Text = Html "

test test" } ] + let removed, added = Utils.diffRevisions oldItems newItems + Expect.equal removed.Length 1 "There should be 1 removed item" + Expect.equal removed[0].AsOf (Noda.epoch + Duration.FromDays 2) "Expected removed item incorrect" + Expect.equal added.Length 1 "There should be 1 added item" + Expect.equal added[0].AsOf (Noda.epoch + Duration.FromDays 4) "Expected added item incorrect" + } +] + +/// All tests for the Utils file +let all = testList "Utils" [ + orderByHierarchyTests + diffListsTests + diffRevisionsTests +] diff --git a/src/MyWebLog.Tests/Data/WebLogDataTests.fs b/src/MyWebLog.Tests/Data/WebLogDataTests.fs new file mode 100644 index 0000000..f87a486 --- /dev/null +++ b/src/MyWebLog.Tests/Data/WebLogDataTests.fs @@ -0,0 +1,198 @@ +///

+/// Integration tests for implementations +/// +module WebLogDataTests + +open System +open Expecto +open MyWebLog +open MyWebLog.Data + +/// The ID of the root web log +let private rootId = CategoryDataTests.rootId + +let ``Add succeeds`` (data: IData) = task { + do! data.WebLog.Add + { Id = WebLogId "new-weblog" + Name = "Test Web Log" + Slug = "test-web-log" + Subtitle = None + DefaultPage = "" + PostsPerPage = 7 + ThemeId = ThemeId "default" + UrlBase = "https://example.com/new" + TimeZone = "America/Los_Angeles" + Rss = + { IsFeedEnabled = true + FeedName = "my-feed.xml" + ItemsInFeed = None + IsCategoryEnabled = false + IsTagEnabled = false + Copyright = Some "go for it" + CustomFeeds = [] } + AutoHtmx = true + Uploads = Disk + RedirectRules = [ { From = "/here"; To = "/there"; IsRegex = false } ] } + let! webLog = data.WebLog.FindById (WebLogId "new-weblog") + Expect.isSome webLog "The web log should have been returned" + let it = webLog.Value + Expect.equal it.Id (WebLogId "new-weblog") "ID is incorrect" + Expect.equal it.Name "Test Web Log" "Name is incorrect" + Expect.equal it.Slug "test-web-log" "Slug is incorrect" + Expect.isNone it.Subtitle "Subtitle is incorrect" + Expect.equal it.DefaultPage "" "Default page is incorrect" + Expect.equal it.PostsPerPage 7 "Posts per page is incorrect" + Expect.equal it.ThemeId (ThemeId "default") "Theme ID is incorrect" + Expect.equal it.UrlBase "https://example.com/new" "URL base is incorrect" + Expect.equal it.TimeZone "America/Los_Angeles" "Time zone is incorrect" + Expect.isTrue it.AutoHtmx "Auto htmx flag is incorrect" + Expect.equal it.Uploads Disk "Upload destination is incorrect" + Expect.equal it.RedirectRules [ { From = "/here"; To = "/there"; IsRegex = false } ] "Redirect rules are incorrect" + let rss = it.Rss + Expect.isTrue rss.IsFeedEnabled "Is feed enabled flag is incorrect" + Expect.equal rss.FeedName "my-feed.xml" "Feed name is incorrect" + Expect.isNone rss.ItemsInFeed "Items in feed is incorrect" 
+ Expect.isFalse rss.IsCategoryEnabled "Is category enabled flag is incorrect" + Expect.isFalse rss.IsTagEnabled "Is tag enabled flag is incorrect" + Expect.equal rss.Copyright (Some "go for it") "Copyright is incorrect" + Expect.isEmpty rss.CustomFeeds "Custom feeds are incorrect" +} + +let ``All succeeds`` (data: IData) = task { + let! webLogs = data.WebLog.All() + Expect.hasLength webLogs 2 "There should have been 2 web logs returned" + for webLog in webLogs do + Expect.contains [ rootId; WebLogId "new-weblog" ] webLog.Id $"Unexpected web log returned ({webLog.Id})" +} + +let ``FindByHost succeeds when a web log is found`` (data: IData) = task { + let! webLog = data.WebLog.FindByHost "http://localhost:8081" + Expect.isSome webLog "A web log should have been returned" + Expect.equal webLog.Value.Id rootId "The wrong web log was returned" +} + +let ``FindByHost succeeds when a web log is not found`` (data: IData) = task { + let! webLog = data.WebLog.FindByHost "https://test.units" + Expect.isNone webLog "There should not have been a web log returned" +} + +let ``FindById succeeds when a web log is found`` (data: IData) = task { + let! 
webLog = data.WebLog.FindById rootId + Expect.isSome webLog "There should have been a web log returned" + let it = webLog.Value + Expect.equal it.Id rootId "ID is incorrect" + Expect.equal it.Name "Root WebLog" "Name is incorrect" + Expect.equal it.Slug "root-weblog" "Slug is incorrect" + Expect.equal it.Subtitle (Some "This is the main one") "Subtitle is incorrect" + Expect.equal it.DefaultPage "posts" "Default page is incorrect" + Expect.equal it.PostsPerPage 9 "Posts per page is incorrect" + Expect.equal it.ThemeId (ThemeId "default") "Theme ID is incorrect" + Expect.equal it.UrlBase "http://localhost:8081" "URL base is incorrect" + Expect.equal it.TimeZone "America/Denver" "Time zone is incorrect" + Expect.isTrue it.AutoHtmx "Auto htmx flag is incorrect" + Expect.equal it.Uploads Database "Upload destination is incorrect" + Expect.isEmpty it.RedirectRules "Redirect rules are incorrect" + let rss = it.Rss + Expect.isTrue rss.IsFeedEnabled "Is feed enabled flag is incorrect" + Expect.equal rss.FeedName "feed" "Feed name is incorrect" + Expect.equal rss.ItemsInFeed (Some 7) "Items in feed is incorrect" + Expect.isTrue rss.IsCategoryEnabled "Is category enabled flag is incorrect" + Expect.isTrue rss.IsTagEnabled "Is tag enabled flag is incorrect" + Expect.equal rss.Copyright (Some "CC40-NC-BY") "Copyright is incorrect" + Expect.hasLength rss.CustomFeeds 1 "There should be 1 custom feed" + Expect.equal rss.CustomFeeds[0].Id (CustomFeedId "isPQ6drbDEydxohQzaiYtQ") "Custom feed ID incorrect" + Expect.equal rss.CustomFeeds[0].Source (Tag "podcast") "Custom feed source is incorrect" + Expect.equal rss.CustomFeeds[0].Path (Permalink "podcast-feed") "Custom feed path is incorrect" + Expect.isSome rss.CustomFeeds[0].Podcast "There should be podcast settings for this custom feed" + let pod = rss.CustomFeeds[0].Podcast.Value + Expect.equal pod.Title "Root Podcast" "Podcast title is incorrect" + Expect.equal pod.ItemsInFeed 23 "Podcast items in feed is incorrect" + 
Expect.equal pod.Summary "All things that happen in the domain root" "Podcast summary is incorrect" + Expect.equal pod.DisplayedAuthor "Podcaster Extraordinaire" "Podcast author is incorrect" + Expect.equal pod.Email "podcaster@example.com" "Podcast e-mail is incorrect" + Expect.equal pod.ImageUrl (Permalink "images/cover-art.png") "Podcast image URL is incorrect" + Expect.equal pod.AppleCategory "Fiction" "Podcast Apple category is incorrect" + Expect.equal pod.AppleSubcategory (Some "Drama") "Podcast Apple subcategory is incorrect" + Expect.equal pod.Explicit No "Podcast explicit rating is incorrect" + Expect.equal pod.DefaultMediaType (Some "audio/mpeg") "Podcast default media type is incorrect" + Expect.equal pod.MediaBaseUrl (Some "https://media.example.com/root/") "Podcast media base URL is incorrect" + Expect.equal pod.PodcastGuid (Some (Guid.Parse "10fd7f79-c719-4e1d-9da7-10405dd4fd96")) "Podcast GUID is incorrect" + Expect.equal pod.FundingUrl (Some "https://example.com/support-us") "Podcast funding URL is incorrect" + Expect.equal pod.FundingText (Some "Support Our Work") "Podcast funding text is incorrect" + Expect.equal pod.Medium (Some Newsletter) "Podcast medium is incorrect" +} + +let ``FindById succeeds when a web log is not found`` (data: IData) = task { + let! webLog = data.WebLog.FindById (WebLogId "no-web-log") + Expect.isNone webLog "There should not have been a web log returned" +} + +let ``UpdateRedirectRules succeeds when the web log exists`` (data: IData) = task { + let! webLog = data.WebLog.FindById (WebLogId "new-weblog") + Expect.isSome webLog "The test web log should have been returned" + do! data.WebLog.UpdateRedirectRules + { webLog.Value with + RedirectRules = { From = "/now"; To = "/later"; IsRegex = false } :: webLog.Value.RedirectRules } + let! 
updated = data.WebLog.FindById (WebLogId "new-weblog") + Expect.isSome updated "The updated web log should have been returned" + Expect.equal + updated.Value.RedirectRules + [ { From = "/now"; To = "/later"; IsRegex = false }; { From = "/here"; To = "/there"; IsRegex = false } ] + "Redirect rules not updated correctly" +} + +let ``UpdateRedirectRules succeeds when the web log does not exist`` (data: IData) = task { + do! data.WebLog.UpdateRedirectRules { WebLog.Empty with Id = WebLogId "no-rules" } + Expect.isTrue true "This not raising an exception is the test" +} + +let ``UpdateRssOptions succeeds when the web log exists`` (data: IData) = task { + let! webLog = data.WebLog.FindById rootId + Expect.isSome webLog "The root web log should have been returned" + do! data.WebLog.UpdateRssOptions { webLog.Value with Rss = { webLog.Value.Rss with CustomFeeds = [] } } + let! updated = data.WebLog.FindById rootId + Expect.isSome updated "The updated web log should have been returned" + Expect.isEmpty updated.Value.Rss.CustomFeeds "RSS options not updated correctly" +} + +let ``UpdateRssOptions succeeds when the web log does not exist`` (data: IData) = task { + do! data.WebLog.UpdateRssOptions { WebLog.Empty with Id = WebLogId "rss-less" } + Expect.isTrue true "This not raising an exception is the test" +} + +let ``UpdateSettings succeeds when the web log exists`` (data: IData) = task { + let! webLog = data.WebLog.FindById rootId + Expect.isSome webLog "The root web log should have been returned" + do! data.WebLog.UpdateSettings { webLog.Value with AutoHtmx = false; Subtitle = None } + let! updated = data.WebLog.FindById rootId + Expect.isSome updated "The updated web log should have been returned" + Expect.isFalse updated.Value.AutoHtmx "Auto htmx flag not updated correctly" + Expect.isNone updated.Value.Subtitle "Subtitle not updated correctly" +} + +let ``UpdateSettings succeeds when the web log does not exist`` (data: IData) = task { + do! 
data.WebLog.UpdateSettings { WebLog.Empty with Id = WebLogId "no-settings" }
+    let! webLog = data.WebLog.FindById (WebLogId "no-settings")
+    Expect.isNone webLog "Updating settings should not have created a web log"
+}
+
+let ``Delete succeeds when the web log exists`` (data: IData) = task {
+    do! data.WebLog.Delete rootId
+    let! cats = data.Category.FindByWebLog rootId
+    Expect.isEmpty cats "There should be no categories remaining"
+    let! pages = data.Page.FindFullByWebLog rootId
+    Expect.isEmpty pages "There should be no pages remaining"
+    let! posts = data.Post.FindFullByWebLog rootId
+    Expect.isEmpty posts "There should be no posts remaining"
+    let! tagMappings = data.TagMap.FindByWebLog rootId
+    Expect.isEmpty tagMappings "There should be no tag mappings remaining"
+    let! uploads = data.Upload.FindByWebLog rootId
+    Expect.isEmpty uploads "There should be no uploads remaining"
+    let! users = data.WebLogUser.FindByWebLog rootId
+    Expect.isEmpty users "There should be no users remaining"
+}
+
+let ``Delete succeeds when the web log does not exist`` (data: IData) = task {
+    do! 
data.WebLog.Delete rootId // already deleted above + Expect.isTrue true "This not raising an exception is the test" +} diff --git a/src/MyWebLog.Tests/Data/WebLogUserDataTests.fs b/src/MyWebLog.Tests/Data/WebLogUserDataTests.fs new file mode 100644 index 0000000..ab5aa1f --- /dev/null +++ b/src/MyWebLog.Tests/Data/WebLogUserDataTests.fs @@ -0,0 +1,184 @@ +/// +/// Integration tests for implementations +/// +module WebLogUserDataTests + +open Expecto +open MyWebLog +open MyWebLog.Data +open NodaTime + +/// The ID of the root web log +let private rootId = CategoryDataTests.rootId + +/// The ID of the admin user +let private adminId = WebLogUserId "5EM2rimH9kONpmd2zQkiVA" + +/// The ID of the editor user +let private editorId = WebLogUserId "GPbJaSOwTkKt14ZKYyveKA" + +/// The ID of the author user +let private authorId = WebLogUserId "iIRNLSeY0EanxRPyqGuwVg" + +/// The ID of the user added during the run of these tests +let private newId = WebLogUserId "new-user" + +let ``Add succeeds`` (data: IData) = task { + do! data.WebLogUser.Add + { Id = newId + WebLogId = rootId + Email = "new@example.com" + FirstName = "New" + LastName = "User" + PreferredName = "n00b" + PasswordHash = "hashed-password" + Url = Some "https://example.com/~new" + AccessLevel = Author + CreatedOn = Noda.epoch + Duration.FromDays 365 + LastSeenOn = None } + let! 
user = data.WebLogUser.FindById newId rootId + Expect.isSome user "There should have been a user returned" + let it = user.Value + Expect.equal it.Id newId "ID is incorrect" + Expect.equal it.WebLogId rootId "Web log ID is incorrect" + Expect.equal it.Email "new@example.com" "E-mail address is incorrect" + Expect.equal it.FirstName "New" "First name is incorrect" + Expect.equal it.LastName "User" "Last name is incorrect" + Expect.equal it.PreferredName "n00b" "Preferred name is incorrect" + Expect.equal it.PasswordHash "hashed-password" "Password hash is incorrect" + Expect.equal it.Url (Some "https://example.com/~new") "URL is incorrect" + Expect.equal it.AccessLevel Author "Access level is incorrect" + Expect.equal it.CreatedOn (Noda.epoch + Duration.FromDays 365) "Created on is incorrect" + Expect.isNone it.LastSeenOn "Last seen on should not have had a value" +} + +let ``FindByEmail succeeds when a user is found`` (data: IData) = task { + let! user = data.WebLogUser.FindByEmail "root@example.com" rootId + Expect.isSome user "There should have been a user returned" + Expect.equal user.Value.Id adminId "The wrong user was returned" +} + +let ``FindByEmail succeeds when a user is not found (incorrect weblog)`` (data: IData) = task { + let! user = data.WebLogUser.FindByEmail "root@example.com" (WebLogId "other") + Expect.isNone user "There should not have been a user returned" +} + +let ``FindByEmail succeeds when a user is not found (bad email)`` (data: IData) = task { + let! user = data.WebLogUser.FindByEmail "wwwdata@example.com" rootId + Expect.isNone user "There should not have been a user returned" +} + +let ``FindById succeeds when a user is found`` (data: IData) = task { + let! 
user = data.WebLogUser.FindById adminId rootId + Expect.isSome user "There should have been a user returned" + Expect.equal user.Value.Id adminId "The wrong user was returned" + // The remainder of field population is tested in the "Add succeeds" test above +} + +let ``FindById succeeds when a user is not found (incorrect weblog)`` (data: IData) = task { + let! user = data.WebLogUser.FindById adminId (WebLogId "not-admin") + Expect.isNone user "There should not have been a user returned" +} + +let ``FindById succeeds when a user is not found (bad ID)`` (data: IData) = task { + let! user = data.WebLogUser.FindById (WebLogUserId "tom") rootId + Expect.isNone user "There should not have been a user returned" +} + +let ``FindByWebLog succeeds when users exist`` (data: IData) = task { + let! users = data.WebLogUser.FindByWebLog rootId + Expect.hasLength users 4 "There should have been 4 users returned" + for user in users do + Expect.contains [ adminId; editorId; authorId; newId ] user.Id $"Unexpected user returned ({user.Id})" +} + +let ``FindByWebLog succeeds when no users exist`` (data: IData) = task { + let! users = data.WebLogUser.FindByWebLog (WebLogId "no-users") + Expect.isEmpty users "There should have been no users returned" +} + +let ``FindNames succeeds when users exist`` (data: IData) = task { + let! names = data.WebLogUser.FindNames rootId [ editorId; authorId ] + let expected = + [ { Name = string editorId; Value = "Edits It-Or" }; { Name = string authorId; Value = "Mister Dude" } ] + Expect.hasLength names 2 "There should have been 2 names returned" + for name in names do Expect.contains expected name $"Unexpected name returned ({name.Name}|{name.Value})" +} + +let ``FindNames succeeds when users do not exist`` (data: IData) = task { + let! 
names = data.WebLogUser.FindNames rootId [ WebLogUserId "nope"; WebLogUserId "no" ] + Expect.isEmpty names "There should have been no names returned" +} + +let ``SetLastSeen succeeds when the user exists`` (data: IData) = task { + let now = Noda.now () + do! data.WebLogUser.SetLastSeen newId rootId + let! user = data.WebLogUser.FindById newId rootId + Expect.isSome user "The user should have been returned" + let it = user.Value + Expect.isSome it.LastSeenOn "Last seen on should have been set" + Expect.isGreaterThanOrEqual it.LastSeenOn.Value now "The last seen on date/time was not set correctly" +} + +let ``SetLastSeen succeeds when the user does not exist`` (data: IData) = task { + do! data.WebLogUser.SetLastSeen (WebLogUserId "matt") rootId + Expect.isTrue true "This not raising an exception is the test" +} + +let ``Update succeeds when the user exists`` (data: IData) = task { + let! currentUser = data.WebLogUser.FindById newId rootId + Expect.isSome currentUser "The current user should have been found" + do! data.WebLogUser.Update + { currentUser.Value with + Email = "newish@example.com" + FirstName = "New-ish" + LastName = "User-ish" + PreferredName = "n00b-ish" + PasswordHash = "hashed-ish-password" + Url = None + AccessLevel = Editor } + let! 
updated = data.WebLogUser.FindById newId rootId + Expect.isSome updated "The updated user should have been returned" + let it = updated.Value + Expect.equal it.Id newId "ID is incorrect" + Expect.equal it.WebLogId rootId "Web log ID is incorrect" + Expect.equal it.Email "newish@example.com" "E-mail address is incorrect" + Expect.equal it.FirstName "New-ish" "First name is incorrect" + Expect.equal it.LastName "User-ish" "Last name is incorrect" + Expect.equal it.PreferredName "n00b-ish" "Preferred name is incorrect" + Expect.equal it.PasswordHash "hashed-ish-password" "Password hash is incorrect" + Expect.isNone it.Url "URL is incorrect" + Expect.equal it.AccessLevel Editor "Access level is incorrect" + Expect.equal it.CreatedOn (Noda.epoch + Duration.FromDays 365) "Created on is incorrect" + Expect.isSome it.LastSeenOn "Last seen on should have had a value" +} + +let ``Update succeeds when the user does not exist`` (data: IData) = task { + do! data.WebLogUser.Update { WebLogUser.Empty with Id = WebLogUserId "nothing"; WebLogId = rootId } + let! updated = data.WebLogUser.FindById (WebLogUserId "nothing") rootId + Expect.isNone updated "The update of a missing user should not have created the user" +} + +let ``Delete fails when the user is the author of a page`` (data: IData) = task { + match! data.WebLogUser.Delete adminId rootId with + | Ok _ -> Expect.isTrue false "Deletion should have failed because the user is a page author" + | Error msg -> Expect.equal msg "User has pages or posts; cannot delete" "Error message is incorrect" +} + +let ``Delete fails when the user is the author of a post`` (data: IData) = task { + match! data.WebLogUser.Delete authorId rootId with + | Ok _ -> Expect.isTrue false "Deletion should have failed because the user is a post author" + | Error msg -> Expect.equal msg "User has pages or posts; cannot delete" "Error message is incorrect" +} + +let ``Delete succeeds when the user is not an author`` (data: IData) = task { + match! 
data.WebLogUser.Delete newId rootId with + | Ok _ -> Expect.isTrue true "This is the expected outcome" + | Error msg -> Expect.isTrue false $"Deletion unexpectedly failed (message {msg})" +} + +let ``Delete succeeds when the user does not exist`` (data: IData) = task { + match! data.WebLogUser.Delete newId rootId with // already deleted above + | Ok _ -> Expect.isTrue false "Deletion should have failed because the user does not exist" + | Error msg -> Expect.equal msg "User does not exist" "Error message is incorrect" +} diff --git a/src/MyWebLog.Tests/Domain/DataTypesTests.fs b/src/MyWebLog.Tests/Domain/DataTypesTests.fs new file mode 100644 index 0000000..8d04321 --- /dev/null +++ b/src/MyWebLog.Tests/Domain/DataTypesTests.fs @@ -0,0 +1,87 @@ +module DataTypesTests + +open Expecto +open MyWebLog + +/// Unit tests for the WebLog type +let webLogTests = testList "WebLog" [ + testList "ExtraPath" [ + test "succeeds for blank URL base" { + Expect.equal WebLog.Empty.ExtraPath "" "Extra path should have been blank for blank URL base" + } + test "succeeds for domain root URL" { + Expect.equal + { WebLog.Empty with UrlBase = "https://example.com" }.ExtraPath + "" + "Extra path should have been blank for domain root" + } + test "succeeds for single subdirectory" { + Expect.equal + { WebLog.Empty with UrlBase = "https://a.com/sub" }.ExtraPath + "/sub" + "Extra path incorrect for a single subdirectory" + } + test "succeeds for deeper nesting" { + Expect.equal + { WebLog.Empty with UrlBase = "https://b.com/users/test/units" }.ExtraPath + "/users/test/units" + "Extra path incorrect for deeper nesting" + } + ] + test "AbsoluteUrl succeeds" { + Expect.equal + ({ WebLog.Empty with UrlBase = "https://my.site" }.AbsoluteUrl(Permalink "blog/page.html")) + "https://my.site/blog/page.html" + "Absolute URL is incorrect" + } + testList "RelativeUrl" [ + test "succeeds for domain root URL" { + Expect.equal + ({ WebLog.Empty with UrlBase = "https://test.me" }.RelativeUrl(Permalink 
"about.htm")) + "/about.htm" + "Relative URL is incorrect for domain root site" + } + test "succeeds for domain non-root URL" { + Expect.equal + ({ WebLog.Empty with UrlBase = "https://site.page/a/b/c" }.RelativeUrl(Permalink "x/y/z")) + "/a/b/c/x/y/z" + "Relative URL is incorrect for domain non-root site" + } + ] + testList "LocalTime" [ + test "succeeds when no time zone is set" { + Expect.equal + (WebLog.Empty.LocalTime(Noda.epoch)) + (Noda.epoch.ToDateTimeUtc()) + "Reference should be UTC when no time zone is specified" + } + test "succeeds when time zone is set" { + Expect.equal + ({ WebLog.Empty with TimeZone = "Etc/GMT-1" }.LocalTime(Noda.epoch)) + (Noda.epoch.ToDateTimeUtc().AddHours 1) + "The time should have been adjusted by one hour" + } + ] +] + +/// Unit tests for the WebLogUser type +let webLogUserTests = testList "WebLogUser" [ + testList "DisplayName" [ + test "succeeds when a preferred name is present" { + Expect.equal + { WebLogUser.Empty with + FirstName = "Thomas"; PreferredName = "Tom"; LastName = "Tester" }.DisplayName + "Tom Tester" + "Display name incorrect when preferred name is present" + } + test "succeeds when a preferred name is absent" { + Expect.equal + { WebLogUser.Empty with FirstName = "Test"; LastName = "Units" }.DisplayName + "Test Units" + "Display name incorrect when preferred name is absent" + } + ] +] + +/// All tests for the Domain.DataTypes file +let all = testList "DataTypes" [ webLogTests; webLogUserTests ] diff --git a/src/MyWebLog.Tests/Domain/SupportTypesTests.fs b/src/MyWebLog.Tests/Domain/SupportTypesTests.fs new file mode 100644 index 0000000..b68494b --- /dev/null +++ b/src/MyWebLog.Tests/Domain/SupportTypesTests.fs @@ -0,0 +1,415 @@ +module SupportTypesTests + +open System +open Expecto +open MyWebLog +open NodaTime + +/// Tests for the NodaTime-wrapping module +let nodaTests = testList "Noda" [ + test "epoch succeeds" { + Expect.equal + (Noda.epoch.ToDateTimeUtc()) + (DateTime(1970, 1, 1, 0, 0, 0, 
DateTimeKind.Utc)) + "The Unix epoch value is not correct" + } + test "toSecondsPrecision succeeds" { + let testDate = Instant.FromDateTimeUtc(DateTime(1970, 1, 1, 0, 0, 0, 444, DateTimeKind.Utc)) + // testDate. + Expect.equal + ((Noda.toSecondsPrecision testDate).ToDateTimeUtc()) + (Noda.epoch.ToDateTimeUtc()) + "Instant value was not rounded to seconds precision" + } + test "fromDateTime succeeds" { + let testDate = DateTime(1970, 1, 1, 0, 0, 0, 444, DateTimeKind.Utc) + Expect.equal (Noda.fromDateTime testDate) Noda.epoch "fromDateTime did not truncate to seconds" + } +] + +/// Tests for the AccessLevel type +let accessLevelTests = testList "AccessLevel" [ + testList "Parse" [ + test "succeeds for \"Author\"" { + Expect.equal Author (AccessLevel.Parse "Author") "Author not parsed correctly" + } + test "succeeds for \"Editor\"" { + Expect.equal Editor (AccessLevel.Parse "Editor") "Editor not parsed correctly" + } + test "succeeds for \"WebLogAdmin\"" { + Expect.equal WebLogAdmin (AccessLevel.Parse "WebLogAdmin") "WebLogAdmin not parsed correctly" + } + test "succeeds for \"Administrator\"" { + Expect.equal Administrator (AccessLevel.Parse "Administrator") "Administrator not parsed correctly" + } + test "fails when given an unrecognized value" { + Expect.throwsT + (fun () -> ignore (AccessLevel.Parse "Hacker")) "Invalid value should have raised an exception" + } + ] + testList "ToString" [ + test "Author succeeds" { + Expect.equal (string Author) "Author" "Author string incorrect" + } + test "Editor succeeds" { + Expect.equal (string Editor) "Editor" "Editor string incorrect" + } + test "WebLogAdmin succeeds" { + Expect.equal (string WebLogAdmin) "WebLogAdmin" "WebLogAdmin string incorrect" + } + test "Administrator succeeds" { + Expect.equal (string Administrator) "Administrator" "Administrator string incorrect" + } + ] + testList "HasAccess" [ + test "Author has Author access" { + Expect.isTrue (Author.HasAccess Author) "Author should have Author access" + } + 
test "Author does not have Editor access" { + Expect.isFalse (Author.HasAccess Editor) "Author should not have Editor access" + } + test "Author does not have WebLogAdmin access" { + Expect.isFalse (Author.HasAccess WebLogAdmin) "Author should not have WebLogAdmin access" + } + test "Author does not have Administrator access" { + Expect.isFalse (Author.HasAccess Administrator) "Author should not have Administrator access" + } + test "Editor has Author access" { + Expect.isTrue (Editor.HasAccess Author) "Editor should have Author access" + } + test "Editor has Editor access" { + Expect.isTrue (Editor.HasAccess Editor) "Editor should have Editor access" + } + test "Editor does not have WebLogAdmin access" { + Expect.isFalse (Editor.HasAccess WebLogAdmin) "Editor should not have WebLogAdmin access" + } + test "Editor does not have Administrator access" { + Expect.isFalse (Editor.HasAccess Administrator) "Editor should not have Administrator access" + } + test "WebLogAdmin has Author access" { + Expect.isTrue (WebLogAdmin.HasAccess Author) "WebLogAdmin should have Author access" + } + test "WebLogAdmin has Editor access" { + Expect.isTrue (WebLogAdmin.HasAccess Editor) "WebLogAdmin should have Editor access" + } + test "WebLogAdmin has WebLogAdmin access" { + Expect.isTrue (WebLogAdmin.HasAccess WebLogAdmin) "WebLogAdmin should have WebLogAdmin access" + } + test "WebLogAdmin does not have Administrator access" { + Expect.isFalse (WebLogAdmin.HasAccess Administrator) "WebLogAdmin should not have Administrator access" + } + test "Administrator has Author access" { + Expect.isTrue (Administrator.HasAccess Author) "Administrator should have Author access" + } + test "Administrator has Editor access" { + Expect.isTrue (Administrator.HasAccess Editor) "Administrator should have Editor access" + } + test "Administrator has WebLogAdmin access" { + Expect.isTrue (Administrator.HasAccess WebLogAdmin) "Administrator should have WebLogAdmin access" + } + test "Administrator has 
Administrator access" { + Expect.isTrue (Administrator.HasAccess Administrator) "Administrator should have Administrator access" + } + ] +] + +/// Tests for the CommentStatus type +let commentStatusTests = testList "CommentStatus" [ + testList "Parse" [ + test "succeeds for \"Approved\"" { + Expect.equal Approved (CommentStatus.Parse "Approved") "Approved not parsed correctly" + } + test "succeeds for \"Pending\"" { + Expect.equal Pending (CommentStatus.Parse "Pending") "Pending not parsed correctly" + } + test "succeeds for \"Spam\"" { + Expect.equal Spam (CommentStatus.Parse "Spam") "Spam not parsed correctly" + } + test "fails for unrecognized value" { + Expect.throwsT + (fun () -> ignore (CommentStatus.Parse "Live")) "Invalid value should have raised an exception" + } + ] + testList "ToString" [ + test "Approved succeeds" { + Expect.equal (string Approved) "Approved" "Approved string incorrect" + } + test "Pending succeeds" { + Expect.equal (string Pending) "Pending" "Pending string incorrect" + } + test "Spam succeeds" { + Expect.equal (string Spam) "Spam" "Spam string incorrect" + } + ] +] + +/// Tests for the ExplicitRating type +let explicitRatingTests = testList "ExplicitRating" [ + testList "Parse" [ + test "succeeds for \"yes\"" { + Expect.equal Yes (ExplicitRating.Parse "yes") "\"yes\" not parsed correctly" + } + test "succeeds for \"no\"" { + Expect.equal No (ExplicitRating.Parse "no") "\"no\" not parsed correctly" + } + test "succeeds for \"clean\"" { + Expect.equal Clean (ExplicitRating.Parse "clean") "\"clean\" not parsed correctly" + } + test "fails for unrecognized value" { + Expect.throwsT + (fun () -> ignore (ExplicitRating.Parse "maybe")) "Invalid value should have raised an exception" + } + ] + testList "ToString" [ + test "Yes succeeds" { + Expect.equal (string Yes) "yes" "Yes string incorrect" + } + test "No succeeds" { + Expect.equal (string No) "no" "No string incorrect" + } + test "Clean succeeds" { + Expect.equal (string Clean) "clean" 
"Clean string incorrect" + } + ] +] + +/// Tests for the Episode type +let episodeTests = testList "Episode" [ + testList "FormatDuration" [ + test "succeeds when no duration is specified" { + Expect.isNone (Episode.Empty.FormatDuration()) "A missing duration should have returned None" + } + test "succeeds when duration is specified" { + Expect.equal + ({ Episode.Empty with + Duration = Some (Duration.FromMinutes 3L + Duration.FromSeconds 13L) }.FormatDuration()) + (Some "0:03:13") + "Duration not formatted correctly" + } + test "succeeds when duration is > 10 hours" { + Expect.equal + ({ Episode.Empty with Duration = Some (Duration.FromHours 11) }.FormatDuration()) + (Some "11:00:00") + "Duration not formatted correctly" + } + ] +] + +/// Unit tests for the MarkupText type +let markupTextTests = testList "MarkupText" [ + testList "Parse" [ + test "succeeds with HTML content" { + let txt = MarkupText.Parse "HTML:

howdy

" + match txt with + | Html it when it = "

howdy

" -> () + | _ -> Expect.isTrue false $"Unexpected parse result for HTML: %A{txt}" + } + test "succeeds with Markdown content" { + let txt = MarkupText.Parse "Markdown: # A Title" + match txt with + | Markdown it when it = "# A Title" -> () + | _ -> Expect.isTrue false $"Unexpected parse result for Markdown: %A{txt}" + } + test "fails with unexpected content" { + Expect.throwsT + (fun () -> ignore (MarkupText.Parse "LaTEX: nope")) "Invalid value should have raised an exception" + } + ] + testList "SourceType" [ + test "succeeds for HTML" { + Expect.equal (MarkupText.Parse "HTML: something").SourceType "HTML" "HTML source type incorrect" + } + test "succeeds for Markdown" { + Expect.equal (MarkupText.Parse "Markdown: blah").SourceType "Markdown" "Markdown source type incorrect" + } + ] + testList "Text" [ + test "succeeds for HTML" { + Expect.equal (MarkupText.Parse "HTML: test").Text "test" "HTML text incorrect" + } + test "succeeds for Markdown" { + Expect.equal (MarkupText.Parse "Markdown: test!").Text "test!" "Markdown text incorrect" + } + ] + testList "ToString" [ + test "succeeds for HTML" { + Expect.equal + (string (MarkupText.Parse "HTML:

HTML

")) "HTML:

HTML

" "HTML string value incorrect" + } + test "succeeds for Markdown" { + Expect.equal + (string (MarkupText.Parse "Markdown: # Some Content")) + "Markdown: # Some Content" + "Markdown string value incorrect" + } + ] + testList "AsHtml" [ + test "succeeds for HTML" { + Expect.equal + ((MarkupText.Parse "HTML:

The Heading

").AsHtml()) "

The Heading

" "HTML value incorrect" + } + test "succeeds for Markdown" { + Expect.equal + ((MarkupText.Parse "Markdown: *emphasis*").AsHtml()) + "

emphasis

\n" + "Markdown HTML value incorrect" + } + ] +] + +/// Unit tests for the PodcastMedium type +let podcastMediumTests = testList "PodcastMedium" [ + testList "Parse" [ + test "succeeds for \"podcast\"" { + Expect.equal (PodcastMedium.Parse "podcast") Podcast "\"podcast\" not parsed correctly" + } + test "succeeds for \"music\"" { + Expect.equal (PodcastMedium.Parse "music") Music "\"music\" not parsed correctly" + } + test "succeeds for \"video\"" { + Expect.equal (PodcastMedium.Parse "video") Video "\"video\" not parsed correctly" + } + test "succeeds for \"film\"" { + Expect.equal (PodcastMedium.Parse "film") Film "\"film\" not parsed correctly" + } + test "succeeds for \"audiobook\"" { + Expect.equal (PodcastMedium.Parse "audiobook") Audiobook "\"audiobook\" not parsed correctly" + } + test "succeeds for \"newsletter\"" { + Expect.equal (PodcastMedium.Parse "newsletter") Newsletter "\"newsletter\" not parsed correctly" + } + test "succeeds for \"blog\"" { + Expect.equal (PodcastMedium.Parse "blog") Blog "\"blog\" not parsed correctly" + } + test "fails for invalid type" { + Expect.throwsT + (fun () -> ignore (PodcastMedium.Parse "laser")) "Invalid value should have raised an exception" + } + ] + testList "ToString" [ + test "succeeds for Podcast" { + Expect.equal (string Podcast) "podcast" "Podcast string incorrect" + } + test "succeeds for Music" { + Expect.equal (string Music) "music" "Music string incorrect" + } + test "succeeds for Video" { + Expect.equal (string Video) "video" "Video string incorrect" + } + test "succeeds for Film" { + Expect.equal (string Film) "film" "Film string incorrect" + } + test "succeeds for Audiobook" { + Expect.equal (string Audiobook) "audiobook" "Audiobook string incorrect" + } + test "succeeds for Newsletter" { + Expect.equal (string Newsletter) "newsletter" "Newsletter string incorrect" + } + test "succeeds for Blog" { + Expect.equal (string Blog) "blog" "Blog string incorrect" + } + ] +] + +/// Unit tests for the PostStatus 
type +let postStatusTests = testList "PostStatus" [ + testList "Parse" [ + test "succeeds for \"Draft\"" { + Expect.equal (PostStatus.Parse "Draft") Draft "\"Draft\" not parsed correctly" + } + test "succeeds for \"Published\"" { + Expect.equal (PostStatus.Parse "Published") Published "\"Published\" not parsed correctly" + } + test "fails for unrecognized value" { + Expect.throwsT + (fun () -> ignore (PostStatus.Parse "Rescinded")) "Invalid value should have raised an exception" + } + ] +] + +/// Unit tests for the CustomFeedSource type +let customFeedSourceTests = testList "CustomFeedSource" [ + testList "Parse" [ + test "succeeds for category feeds" { + Expect.equal + (CustomFeedSource.Parse "category:abc123") + (Category (CategoryId "abc123")) + "Category feed not parsed correctly" + } + test "succeeds for tag feeds" { + Expect.equal (CustomFeedSource.Parse "tag:turtles") (Tag "turtles") "Tag feed not parsed correctly" + } + test "fails for unknown type" { + Expect.throwsT + (fun () -> ignore (CustomFeedSource.Parse "nasa:sat1")) "Invalid value should have raised an exception" + } + ] + testList "ToString" [ + test "succeeds for category feed" { + Expect.equal + (string (CustomFeedSource.Parse "category:fish")) "category:fish" "Category feed string incorrect" + } + test "succeeds for tag feed" { + Expect.equal (string (CustomFeedSource.Parse "tag:rocks")) "tag:rocks" "Tag feed string incorrect" + } + ] +] + +/// Unit tests for the ThemeAssetId type +let themeAssetIdTests = testList "ThemeAssetId" [ + testList "Parse" [ + test "succeeds with expected values" { + Expect.equal + (ThemeAssetId.Parse "test-theme/the-asset") + (ThemeAssetId ((ThemeId "test-theme"), "the-asset")) + "Theme asset ID not parsed correctly" + } + test "fails if no slash is present" { + Expect.throwsT + (fun () -> ignore (ThemeAssetId.Parse "my-theme-asset")) "Invalid value should have raised an exception" + } + ] + test "ToString succeeds" { + Expect.equal + (string (ThemeAssetId ((ThemeId 
"howdy"), "pardner"))) "howdy/pardner" "Theme asset ID string incorrect" + } +] + +/// Unit tests for the UploadDestination type +let uploadDestinationTests = testList "UploadDestination" [ + testList "Parse" [ + test "succeeds for \"Database\"" { + Expect.equal (UploadDestination.Parse "Database") Database "\"Database\" not parsed correctly" + } + test "succeeds for \"Disk\"" { + Expect.equal (UploadDestination.Parse "Disk") Disk "\"Disk\" not parsed correctly" + } + test "fails for unrecognized value" { + Expect.throwsT + (fun () -> ignore (UploadDestination.Parse "Azure")) "Invalid value should have raised an exception" + } + ] + testList "ToString" [ + test "succeeds for Database" { + Expect.equal (string Database) "Database" "Database string incorrect" + } + test "succeeds for Disk" { + Expect.equal (string Disk) "Disk" "Disk string incorrect" + } + ] +] + +/// All tests for the Domain.SupportTypes file +let all = testList "SupportTypes" [ + nodaTests + accessLevelTests + commentStatusTests + explicitRatingTests + episodeTests + markupTextTests + podcastMediumTests + postStatusTests + customFeedSourceTests + themeAssetIdTests + uploadDestinationTests +] diff --git a/src/MyWebLog.Tests/Domain/ViewModelsTests.fs b/src/MyWebLog.Tests/Domain/ViewModelsTests.fs new file mode 100644 index 0000000..da29a2b --- /dev/null +++ b/src/MyWebLog.Tests/Domain/ViewModelsTests.fs @@ -0,0 +1,1246 @@ +module ViewModelsTests + +open System +open Expecto +open MyWebLog +open MyWebLog.ViewModels +open NodaTime + +/// Unit tests for the addBaseToRelativeUrls helper function +let addBaseToRelativeUrlsTests = testList "PublicHelpers.addBaseToRelativeUrls" [ + test "succeeds for quoted URLs when there is no extra URL path" { + let testText = """""" + let modified = addBaseToRelativeUrls "" testText + Expect.equal modified testText "The text should not have been modified" + } + test "succeeds for quoted URLs with an extra URL path" { + let testText = + """link""" + let expected = + 
"""link""" + Expect.equal (addBaseToRelativeUrls "/a/b" testText) expected "Relative URLs not modified correctly" + } + test "succeeds for unquoted URLs when there is no extra URL path" { + let testText = "" + let modified = addBaseToRelativeUrls "" testText + Expect.equal modified testText "The text should not have been modified" + } + test "succeeds for unquoted URLs with an extra URL path" { + let testText = "link" + let expected = + "link" + Expect.equal (addBaseToRelativeUrls "/a/b" testText) expected "Relative URLs not modified correctly" + } +] + +/// Unit tests for the DisplayPage type +let displayPageTests = testList "DisplayPage" [ + let page = + { Page.Empty with + Id = PageId "my-page" + AuthorId = WebLogUserId "jim" + Title = "A Fine Example" + Permalink = Permalink "about/a-fine-example.html" + PublishedOn = Noda.epoch + UpdatedOn = Noda.epoch + Duration.FromHours 1 + Text = """Click Me!""" + Metadata = [ { Name = "unit"; Value = "test" } ] } + testList "FromPageMinimal" [ + test "succeeds when page is default page" { + let webLog = { WebLog.Empty with TimeZone = "Etc/GMT-1"; DefaultPage = "my-page" } + let model = DisplayPage.FromPageMinimal webLog page + Expect.equal model.Id "my-page" "Id not filled properly" + Expect.equal model.AuthorId "jim" "AuthorId not filled properly" + Expect.equal model.Title "A Fine Example" "Title not filled properly" + Expect.equal model.Permalink "about/a-fine-example.html" "Permalink not filled properly" + Expect.equal + model.PublishedOn + ((Noda.epoch + Duration.FromHours 1).ToDateTimeUtc()) + "PublishedOn not filled properly" + Expect.equal + model.UpdatedOn ((Noda.epoch + Duration.FromHours 2).ToDateTimeUtc()) "UpdatedOn not filled properly" + Expect.isFalse model.IsInPageList "IsInPageList should not have been set" + Expect.isTrue model.IsDefault "IsDefault should have been set" + Expect.equal model.Text "" "Text should have been blank" + Expect.isEmpty model.Metadata "Metadata should have been empty" + } + test 
"succeeds when page is not the default page" { + let model = DisplayPage.FromPageMinimal { WebLog.Empty with DefaultPage = "posts" } page + Expect.isFalse model.IsDefault "IsDefault should not have been set" + } + ] + testList "FromPage" [ + test "succeeds when the web log is on the domain root" { + let webLog = { WebLog.Empty with TimeZone = "Etc/GMT-4"; UrlBase = "https://example.com" } + let model = DisplayPage.FromPage webLog page + Expect.equal model.Id "my-page" "Id not filled properly" + Expect.equal model.AuthorId "jim" "AuthorId not filled properly" + Expect.equal model.Title "A Fine Example" "Title not filled properly" + Expect.equal model.Permalink "about/a-fine-example.html" "Permalink not filled properly" + Expect.equal + model.PublishedOn + ((Noda.epoch + Duration.FromHours 4).ToDateTimeUtc()) + "PublishedOn not filled properly" + Expect.equal + model.UpdatedOn + ((Noda.epoch + Duration.FromHours 5).ToDateTimeUtc()) + "UpdatedOn not filled properly" + Expect.isFalse model.IsInPageList "IsInPageList should not have been set" + Expect.isFalse model.IsDefault "IsDefault should not have been set" + Expect.equal model.Text """Click Me!""" "Text not filled properly" + Expect.equal model.Metadata.Length 1 "Metadata not filled properly" + } + test "succeeds when the web log is not on the domain root" { + let model = DisplayPage.FromPage { WebLog.Empty with UrlBase = "https://example.com/a/b/c" } page + Expect.equal model.Text """Click Me!""" "Text not filled properly" + } + ] +] + +open System.IO + +/// Unit tests for the DisplayTheme type +let displayThemeTests = testList "DisplayTheme.FromTheme" [ + let theme = + { Id = ThemeId "the-theme" + Name = "Test Theme" + Version = "v0.1.2" + Templates = [ ThemeTemplate.Empty; ThemeTemplate.Empty ] } + test "succeeds when theme is in use and not on disk" { + let model = + DisplayTheme.FromTheme + (fun it -> Expect.equal it (ThemeId "the-theme") "The theme ID not passed correctly"; true) theme + Expect.equal model.Id 
"the-theme" "Id not filled properly" + Expect.equal model.Name "Test Theme" "Name not filled properly" + Expect.equal model.Version "v0.1.2" "Version not filled properly" + Expect.equal model.TemplateCount 2 "TemplateCount not filled properly" + Expect.isTrue model.IsInUse "IsInUse should have been set" + Expect.isFalse model.IsOnDisk "IsOnDisk should not have been set" + } + test "succeeds when a non-default theme is not in use and is on disk" { + let dir = Directory.CreateDirectory "themes" + let file = File.Create "./themes/another-theme.zip" + try + let model = DisplayTheme.FromTheme (fun _ -> false) { theme with Id = ThemeId "another" } + Expect.isFalse model.IsInUse "IsInUse should not have been set" + Expect.isTrue model.IsOnDisk "IsOnDisk should have been set" + finally + file.Close() + file.Dispose() + File.Delete "./themes/another-theme.zip" + dir.Delete() + } + test "succeeds when the default theme is on disk" { + let file = File.Create "./default-theme.zip" + try + Expect.isTrue + (DisplayTheme.FromTheme (fun _ -> false) { theme with Id = ThemeId "default" }).IsOnDisk + "IsOnDisk should have been set" + finally + file.Close() + file.Dispose() + File.Delete "./default-theme.zip" + } +] + +/// Unit tests for the DisplayUpload type +let displayUploadTests = test "DisplayUpload.FromUpload succeeds" { + let upload = + { Upload.Empty with + Id = UploadId "test-up" + Path = Permalink "2022/04/my-pic.jpg" + UpdatedOn = Noda.epoch } + let model = DisplayUpload.FromUpload { WebLog.Empty with TimeZone = "Etc/GMT-1" } Database upload + Expect.equal model.Id "test-up" "Id not filled properly" + Expect.equal model.Name "my-pic.jpg" "Name not filled properly" + Expect.equal model.Path "2022/04/" "Path not filled properly" + Expect.equal model.Source "Database" "Source not filled properly" + Expect.isSome model.UpdatedOn "There should have been an UpdatedOn value" + Expect.equal + model.UpdatedOn.Value ((Noda.epoch + Duration.FromHours 1).ToDateTimeUtc()) "UpdatedOn 
not filled properly" +} + +/// Unit tests for the EditCategoryModel type +let editCategoryModelTests = testList "EditCategoryModel" [ + testList "FromCategory" [ + let minimalCat = { Category.Empty with Id = CategoryId "test-cat"; Name = "test"; Slug = "test-slug" } + test "succeeds with minimal information" { + let model = EditCategoryModel.FromCategory minimalCat + Expect.equal model.CategoryId "test-cat" "CategoryId not filled properly" + Expect.equal model.Name "test" "Name not filled properly" + Expect.equal model.Slug "test-slug" "Slug not filled properly" + Expect.equal model.Description "" "Description not filled properly" + Expect.equal model.ParentId "" "ParentId not filled properly" + } + test "succeeds with complete information" { + let model = + EditCategoryModel.FromCategory + { minimalCat with Description = Some "Testing"; ParentId = Some (CategoryId "parent") } + Expect.equal model.Description "Testing" "Description not filled properly" + Expect.equal model.ParentId "parent" "ParentId not filled properly" + } + ] + testList "IsNew" [ + test "succeeds for a new category" { + let model = EditCategoryModel.FromCategory { Category.Empty with Id = CategoryId "new" } + Expect.isTrue model.IsNew "Category should have been considered new" + } + test "succeeds for a non-new category" { + let model = EditCategoryModel.FromCategory Category.Empty + Expect.isFalse model.IsNew "Category should not have been considered new" + } + ] +] + +/// A full page used to test various models +let private testFullPage = + { Page.Empty with + Id = PageId "the-page" + Title = "Test Page" + Permalink = Permalink "blog/page.html" + Template = Some "bork" + IsInPageList = true + Revisions = + [ { AsOf = Noda.epoch + Duration.FromHours 1; Text = Markdown "# Howdy!" } + { AsOf = Noda.epoch; Text = Html "

howdy

" } ] + Metadata = [ { Name = "Test"; Value = "me" }; { Name = "Two"; Value = "2" } ] } + +/// A full post used to test various models +let testFullPost = + { Post.Empty with + Id = PostId "a-post" + Status = Published + Title = "A Post" + Permalink = Permalink "1970/01/a-post.html" + PublishedOn = Some (Noda.epoch + Duration.FromDays 7) + UpdatedOn = Noda.epoch + Duration.FromDays 365 + Template = Some "demo" + Text = "

A post!

" + CategoryIds = [ CategoryId "cat-a"; CategoryId "cat-b"; CategoryId "cat-n" ] + Tags = [ "demo"; "post" ] + Metadata = [ { Name = "A Meta"; Value = "A Value" } ] + Revisions = + [ { AsOf = Noda.epoch + Duration.FromDays 365; Text = Html "

A post!

" } + { AsOf = Noda.epoch + Duration.FromDays 7; Text = Markdown "A post!" } ] + Episode = + Some { Media = "a-post-ep.mp3" + Length = 15555L + Duration = Some (Duration.FromMinutes 15L + Duration.FromSeconds 22L) + MediaType = Some "audio/mpeg3" + ImageUrl = Some "uploads/podcast-cover.jpg" + Subtitle = Some "Narration" + Explicit = Some Clean + Chapters = None + ChapterFile = Some "uploads/1970/01/chapters.txt" + ChapterType = Some "chapters" + ChapterWaypoints = Some true + TranscriptUrl = Some "uploads/1970/01/transcript.txt" + TranscriptType = Some "transcript" + TranscriptLang = Some "EN-us" + TranscriptCaptions = Some true + SeasonNumber = Some 3 + SeasonDescription = Some "Season Three" + EpisodeNumber = Some 322. + EpisodeDescription = Some "Episode 322" } } + +/// Unit tests for the EditCommonModel type +let editCommonModelTests = testList "EditCommonModel" [ + testList "IsNew" [ + test "succeeds for a new page or post" { + Expect.isTrue (EditCommonModel(Id = "new")).IsNew "IsNew should have been set" + } + test "succeeds for an existing page or post" { + Expect.isFalse (EditCommonModel(Id = string (PageId.Create ()))).IsNew "IsNew should not have been set" + } + ] + testList "PopulateFromPage" [ + test "succeeds for empty page" { + let model = EditCommonModel() + model.PopulateFromPage { Page.Empty with Id = PageId "abc" } + Expect.equal model.Id "abc" "PageId not filled properly" + Expect.equal model.Title "" "Title not filled properly" + Expect.equal model.Permalink "" "Permalink not filled properly" + Expect.equal model.Template "" "Template not filled properly" + Expect.equal model.Source "HTML" "Source not filled properly" + Expect.equal model.Text "" "Text not set properly" + Expect.equal model.MetaNames.Length 1 "MetaNames should have one entry" + Expect.equal model.MetaNames[0] "" "Meta name not set properly" + Expect.equal model.MetaValues.Length 1 "MetaValues should have one entry" + Expect.equal model.MetaValues[0] "" "Meta value not set 
properly" + } + test "succeeds for filled page" { + let model = EditCommonModel() + model.PopulateFromPage testFullPage + Expect.equal model.Id "the-page" "PageId not filled properly" + Expect.equal model.Title "Test Page" "Title not filled properly" + Expect.equal model.Permalink "blog/page.html" "Permalink not filled properly" + Expect.equal model.Template "bork" "Template not filled properly" + Expect.equal model.Source "Markdown" "Source not filled properly" + Expect.equal model.Text "# Howdy!" "Text not filled properly" + Expect.equal model.MetaNames.Length 2 "MetaNames should have two entries" + Expect.equal model.MetaNames[0] "Test" "Meta name 0 not set properly" + Expect.equal model.MetaNames[1] "Two" "Meta name 1 not set properly" + Expect.equal model.MetaValues.Length 2 "MetaValues should have two entries" + Expect.equal model.MetaValues[0] "me" "Meta value 0 not set properly" + Expect.equal model.MetaValues[1] "2" "Meta value 1 not set properly" + } + ] + testList "PopulateFromPost" [ + test "succeeds for empty post" { + let model = EditCommonModel() + model.PopulateFromPost { Post.Empty with Id = PostId "la-la-la" } + Expect.equal model.Id "la-la-la" "PostId not filled properly" + Expect.equal model.Title "" "Title not filled properly" + Expect.equal model.Permalink "" "Permalink not filled properly" + Expect.equal model.Source "HTML" "Source not filled properly" + Expect.equal model.Text "" "Text not filled properly" + Expect.equal model.Template "" "Template not filled properly" + Expect.equal model.MetaNames.Length 1 "MetaNames not filled properly" + Expect.equal model.MetaNames[0] "" "Meta name 0 not filled properly" + Expect.equal model.MetaValues.Length 1 "MetaValues not filled properly" + Expect.equal model.MetaValues[0] "" "Meta value 0 not filled properly" + } + test "succeeds for full post with external chapters" { + let model = EditCommonModel() + model.PopulateFromPost testFullPost + Expect.equal model.Id "a-post" "PostId not filled 
properly" + Expect.equal model.Title "A Post" "Title not filled properly" + Expect.equal model.Permalink "1970/01/a-post.html" "Permalink not filled properly" + Expect.equal model.Source "HTML" "Source not filled properly" + Expect.equal model.Text "

A post!

" "Text not filled properly" + Expect.equal model.Template "demo" "Template not filled properly" + Expect.equal model.MetaNames.Length 1 "MetaNames not filled properly" + Expect.equal model.MetaNames[0] "A Meta" "Meta name 0 not filled properly" + Expect.equal model.MetaValues.Length 1 "MetaValues not filled properly" + Expect.equal model.MetaValues[0] "A Value" "Meta value 0 not filled properly" + } + ] +] + +/// Unit tests for the EditCustomFeedModel type +let editCustomFeedModelTests = testList "EditCustomFeedModel" [ + let minimalPodcast = + { PodcastOptions.Empty with + Title = "My Minimal Podcast" + Summary = "As little as possible" + DisplayedAuthor = "The Tester" + Email = "thetester@example.com" + ImageUrl = Permalink "upload/my-image.png" + AppleCategory = "News" + Explicit = Clean } + // A GUID with all zeroes, ending in "a" + let aGuid = + let guidBytes = Guid.Empty.ToByteArray() + guidBytes[15] <- byte 10 + Guid guidBytes + let fullPodcast = + { minimalPodcast with + Subtitle = Some "A Podcast about Little" + ItemsInFeed = 17 + AppleSubcategory = Some "Analysis" + DefaultMediaType = Some "video/mpeg4" + MediaBaseUrl = Some "a/b/c" + PodcastGuid = Some aGuid + FundingUrl = Some "https://pay.me" + FundingText = Some "Gimme Money!" 
+ Medium = Some Newsletter } + testList "FromFeed" [ + test "succeeds with no podcast" { + let model = + EditCustomFeedModel.FromFeed + { Id = CustomFeedId "test-feed" + Source = Category (CategoryId "no-podcast") + Path = Permalink "no-podcast.xml" + Podcast = None } + Expect.equal model.Id "test-feed" "Id not filled properly" + Expect.equal model.SourceType "category" "SourceType not filled properly" + Expect.equal model.SourceValue "no-podcast" "SourceValue not filled properly" + Expect.equal model.Path "no-podcast.xml" "Path not filled properly" + Expect.isFalse model.IsPodcast "IsPodcast should not have been set" + Expect.equal model.Title "" "Title should be the default value" + Expect.equal model.Subtitle "" "Subtitle should be the default value" + Expect.equal model.ItemsInFeed 25 "ItemsInFeed should be the default value" + Expect.equal model.Summary "" "Summary should be the default value" + Expect.equal model.DisplayedAuthor "" "DisplayedAuthor should be the default value" + Expect.equal model.Email "" "Email should be the default value" + Expect.equal model.ImageUrl "" "ImageUrl should be the default value" + Expect.equal model.AppleCategory "" "AppleCategory should be the default value" + Expect.equal model.AppleSubcategory "" "AppleSubcategory should be the default value" + Expect.equal model.Explicit "no" "Explicit should be the default value" + Expect.equal model.DefaultMediaType "audio/mpeg" "DefaultMediaType should be the default value" + Expect.equal model.MediaBaseUrl "" "MediaBaseUrl should be the default value" + Expect.equal model.FundingUrl "" "FundingUrl should be the default value" + Expect.equal model.FundingText "" "FundingText should be the default value" + Expect.equal model.PodcastGuid "" "PodcastGuid should be the default value" + Expect.equal model.Medium "" "Medium should be the default value" + } + test "succeeds with minimal podcast" { + let model = + EditCustomFeedModel.FromFeed + { Id = CustomFeedId "minimal-feed" + Source = Tag 
"min-podcast" + Path = Permalink "min-podcast.xml" + Podcast = Some minimalPodcast } + Expect.equal model.Id "minimal-feed" "Id not filled properly" + Expect.equal model.SourceType "tag" "SourceType not filled properly" + Expect.equal model.SourceValue "min-podcast" "SourceValue not filled properly" + Expect.equal model.Path "min-podcast.xml" "Path not filled properly" + Expect.isTrue model.IsPodcast "IsPodcast should have been set" + Expect.equal model.Title "My Minimal Podcast" "Title not filled properly" + Expect.equal model.Subtitle "" "Subtitle not filled properly (should be blank)" + Expect.equal model.ItemsInFeed 0 "ItemsInFeed not filled properly" + Expect.equal model.Summary "As little as possible" "Summary not filled properly" + Expect.equal model.DisplayedAuthor "The Tester" "DisplayedAuthor not filled properly" + Expect.equal model.Email "thetester@example.com" "Email not filled properly" + Expect.equal model.ImageUrl "upload/my-image.png" "ImageUrl not filled properly" + Expect.equal model.AppleCategory "News" "AppleCategory not filled properly" + Expect.equal model.AppleSubcategory "" "AppleSubcategory not filled properly (should be blank)" + Expect.equal model.Explicit "clean" "Explicit not filled properly" + Expect.equal model.DefaultMediaType "" "DefaultMediaType not filled properly (should be blank)" + Expect.equal model.MediaBaseUrl "" "MediaBaseUrl not filled properly (should be blank)" + Expect.equal model.FundingUrl "" "FundingUrl not filled properly (should be blank)" + Expect.equal model.FundingText "" "FundingText not filled properly (should be blank)" + Expect.equal model.PodcastGuid "" "PodcastGuid not filled properly (should be blank)" + Expect.equal model.Medium "" "Medium not filled properly (should be blank)" + } + test "succeeds with full podcast" { + let model = + EditCustomFeedModel.FromFeed + { Id = CustomFeedId "full-feed" + Source = Tag "whole-enchilada" + Path = Permalink "full-podcast.xml" + Podcast = Some fullPodcast } + 
Expect.equal model.Id "full-feed" "Id not filled properly" + Expect.equal model.SourceType "tag" "SourceType not filled properly" + Expect.equal model.SourceValue "whole-enchilada" "SourceValue not filled properly" + Expect.equal model.Path "full-podcast.xml" "Path not filled properly" + Expect.isTrue model.IsPodcast "IsPodcast should have been set" + Expect.equal model.Title "My Minimal Podcast" "Title not filled properly" + Expect.equal model.Subtitle "A Podcast about Little" "Subtitle not filled properly" + Expect.equal model.ItemsInFeed 17 "ItemsInFeed not filled properly" + Expect.equal model.Summary "As little as possible" "Summary not filled properly" + Expect.equal model.DisplayedAuthor "The Tester" "DisplayedAuthor not filled properly" + Expect.equal model.Email "thetester@example.com" "Email not filled properly" + Expect.equal model.ImageUrl "upload/my-image.png" "ImageUrl not filled properly" + Expect.equal model.AppleCategory "News" "AppleCategory not filled properly" + Expect.equal model.AppleSubcategory "Analysis" "AppleSubcategory not filled properly" + Expect.equal model.Explicit "clean" "Explicit not filled properly" + Expect.equal model.DefaultMediaType "video/mpeg4" "DefaultMediaType not filled properly" + Expect.equal model.MediaBaseUrl "a/b/c" "MediaBaseUrl not filled properly" + Expect.equal model.FundingUrl "https://pay.me" "FundingUrl not filled properly" + Expect.equal model.FundingText "Gimme Money!" 
"FundingText not filled properly" + Expect.equal model.PodcastGuid "00000000-0000-0000-0000-00000000000a" "PodcastGuid not filled properly" + Expect.equal model.Medium "newsletter" "Medium not filled properly" + } + ] + testList "UpdateFeed" [ + test "succeeds with no podcast" { + let model = + { EditCustomFeedModel.Empty with SourceType = "tag"; SourceValue = "no-audio"; Path = "no-podcast.xml" } + let feed = + model.UpdateFeed + { CustomFeed.Empty with Id = CustomFeedId "no-podcast-feed"; Podcast = Some fullPodcast } + Expect.equal feed.Id (CustomFeedId "no-podcast-feed") "Id not filled properly" + Expect.equal feed.Source (Tag "no-audio") "Source not filled properly" + Expect.equal feed.Path (Permalink "no-podcast.xml") "Path not filled properly" + Expect.isNone feed.Podcast "Podcast not filled properly" + } + test "succeeds with minimal podcast" { + let model = EditCustomFeedModel.FromFeed { CustomFeed.Empty with Podcast = Some minimalPodcast } + let feed = model.UpdateFeed CustomFeed.Empty + Expect.equal feed.Source (Category (CategoryId "")) "Source not filled properly" + Expect.equal feed.Path (Permalink "") "Path not filled properly" + Expect.isSome feed.Podcast "Podcast should be present" + let podcast = feed.Podcast.Value + Expect.equal podcast.Title "My Minimal Podcast" "Podcast title not filled properly" + Expect.isNone podcast.Subtitle "Podcast subtitle not filled properly" + Expect.equal podcast.ItemsInFeed 0 "Podcast items in feed not filled properly" + Expect.equal podcast.Summary "As little as possible" "Podcast summary not filled properly" + Expect.equal podcast.DisplayedAuthor "The Tester" "Podcast author not filled properly" + Expect.equal podcast.Email "thetester@example.com" "Podcast email not filled properly" + Expect.equal podcast.Explicit Clean "Podcast explicit rating not filled properly" + Expect.equal podcast.AppleCategory "News" "Podcast Apple category not filled properly" + Expect.isNone podcast.AppleSubcategory "Podcast Apple 
subcategory not filled properly" + Expect.isNone podcast.DefaultMediaType "Podcast default media type not filled properly" + Expect.isNone podcast.MediaBaseUrl "Podcast media base URL not filled properly" + Expect.isNone podcast.PodcastGuid "Podcast GUID not filled properly" + Expect.isNone podcast.FundingUrl "Podcast funding URL not filled properly" + Expect.isNone podcast.FundingText "Podcast funding text not filled properly" + Expect.isNone podcast.Medium "Podcast medium not filled properly" + } + test "succeeds with full podcast" { + let model = EditCustomFeedModel.FromFeed { CustomFeed.Empty with Podcast = Some fullPodcast } + let feed = model.UpdateFeed CustomFeed.Empty + Expect.equal feed.Source (Category (CategoryId "")) "Source not filled properly" + Expect.equal feed.Path (Permalink "") "Path not filled properly" + Expect.isSome feed.Podcast "Podcast should be present" + let podcast = feed.Podcast.Value + Expect.equal podcast.Title "My Minimal Podcast" "Podcast title not filled properly" + Expect.equal podcast.Subtitle (Some "A Podcast about Little") "Podcast subtitle not filled properly" + Expect.equal podcast.ItemsInFeed 17 "Podcast items in feed not filled properly" + Expect.equal podcast.Summary "As little as possible" "Podcast summary not filled properly" + Expect.equal podcast.DisplayedAuthor "The Tester" "Podcast author not filled properly" + Expect.equal podcast.Email "thetester@example.com" "Podcast email not filled properly" + Expect.equal podcast.Explicit Clean "Podcast explicit rating not filled properly" + Expect.equal podcast.AppleCategory "News" "Podcast Apple category not filled properly" + Expect.equal podcast.AppleSubcategory (Some "Analysis") "Podcast Apple subcategory not filled properly" + Expect.equal podcast.DefaultMediaType (Some "video/mpeg4") "Podcast default media type not filled properly" + Expect.equal podcast.MediaBaseUrl (Some "a/b/c") "Podcast media base URL not filled properly" + Expect.equal podcast.PodcastGuid (Some 
aGuid) "Podcast GUID not filled properly" + Expect.equal podcast.FundingUrl (Some "https://pay.me") "Podcast funding URL not filled properly" + Expect.equal podcast.FundingText (Some "Gimme Money!") "Podcast funding text not filled properly" + Expect.equal podcast.Medium (Some Newsletter) "Podcast medium not filled properly" + } + ] +] + +/// Unit tests for the EditMyInfoModel type +let editMyInfoModelTests = test "EditMyInfoModel.FromUser succeeds" { + let model = EditMyInfoModel.FromUser { WebLogUser.Empty with FirstName = "A"; LastName = "B"; PreferredName = "C" } + Expect.equal model.FirstName "A" "FirstName not filled properly" + Expect.equal model.LastName "B" "LastName not filled properly" + Expect.equal model.PreferredName "C" "PreferredName not filled properly" + Expect.equal model.NewPassword "" "NewPassword not filled properly" + Expect.equal model.NewPasswordConfirm "" "NewPasswordConfirm not filled properly" +} + +/// Unit tests for the EditPageModel type +let editPageModelTests = testList "EditPageModel" [ + testList "FromPage" [ + test "succeeds for empty page" { + let model = EditPageModel.FromPage { Page.Empty with Id = PageId "abc" } + Expect.equal model.Id "abc" "Parent fields not filled properly" + Expect.isFalse model.IsShownInPageList "IsShownInPageList should not have been set" + } + test "succeeds for filled page" { + let model = EditPageModel.FromPage testFullPage + Expect.equal model.Id "the-page" "Parent fields not filled properly" + Expect.isTrue model.IsShownInPageList "IsShownInPageList should have been set" + } + ] + testList "UpdatePage" [ + test "succeeds with minimal changes" { + let model = EditPageModel.FromPage testFullPage + model.Title <- "Updated Page" + model.IsShownInPageList <- false + let page = model.UpdatePage testFullPage (Noda.epoch + Duration.FromHours 4) + Expect.equal page.Title "Updated Page" "Title not filled properly" + Expect.equal page.Permalink (Permalink "blog/page.html") "Permalink not filled properly" + 
Expect.isEmpty page.PriorPermalinks "PriorPermalinks should be empty" + Expect.equal page.UpdatedOn (Noda.epoch + Duration.FromHours 4) "UpdatedOn not filled properly" + Expect.isFalse page.IsInPageList "IsInPageList should have been unset" + Expect.equal page.Template (Some "bork") "Template not filled properly" + Expect.equal page.Text "

Howdy!

\n" "Text not filled properly" + Expect.equal page.Metadata.Length 2 "There should be 2 metadata items" + let item1 = List.item 0 page.Metadata + Expect.equal item1.Name "Test" "Meta item 0 name not filled properly" + Expect.equal item1.Value "me" "Meta item 0 value not filled properly" + let item2 = List.item 1 page.Metadata + Expect.equal item2.Name "Two" "Meta item 1 name not filled properly" + Expect.equal item2.Value "2" "Meta item 1 value not filled properly" + Expect.equal page.Revisions.Length 2 "There should be 2 revisions" + let rev1 = List.item 0 page.Revisions + Expect.equal rev1.AsOf (Noda.epoch + Duration.FromHours 1) "Revision 0 as-of not filled properly" + Expect.equal rev1.Text (Markdown "# Howdy!") "Revision 0 text not filled properly" + let rev2 = List.item 1 page.Revisions + Expect.equal rev2.AsOf Noda.epoch "Revision 1 as-of not filled properly" + Expect.equal rev2.Text (Html "

howdy

") "Revision 1 text not filled properly" + } + test "succeeds with all changes" { + let model = EditPageModel() + model.Id <- "this-page" + model.Title <- "My Updated Page" + model.Permalink <- "blog/updated.html" + model.Template <- "" + model.IsShownInPageList <- false + model.Source <- "HTML" + model.Text <- "

Howdy, partners!

" + model.MetaNames <- [| "banana"; "apple"; "grape" |] + model.MetaValues <- [| "monkey"; "zebra"; "ape" |] + let now = Noda.epoch + Duration.FromDays 7 + let page = model.UpdatePage testFullPage now + Expect.equal page.Title "My Updated Page" "Title not filled properly" + Expect.equal page.Permalink (Permalink "blog/updated.html") "Permalink not filled properly" + Expect.equal page.PriorPermalinks [ Permalink "blog/page.html" ] "PriorPermalinks not filled properly" + Expect.equal page.UpdatedOn now "UpdatedOn not filled properly" + Expect.isFalse page.IsInPageList "IsInPageList should not have been set" + Expect.isNone page.Template "Template not filled properly" + Expect.equal page.Text "

Howdy, partners!

" "Text not filled properly" + Expect.equal page.Metadata.Length 3 "There should be 3 metadata items" + let item1 = List.item 0 page.Metadata + Expect.equal item1.Name "apple" "Meta item 0 name not filled properly" + Expect.equal item1.Value "zebra" "Meta item 0 value not filled properly" + let item2 = List.item 1 page.Metadata + Expect.equal item2.Name "banana" "Meta item 1 name not filled properly" + Expect.equal item2.Value "monkey" "Meta item 1 value not filled properly" + let item3 = List.item 2 page.Metadata + Expect.equal item3.Name "grape" "Meta item 2 name not filled properly" + Expect.equal item3.Value "ape" "Meta item 2 value not filled properly" + Expect.equal page.Revisions.Length 3 "There should be 3 revisions" + Expect.equal page.Revisions.Head.AsOf now "Head revision as-of not filled properly" + Expect.equal + page.Revisions.Head.Text (Html "

Howdy, partners!

") "Head revision text not filled properly" + } + ] +] + +/// Unit tests for the EditPostModel type +let editPostModelTests = testList "EditPostModel" [ + testList "FromPost" [ + test "succeeds for empty post" { + let model = EditPostModel.FromPost WebLog.Empty { Post.Empty with Id = PostId "la-la-la" } + Expect.equal model.Id "la-la-la" "Parent fields not filled properly" + Expect.equal model.Tags "" "Tags not filled properly" + Expect.isEmpty model.CategoryIds "CategoryIds not filled properly" + Expect.equal model.Status (string Draft) "Status not filled properly" + Expect.isFalse model.DoPublish "DoPublish should not have been set" + Expect.isFalse model.SetPublished "SetPublished should not have been set" + Expect.isFalse model.PubOverride.HasValue "PubOverride not filled properly" + Expect.isFalse model.SetUpdated "SetUpdated should not have been set" + Expect.isFalse model.IsEpisode "IsEpisode should not have been set" + Expect.equal model.Media "" "Media not filled properly" + Expect.equal model.Length 0L "Length not filled properly" + Expect.equal model.Duration "" "Duration not filled properly" + Expect.equal model.MediaType "" "MediaType not filled properly" + Expect.equal model.ImageUrl "" "ImageUrl not filled properly" + Expect.equal model.Subtitle "" "Subtitle not filled properly" + Expect.equal model.Explicit "" "Explicit not filled properly" + Expect.equal model.ChapterSource "none" "ChapterSource not filled properly" + Expect.equal model.ChapterFile "" "ChapterFile not filled properly" + Expect.equal model.ChapterType "" "ChapterType not filled properly" + Expect.isFalse model.ContainsWaypoints "ContainsWaypoints should not have been set" + Expect.equal model.TranscriptUrl "" "TranscriptUrl not filled properly" + Expect.equal model.TranscriptType "" "TranscriptType not filled properly" + Expect.equal model.TranscriptLang "" "TranscriptLang not filled properly" + Expect.isFalse model.TranscriptCaptions "TranscriptCaptions should not have been set" + 
Expect.equal model.SeasonNumber 0 "SeasonNumber not filled properly" + Expect.equal model.SeasonDescription "" "SeasonDescription not filled properly" + Expect.equal model.EpisodeNumber "" "EpisodeNumber not filled properly" + Expect.equal model.EpisodeDescription "" "EpisodeDescription not filled properly" + } + test "succeeds for full post with external chapters" { + let model = EditPostModel.FromPost { WebLog.Empty with TimeZone = "Etc/GMT+1" } testFullPost + Expect.equal model.Id "a-post" "Parent fields not filled properly" + Expect.equal model.Tags "demo, post" "Tags not filled properly" + Expect.equal model.CategoryIds [| "cat-a"; "cat-b"; "cat-n" |] "CategoryIds not filled properly" + Expect.equal model.Status (string Published) "Status not filled properly" + Expect.isFalse model.DoPublish "DoPublish should not have been set" + Expect.isFalse model.SetPublished "SetPublished should not have been set" + Expect.isTrue model.PubOverride.HasValue "PubOverride should not have been null" + Expect.equal + model.PubOverride.Value + ((Noda.epoch + Duration.FromDays 7 - Duration.FromHours 1).ToDateTimeUtc()) + "PubOverride not filled properly" + Expect.isFalse model.SetUpdated "SetUpdated should not have been set" + Expect.isTrue model.IsEpisode "IsEpisode should have been set" + Expect.equal model.Media "a-post-ep.mp3" "Media not filled properly" + Expect.equal model.Length 15555L "Length not filled properly" + Expect.equal model.Duration "0:15:22" "Duration not filled properly" + Expect.equal model.MediaType "audio/mpeg3" "MediaType not filled properly" + Expect.equal model.ImageUrl "uploads/podcast-cover.jpg" "ImageUrl not filled properly" + Expect.equal model.Subtitle "Narration" "Subtitle not filled properly" + Expect.equal model.Explicit "clean" "Explicit not filled properly" + Expect.equal model.ChapterSource "external" "ChapterSource not filled properly" + Expect.equal model.ChapterFile "uploads/1970/01/chapters.txt" "ChapterFile not filled properly" + 
Expect.equal model.ChapterType "chapters" "ChapterType not filled properly" + Expect.isTrue model.ContainsWaypoints "ContainsWaypoints should have been set" + Expect.equal model.TranscriptUrl "uploads/1970/01/transcript.txt" "TranscriptUrl not filled properly" + Expect.equal model.TranscriptType "transcript" "TranscriptType not filled properly" + Expect.equal model.TranscriptLang "EN-us" "TranscriptLang not filled properly" + Expect.isTrue model.TranscriptCaptions "TranscriptCaptions should have been set" + Expect.equal model.SeasonNumber 3 "SeasonNumber not filled properly" + Expect.equal model.SeasonDescription "Season Three" "SeasonDescription not filled properly" + Expect.equal model.EpisodeNumber "322" "EpisodeNumber not filled properly" + Expect.equal model.EpisodeDescription "Episode 322" "EpisodeDescription not filled properly" + } + test "succeeds for full post with internal chapters" { + let model = + EditPostModel.FromPost + { WebLog.Empty with TimeZone = "Etc/GMT+1" } + { testFullPost with + Episode = + Some + { testFullPost.Episode.Value with + Chapters = Some [] + ChapterFile = None + ChapterType = None } } + Expect.equal model.ChapterSource "internal" "ChapterSource not filled properly" + } + ] + let updatedModel () = + let model = EditPostModel.FromPost WebLog.Empty testFullPost + model.Title <- "An Updated Post" + model.Permalink <- "1970/01/updated-post.html" + model.Source <- "HTML" + model.Text <- "

An updated post!

" + model.Tags <- "Zebras, Aardvarks, , Turkeys" + model.Template <- "updated" + model.CategoryIds <- [| "cat-x"; "cat-y" |] + model.MetaNames <- [| "Zed Meta"; "A Meta" |] + model.MetaValues <- [| "A Value"; "Zed Value" |] + model.Media <- "an-updated-ep.mp3" + model.Length <- 14444L + model.Duration <- "0:14:42" + model.MediaType <- "audio/mp3" + model.ImageUrl <- "updated-cover.png" + model.Subtitle <- "Talking" + model.Explicit <- "no" + model.ChapterSource <- "external" + model.ChapterFile <- "updated-chapters.txt" + model.ChapterType <- "indexes" + model.TranscriptUrl <- "updated-transcript.txt" + model.TranscriptType <- "subtitles" + model.TranscriptLang <- "ES-mx" + model.SeasonNumber <- 4 + model.SeasonDescription <- "Season Fo" + model.EpisodeNumber <- "432.1" + model.EpisodeDescription <- "Four Three Two pt One" + model + testList "UpdatePost" [ + test "succeeds for a full podcast episode" { + let post = (updatedModel ()).UpdatePost testFullPost (Noda.epoch + Duration.FromDays 400) + Expect.equal post.Title "An Updated Post" "Title not filled properly" + Expect.equal post.Permalink (Permalink "1970/01/updated-post.html") "Permalink not filled properly" + Expect.equal post.PriorPermalinks [ Permalink "1970/01/a-post.html" ] "PriorPermalinks not filled properly" + Expect.equal post.PublishedOn testFullPost.PublishedOn "PublishedOn should not have changed" + Expect.equal post.UpdatedOn (Noda.epoch + Duration.FromDays 400) "UpdatedOn not filled properly" + Expect.equal post.Text "

An updated post!

" "Text not filled properly" + Expect.equal post.Tags [ "aardvarks"; "turkeys"; "zebras" ] "Tags not filled properly" + Expect.equal post.Template (Some "updated") "Template not filled properly" + Expect.equal post.CategoryIds [ CategoryId "cat-x"; CategoryId "cat-y" ] "Categories not filled properly" + Expect.equal post.Metadata.Length 2 "There should have been 2 meta items" + Expect.equal post.Metadata[0].Name "A Meta" "Meta item 0 name not filled properly" + Expect.equal post.Metadata[0].Value "Zed Value" "Meta item 0 value not filled properly" + Expect.equal post.Metadata[1].Name "Zed Meta" "Meta item 1 name not filled properly" + Expect.equal post.Metadata[1].Value "A Value" "Meta item 1 value not filled properly" + Expect.equal post.Revisions.Length 3 "There should have been 3 revisions" + Expect.equal + post.Revisions[0].AsOf (Noda.epoch + Duration.FromDays 400) "Revision 0 AsOf not filled properly" + Expect.equal post.Revisions[0].Text (Html "

An updated post!

") "Revision 0 Text not filled properly" + Expect.isSome post.Episode "There should have been a podcast episode" + let ep = post.Episode.Value + Expect.equal ep.Media "an-updated-ep.mp3" "Media not filled properly" + Expect.equal ep.Length 14444L "Length not filled properly" + Expect.equal + ep.Duration (Some (Duration.FromMinutes 14L + Duration.FromSeconds 42L)) "Duration not filled properly" + Expect.equal ep.MediaType (Some "audio/mp3") "MediaType not filled properly" + Expect.equal ep.ImageUrl (Some "updated-cover.png") "ImageUrl not filled properly" + Expect.equal ep.Subtitle (Some "Talking") "Subtitle not filled properly" + Expect.equal ep.Explicit (Some No) "ExplicitRating not filled properly" + Expect.isNone ep.Chapters "Chapters should have had no value" + Expect.equal ep.ChapterFile (Some "updated-chapters.txt") "ChapterFile not filled properly" + Expect.equal ep.ChapterType (Some "indexes") "ChapterType not filled properly" + Expect.equal ep.ChapterWaypoints (Some true) "ChapterWaypoints should have been set" + Expect.equal ep.TranscriptUrl (Some "updated-transcript.txt") "TranscriptUrl not filled properly" + Expect.equal ep.TranscriptType (Some "subtitles") "TranscriptType not filled properly" + Expect.equal ep.TranscriptLang (Some "ES-mx") "TranscriptLang not filled properly" + Expect.equal ep.TranscriptCaptions (Some true) "TranscriptCaptions should have been set" + Expect.equal ep.SeasonNumber (Some 4) "SeasonNumber not filled properly" + Expect.equal ep.SeasonDescription (Some "Season Fo") "SeasonDescription not filled properly" + Expect.equal ep.EpisodeNumber (Some 432.1) "EpisodeNumber not filled properly" + Expect.equal ep.EpisodeDescription (Some "Four Three Two pt One") "EpisodeDescription not filled properly" + } + test "succeeds for a minimal podcast episode" { + let minModel = updatedModel () + minModel.Duration <- "" + minModel.MediaType <- "" + minModel.ImageUrl <- "" + minModel.Subtitle <- "" + minModel.Explicit <- "" + 
minModel.ChapterFile <- "" + minModel.ChapterType <- "" + minModel.ContainsWaypoints <- false + minModel.TranscriptUrl <- "" + minModel.TranscriptType <- "" + minModel.TranscriptLang <- "" + minModel.TranscriptCaptions <- false + minModel.SeasonNumber <- 0 + minModel.SeasonDescription <- "" + minModel.EpisodeNumber <- "" + minModel.EpisodeDescription <- "" + let post = minModel.UpdatePost testFullPost (Noda.epoch + Duration.FromDays 500) + Expect.isSome post.Episode "There should have been a podcast episode" + let ep = post.Episode.Value + Expect.equal ep.Media "an-updated-ep.mp3" "Media not filled properly" + Expect.equal ep.Length 14444L "Length not filled properly" + Expect.isNone ep.Duration "Duration not filled properly" + Expect.isNone ep.MediaType "MediaType not filled properly" + Expect.isNone ep.ImageUrl "ImageUrl not filled properly" + Expect.isNone ep.Subtitle "Subtitle not filled properly" + Expect.isNone ep.Explicit "ExplicitRating not filled properly" + Expect.isNone ep.ChapterFile "ChapterFile not filled properly" + Expect.isNone ep.ChapterType "ChapterType not filled properly" + Expect.isNone ep.ChapterWaypoints "ChapterWaypoints should have been set" + Expect.isNone ep.TranscriptUrl "TranscriptUrl not filled properly" + Expect.isNone ep.TranscriptType "TranscriptType not filled properly" + Expect.isNone ep.TranscriptLang "TranscriptLang not filled properly" + Expect.isNone ep.TranscriptCaptions "TranscriptCaptions should have been set" + Expect.isNone ep.SeasonNumber "SeasonNumber not filled properly" + Expect.isNone ep.SeasonDescription "SeasonDescription not filled properly" + Expect.isNone ep.EpisodeNumber "EpisodeNumber not filled properly" + Expect.isNone ep.EpisodeDescription "EpisodeDescription not filled properly" + } + test "succeeds for a podcast episode with internal chapters" { + let minModel = updatedModel () + minModel.ChapterSource <- "internal" + minModel.ChapterFile <- "" + minModel.ChapterType <- "" + let post = 
minModel.UpdatePost testFullPost (Noda.epoch + Duration.FromDays 500) + Expect.isSome post.Episode "There should have been a podcast episode" + let ep = post.Episode.Value + Expect.equal ep.Chapters (Some []) "Chapters not filled properly" + Expect.isNone ep.ChapterFile "ChapterFile not filled properly" + Expect.isNone ep.ChapterType "ChapterType not filled properly" + } + test "succeeds for a podcast episode with no chapters" { + let minModel = updatedModel () + minModel.ChapterSource <- "none" + let post = + minModel.UpdatePost + { testFullPost with Episode = Some { testFullPost.Episode.Value with Chapters = Some [] } } + (Noda.epoch + Duration.FromDays 500) + Expect.isSome post.Episode "There should have been a podcast episode" + let ep = post.Episode.Value + Expect.isNone ep.Chapters "Chapters not filled properly" + Expect.isNone ep.ChapterFile "ChapterFile not filled properly" + Expect.isNone ep.ChapterType "ChapterType not filled properly" + Expect.isNone ep.ChapterWaypoints "ChapterWaypoints not filled properly" + } + test "succeeds for no podcast episode and no template" { + let model = updatedModel () + model.IsEpisode <- false + model.Template <- "" + let post = model.UpdatePost testFullPost Noda.epoch + Expect.isNone post.Template "Template not filled properly" + Expect.isNone post.Episode "Episode not filled properly" + } + test "succeeds when publishing a draft" { + let model = updatedModel () + model.DoPublish <- true + let post = model.UpdatePost { testFullPost with Status = Draft } (Noda.epoch + Duration.FromDays 375) + Expect.equal post.Status Published "Status not set properly" + Expect.equal post.PublishedOn (Some (Noda.epoch + Duration.FromDays 375)) "PublishedOn not set properly" + } + ] +] + +/// Unit tests for the EditRedirectRuleModel type +let editRedirectRuleModelTests = testList "EditRedirectRuleModel" [ + test "FromRule succeeds" { + let model = EditRedirectRuleModel.FromRule 15 { From = "here"; To = "there"; IsRegex = true } + 
Expect.equal model.RuleId 15 "RuleId not filled properly" + Expect.equal model.From "here" "From not filled properly" + Expect.equal model.To "there" "To not filled properly" + Expect.isTrue model.IsRegex "IsRegex should have been set" + Expect.isFalse model.InsertAtTop "InsertAtTop should not have been set" + } + test "ToRule succeeds" { + let rule = { RuleId = 10; From = "me"; To = "you"; IsRegex = false; InsertAtTop = false }.ToRule() + Expect.equal rule.From "me" "From not filled properly" + Expect.equal rule.To "you" "To not filled properly" + Expect.isFalse rule.IsRegex "IsRegex should not have been set" + } +] + +/// Unit tests for the EditRssModel type +let editRssModelTests = testList "EditRssModel" [ + testList "FromRssOptions" [ + test "succeeds with minimal options" { + let model = EditRssModel.FromRssOptions RssOptions.Empty + Expect.isTrue model.IsFeedEnabled "IsFeedEnabled should have been set" + Expect.equal model.FeedName "feed.xml" "FeedName not filled properly" + Expect.equal model.ItemsInFeed 0 "ItemsInFeed not filled properly" + Expect.isTrue model.IsCategoryEnabled "IsCategoryEnabled should have been set" + Expect.isTrue model.IsTagEnabled "IsTagEnabled should have been set" + Expect.equal model.Copyright "" "Copyright not filled properly" + } + test "succeeds with full options" { + let model = + EditRssModel.FromRssOptions + { RssOptions.Empty with + IsFeedEnabled = false + FeedName = "custom.xml" + ItemsInFeed = Some 82 + IsCategoryEnabled = false + IsTagEnabled = false + Copyright = Some "yep" } + Expect.isFalse model.IsFeedEnabled "IsFeedEnabled should not have been set" + Expect.equal model.FeedName "custom.xml" "FeedName not filled properly" + Expect.equal model.ItemsInFeed 82 "ItemsInFeed not filled properly" + Expect.isFalse model.IsCategoryEnabled "IsCategoryEnabled should not have been set" + Expect.isFalse model.IsTagEnabled "IsTagEnabled should not have been set" + Expect.equal model.Copyright "yep" "Copyright not filled properly" 
+ } + ] + testList "UpdateOptions" [ + test "succeeds with minimal options" { + let opts = + { IsFeedEnabled = true + FeedName = "blah.xml" + ItemsInFeed = 0 + IsCategoryEnabled = true + IsTagEnabled = true + Copyright = "" }.UpdateOptions RssOptions.Empty + Expect.isTrue opts.IsFeedEnabled "IsFeedEnabled should have been set" + Expect.equal opts.FeedName "blah.xml" "FeedName not filled properly" + Expect.isNone opts.ItemsInFeed "ItemsInFeed should not have had a value" + Expect.isTrue opts.IsCategoryEnabled "IsCategoryEnabled should have been set" + Expect.isTrue opts.IsTagEnabled "IsTagEnabled should have been set" + Expect.isNone opts.Copyright "Copyright should not have had a value" + } + test "succeeds with full options" { + let opts = + { IsFeedEnabled = false + FeedName = "again.xml" + ItemsInFeed = 22 + IsCategoryEnabled = false + IsTagEnabled = false + Copyright = "none" }.UpdateOptions RssOptions.Empty + Expect.isFalse opts.IsFeedEnabled "IsFeedEnabled should not have been set" + Expect.equal opts.FeedName "again.xml" "FeedName not filled properly" + Expect.equal opts.ItemsInFeed (Some 22) "ItemsInFeed not filled properly" + Expect.isFalse opts.IsCategoryEnabled "IsCategoryEnabled should not have been set" + Expect.isFalse opts.IsTagEnabled "IsTagEnabled should not have been set" + Expect.equal opts.Copyright (Some "none") "Copyright not filled properly" + } + ] +] + +/// Unit tests for the EditTagMapModel type +let editTagMapModelTests = testList "EditTagMapModel" [ + test "FromMapping succeeds" { + let model = + EditTagMapModel.FromMapping + { Id = TagMapId "howdy"; Tag = "f#"; UrlValue = "f-sharp"; WebLogId = WebLogId "" } + Expect.equal model.Id "howdy" "Id not filled properly" + Expect.equal model.Tag "f#" "Tag not filled properly" + Expect.equal model.UrlValue "f-sharp" "UrlValue not filled properly" + } + testList "IsNew" [ + test "succeeds when tag mapping is new" { + Expect.isTrue + (EditTagMapModel.FromMapping { TagMap.Empty with Id = TagMapId 
"new" }).IsNew + "IsNew should have been set" + } + test "succeeds when tag mapping is not new" { + Expect.isFalse + (EditTagMapModel.FromMapping { TagMap.Empty with Id = TagMapId "ancient" }).IsNew + "IsNew should not have been set" + } + ] +] + +/// Unit tests for the EditUserModel type +let editUserModelTests = testList "EditUserModel" [ + testList "FromUser" [ + test "succeeds when the user does not have a URL" { + let model = + EditUserModel.FromUser + { WebLogUser.Empty with + Id = WebLogUserId "test" + AccessLevel = Editor + Email = "test@example.com" + FirstName = "Test" + LastName = "User" + PreferredName = "Tester" } + Expect.equal model.Id "test" "Id not filled properly" + Expect.equal model.AccessLevel "Editor" "AccessLevel not filled properly" + Expect.equal model.Url "" "Url not filled properly" + Expect.equal model.Email "test@example.com" "Email not filled properly" + Expect.equal model.FirstName "Test" "FirstName not filled properly" + Expect.equal model.LastName "User" "LastName not filled properly" + Expect.equal model.PreferredName "Tester" "PreferredName not filled properly" + Expect.equal model.Password "" "Password not filled properly" + Expect.equal model.PasswordConfirm "" "PasswordConfirm not filled properly" + } + test "succeeds when the user has a URL" { + let model = EditUserModel.FromUser { WebLogUser.Empty with Url = Some "https://test.me" } + Expect.equal model.Url "https://test.me" "Url not filled properly" + } + ] + testList "IsNew" [ + test "succeeds when the user is new" { + Expect.isTrue + (EditUserModel.FromUser { WebLogUser.Empty with Id = WebLogUserId "new" }).IsNew + "IsNew should have been set" + } + test "succeeds when the user is not new" { + Expect.isFalse + (EditUserModel.FromUser { WebLogUser.Empty with Id = WebLogUserId "not-new" }).IsNew + "IsNew should not have been set" + } + ] + testList "UpdateUser" [ + let model = + { Id = "test-user" + AccessLevel = "WebLogAdmin" + Email = "again@example.com" + Url = "" + 
FirstName = "Another" + LastName = "One" + PreferredName = "Happy" + Password = "my-password" + PasswordConfirm = "my-password" } + test "succeeds when user has no URL" { + let user = model.UpdateUser WebLogUser.Empty + Expect.equal user.AccessLevel WebLogAdmin "AccessLevel not filled properly" + Expect.equal user.Email "again@example.com" "Email not filled properly" + Expect.isNone user.Url "Url should not have had a value" + Expect.equal user.FirstName "Another" "FirstName not filled properly" + Expect.equal user.LastName "One" "LastName not filled properly" + Expect.equal user.PreferredName "Happy" "FirstName not filled properly" + } + test "succeeds when user has a URL" { + let user = { model with Url = "https://over.there" }.UpdateUser WebLogUser.Empty + Expect.equal user.Url (Some "https://over.there") "Url not filled properly" + } + ] +] + +/// Unit tests for the ManageChaptersModel type +let manageChaptersModelTests = testList "ManageChaptersModel.Create" [ + test "succeeds" { + let model = + ManageChaptersModel.Create + { Post.Empty with + Id = PostId "test-post" + Title = "Look at all these chapters" + Episode = Some + { Episode.Empty with + Chapters = Some + [ { Chapter.Empty with StartTime = Duration.FromSeconds 18L } + { Chapter.Empty with StartTime = Duration.FromSeconds 36L } + { Chapter.Empty with StartTime = Duration.FromSeconds 180.7 } ] } } + Expect.equal model.Id "test-post" "ID not filled properly" + Expect.equal model.Title "Look at all these chapters" "Title not filled properly" + Expect.hasLength model.Chapters 3 "There should be three chapters" + Expect.equal model.Chapters[0].StartTime (Duration.FromSeconds 18L) "First chapter not filled properly" + Expect.equal model.Chapters[1].StartTime (Duration.FromSeconds 36L) "Second chapter not filled properly" + Expect.equal model.Chapters[2].StartTime (Duration.FromSeconds 180.7) "Third chapter not filled properly" + } +] + +/// Unit tests for the ManagePermalinksModel type +let 
managePermalinksModelTests = testList "ManagePermalinksModel" [ + test "FromPage succeeds" { + let model = + ManagePermalinksModel.FromPage + { Page.Empty with + Id = PageId "links" + Title = "My Awesome Page" + Permalink = Permalink "2018/02/my-awesome-page.html" + PriorPermalinks = [ Permalink "2018/02/my-awesome-pages.html"; Permalink "2018/02/oops.html" ] } + Expect.equal model.Id "links" "Id not filled properly" + Expect.equal model.Entity "page" "Entity not filled properly" + Expect.equal model.CurrentTitle "My Awesome Page" "CurrentTitle not filled properly" + Expect.equal model.CurrentPermalink "2018/02/my-awesome-page.html" "CurrentPermalink not filled properly" + Expect.equal model.Prior [| "2018/02/my-awesome-pages.html"; "2018/02/oops.html" |] "Prior not filled properly" + } + test "FromPost succeeds" { + let model = + ManagePermalinksModel.FromPost + { Post.Empty with + Id = PostId "test" + Title = "Errata" + Permalink = Permalink "2020/01/errata.html" + PriorPermalinks = [ Permalink "2020/01/errors.html"; Permalink "2020/01/whoops.html" ] } + Expect.equal model.Id "test" "Id not filled properly" + Expect.equal model.Entity "post" "Entity not filled properly" + Expect.equal model.CurrentTitle "Errata" "CurrentTitle not filled properly" + Expect.equal model.CurrentPermalink "2020/01/errata.html" "CurrentPermalink not filled properly" + Expect.equal model.Prior [| "2020/01/errors.html"; "2020/01/whoops.html" |] "Prior not filled properly" + } +] + +/// Unit tests for the ManageRevisionsModel type +let manageRevisionsModelTests = testList "ManageRevisionsModel" [ + test "FromPage succeeds" { + let revisions = + [ { AsOf = Noda.epoch + Duration.FromDays 24; Text = Html "wow" } + { AsOf = Noda.epoch + Duration.FromDays 20; Text = Html "

huh

" } ] + let model = + ManageRevisionsModel.FromPage + { Page.Empty with Id = PageId "revs"; Title = "A Revised Page"; Revisions = revisions } + Expect.equal model.Id "revs" "Id not filled properly" + Expect.equal model.Entity "page" "Entity not filled properly" + Expect.equal model.CurrentTitle "A Revised Page" "CurrentTitle not filled properly" + Expect.equal model.Revisions.Length 2 "There should be two revisions" + } + test "FromPost succeeds" { + let revisions = + [ { AsOf = Noda.epoch + Duration.FromDays 13; Text = Html "

again

" } + { AsOf = Noda.epoch + Duration.FromDays 12; Text = Html "

original

" } ] + let model = + ManageRevisionsModel.FromPost + { Post.Empty with Id = PostId "altered"; Title = "Round Two"; Revisions = revisions } + Expect.equal model.Id "altered" "Id not filled properly" + Expect.equal model.Entity "post" "Entity not filled properly" + Expect.equal model.CurrentTitle "Round Two" "CurrentTitle not filled properly" + Expect.equal model.Revisions.Length 2 "There should be two revisions" + } +] + +/// Unit tests for the PostListItem type +let postListItemTests = testList "PostListItem" [ + testList "FromPost" [ + test "succeeds for a draft post" { + let post = + { Post.Empty with + Id = PostId "draft-post" + AuthorId = WebLogUserId "myself" + Title = "Not Ready for Prime Time" + Permalink = Permalink "2021/draft.html" + UpdatedOn = Noda.epoch + Duration.FromHours 8 + Text = "

WIP

" } + let model = PostListItem.FromPost { WebLog.Empty with TimeZone = "Etc/GMT-1" } post + Expect.equal model.Id "draft-post" "Id not filled properly" + Expect.equal model.AuthorId "myself" "AuthorId not filled properly" + Expect.equal model.Status "Draft" "Status not filled properly" + Expect.equal model.Title "Not Ready for Prime Time" "Title not filled properly" + Expect.equal model.Permalink "2021/draft.html" "Permalink not filled properly" + Expect.isFalse model.PublishedOn.HasValue "PublishedOn should not have had a value" + Expect.equal + model.UpdatedOn ((Noda.epoch + Duration.FromHours 9).ToDateTimeUtc()) "UpdatedOn not filled properly" + Expect.equal model.Text "

WIP

" "Text not filled properly" + Expect.isEmpty model.CategoryIds "There should have been no category IDs" + Expect.isEmpty model.Tags "There should have been no tags" + Expect.isNone model.Episode "There should not have been an episode" + Expect.isEmpty model.Metadata "There should have been no metadata" + } + test "succeeds for a published post in a non-root domain" { + let post = + { Post.Empty with + Id = PostId "full-post" + AuthorId = WebLogUserId "me" + Status = Published + Title = "Finished Product" + Permalink = Permalink "2021/post.html" + PublishedOn = Some (Noda.epoch + Duration.FromHours 12) + UpdatedOn = Noda.epoch + Duration.FromHours 13 + Text = """Click""" + CategoryIds = [ CategoryId "z"; CategoryId "y" ] + Tags = [ "test"; "unit" ] + Episode = Some { Episode.Empty with Media = "test.mp3" } + Metadata = [ { Name = "MyMeta"; Value = "MyValue" } ] } + let model = + PostListItem.FromPost { WebLog.Empty with UrlBase = "https://u.t/w"; TimeZone = "Etc/GMT+1" } post + Expect.equal model.Id "full-post" "Id not filled properly" + Expect.equal model.AuthorId "me" "AuthorId not filled properly" + Expect.equal model.Status "Published" "Status not filled properly" + Expect.equal model.Title "Finished Product" "Title not filled properly" + Expect.equal model.Permalink "2021/post.html" "Permalink not filled properly" + Expect.isTrue model.PublishedOn.HasValue "PublishedOn should not have had a value" + Expect.equal + model.PublishedOn.Value + ((Noda.epoch + Duration.FromHours 11).ToDateTimeUtc()) + "PublishedOn not filled properly" + Expect.equal + model.UpdatedOn ((Noda.epoch + Duration.FromHours 12).ToDateTimeUtc()) "UpdatedOn not filled properly" + Expect.equal model.Text """Click""" "Text not filled properly" + Expect.equal model.CategoryIds [ "z"; "y" ] "CategoryIds not filled properly" + Expect.equal model.Tags [ "test"; "unit" ] "Tags not filled properly" + Expect.isSome model.Episode "There should have been an episode" + Expect.equal 
model.Episode.Value.Media "test.mp3" "Episode not filled properly" + Expect.equal model.Metadata.Length 1 "There should have been 1 metadata item" + Expect.equal model.Metadata[0].Name "MyMeta" "Metadata not filled properly" + } + ] +] + +/// Unit tests for the SettingModel type +let settingsModelTests = testList "SettingsModel" [ + testList "FromWebLog" [ + test "succeeds with no subtitle" { + let model = + SettingsModel.FromWebLog + { WebLog.Empty with + Name = "The Web Log" + Slug = "the-web-log" + DefaultPage = "this-one" + PostsPerPage = 18 + TimeZone = "America/Denver" + ThemeId = ThemeId "my-theme" + AutoHtmx = true } + Expect.equal model.Name "The Web Log" "Name not filled properly" + Expect.equal model.Slug "the-web-log" "Slug not filled properly" + Expect.equal model.Subtitle "" "Subtitle not filled properly" + Expect.equal model.DefaultPage "this-one" "DefaultPage not filled properly" + Expect.equal model.PostsPerPage 18 "PostsPerPage not filled properly" + Expect.equal model.TimeZone "America/Denver" "TimeZone not filled properly" + Expect.equal model.ThemeId "my-theme" "ThemeId not filled properly" + Expect.isTrue model.AutoHtmx "AutoHtmx should have been set" + Expect.equal model.Uploads "Database" "Uploads not filled properly" + } + test "succeeds with a subtitle" { + let model = SettingsModel.FromWebLog { WebLog.Empty with Subtitle = Some "sub here!" } + Expect.equal model.Subtitle "sub here!" 
"Subtitle not filled properly" + } + ] + testList "Update" [ + test "succeeds with no subtitle" { + let webLog = + { Name = "Interesting" + Slug = "some-stuff" + Subtitle = "" + DefaultPage = "that-one" + PostsPerPage = 8 + TimeZone = "America/Chicago" + ThemeId = "test-theme" + AutoHtmx = true + Uploads = "Disk" }.Update WebLog.Empty + Expect.equal webLog.Name "Interesting" "Name not filled properly" + Expect.equal webLog.Slug "some-stuff" "Slug not filled properly" + Expect.isNone webLog.Subtitle "Subtitle should not have had a value" + Expect.equal webLog.DefaultPage "that-one" "DefaultPage not filled properly" + Expect.equal webLog.PostsPerPage 8 "PostsPerPage not filled properly" + Expect.equal webLog.TimeZone "America/Chicago" "TimeZone not filled properly" + Expect.equal webLog.ThemeId (ThemeId "test-theme") "ThemeId not filled properly" + Expect.isTrue webLog.AutoHtmx "AutoHtmx should have been set" + Expect.equal webLog.Uploads Disk "Uploads not filled properly" + } + test "succeeds with a subtitle" { + let webLog = { SettingsModel.FromWebLog WebLog.Empty with Subtitle = "Sub" }.Update WebLog.Empty + Expect.equal webLog.Subtitle (Some "Sub") "Subtitle should have had a value" + } + ] +] + +/// Unit tests for the UserMessage type +let userMessageTests = testList "UserMessage" [ + test "Success succeeds" { + Expect.equal UserMessage.Success.Level "success" "Level incorrect" + } + test "Info succeeds" { + Expect.equal UserMessage.Info.Level "primary" "Level incorrect" + } + test "Warning succeeds" { + Expect.equal UserMessage.Warning.Level "warning" "Level incorrect" + } + test "Error succeeds" { + Expect.equal UserMessage.Error.Level "danger" "Level incorrect" + } +] + +/// All tests in the Domain.ViewModels file +let all = testList "ViewModels" [ + addBaseToRelativeUrlsTests + displayPageTests + displayThemeTests + displayUploadTests + editCategoryModelTests + editCommonModelTests + editCustomFeedModelTests + editMyInfoModelTests + editPageModelTests + 
editPostModelTests + editRedirectRuleModelTests + editRssModelTests + editTagMapModelTests + editUserModelTests + manageChaptersModelTests + managePermalinksModelTests + manageRevisionsModelTests + postListItemTests + settingsModelTests + userMessageTests +] diff --git a/src/MyWebLog.Tests/MyWebLog.Tests.fsproj b/src/MyWebLog.Tests/MyWebLog.Tests.fsproj new file mode 100644 index 0000000..353d71d --- /dev/null +++ b/src/MyWebLog.Tests/MyWebLog.Tests.fsproj @@ -0,0 +1,38 @@ + + + + Exe + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/MyWebLog.Tests/Program.fs b/src/MyWebLog.Tests/Program.fs new file mode 100644 index 0000000..b2ed6a9 --- /dev/null +++ b/src/MyWebLog.Tests/Program.fs @@ -0,0 +1,31 @@ +open Expecto + +/// Whether to only run RethinkDB data tests +let rethinkOnly = (RethinkDbDataTests.env "RETHINK_ONLY" "0") = "1" + +/// Whether to only run SQLite data tests +let sqliteOnly = (RethinkDbDataTests.env "SQLITE_ONLY" "0") = "1" + +/// Whether to only run PostgreSQL data tests +let postgresOnly = (RethinkDbDataTests.env "PG_ONLY" "0") = "1" + +/// Whether any of the data tests are being isolated +let dbOnly = rethinkOnly || sqliteOnly || postgresOnly + +/// Whether to only run the unit tests (skip database/integration tests) +let unitOnly = (RethinkDbDataTests.env "UNIT_ONLY" "0") = "1" + +let allTests = testList "MyWebLog" [ + if not dbOnly then testList "Domain" [ SupportTypesTests.all; DataTypesTests.all; ViewModelsTests.all ] + if not unitOnly then + testList "Data" [ + if not dbOnly then ConvertersTests.all + if not dbOnly then UtilsTests.all + if not dbOnly || (dbOnly && rethinkOnly) then RethinkDbDataTests.all + if not dbOnly || (dbOnly && sqliteOnly) then SQLiteDataTests.all + if not dbOnly || (dbOnly && postgresOnly) then PostgresDataTests.all + ] +] + +[] +let main args = runTestsWithCLIArgs [] args allTests diff --git a/src/MyWebLog.Tests/root-weblog.json b/src/MyWebLog.Tests/root-weblog.json new file mode 
100644 index 0000000..9c820c8 --- /dev/null +++ b/src/MyWebLog.Tests/root-weblog.json @@ -0,0 +1,380 @@ +{ + "WebLog": { + "Id": "uSitJEuD3UyzWC9jgOHc8g", + "Name": "Root WebLog", + "Slug": "root-weblog", + "Subtitle": "This is the main one", + "DefaultPage": "posts", + "PostsPerPage": 9, + "ThemeId": "default", + "UrlBase": "http://localhost:8081", + "TimeZone": "America/Denver", + "Rss": { + "IsFeedEnabled": true, + "FeedName": "feed", + "ItemsInFeed": 7, + "IsCategoryEnabled": true, + "IsTagEnabled": true, + "Copyright": "CC40-NC-BY", + "CustomFeeds": [ + { + "Id": "isPQ6drbDEydxohQzaiYtQ", + "Source": "tag:podcast", + "Path": "podcast-feed", + "Podcast": { + "Title": "Root Podcast", + "ItemsInFeed": 23, + "Summary": "All things that happen in the domain root", + "DisplayedAuthor": "Podcaster Extraordinaire", + "Email": "podcaster@example.com", + "ImageUrl": "images/cover-art.png", + "AppleCategory": "Fiction", + "AppleSubcategory": "Drama", + "Explicit": "no", + "DefaultMediaType": "audio/mpeg", + "MediaBaseUrl": "https://media.example.com/root/", + "PodcastGuid": "10fd7f79-c719-4e1d-9da7-10405dd4fd96", + "FundingUrl": "https://example.com/support-us", + "FundingText": "Support Our Work", + "Medium": "newsletter" + } + } + ] + }, + "AutoHtmx": true, + "Uploads": "Database", + "RedirectRules": [] + }, + "Users": [ + { + "Id": "5EM2rimH9kONpmd2zQkiVA", + "WebLogId": "uSitJEuD3UyzWC9jgOHc8g", + "Email": "root@example.com", + "FirstName": "Root", + "LastName": "Owner", + "PreferredName": "Admin", + "PasswordHash": "AQAAAAIAAYagAAAAEEnq9J9lKZoMQZaTOJHKIQo44skDdzDigzqS+o6myMop38YuHfm/vNs9b/WpYjsOxg==", + "AccessLevel": "Administrator", + "CreatedOn": "2024-01-20T21:49:03Z", + "LastSeenOn": "2024-01-20T22:25:03Z" + }, + { + "Id": "GPbJaSOwTkKt14ZKYyveKA", + "WebLogId": "uSitJEuD3UyzWC9jgOHc8g", + "Email": "editor@example.com", + "FirstName": "Ed", + "LastName": "It-Or", + "PreferredName": "Edits", + "PasswordHash": 
"AQAAAAIAAYagAAAAEA8E3NwJkZO+q35FTmUT0wMNB8IpBOSVACKQcccXpaWfZJMRmZzjPEzd4j/f9h+rEA==", + "AccessLevel": "Editor", + "CreatedOn": "2024-01-20T21:58:42Z" + }, + { + "Id": "iIRNLSeY0EanxRPyqGuwVg", + "WebLogId": "uSitJEuD3UyzWC9jgOHc8g", + "Email": "author@example.com", + "FirstName": "Author", + "LastName": "Dude", + "PreferredName": "Mister", + "PasswordHash": "AQAAAAIAAYagAAAAEBYNf1sR/pjaX2dZgqlvpH/Tqpz2h/CG3rsk/wH2ReTysjpK/gxSqht7IULWXM7KHQ==", + "Url": "https://example.com/author", + "AccessLevel": "Author", + "CreatedOn": "2024-01-20T21:53:27Z" + } + ], + "Theme": { + "Id": "default", + "Name": "myWebLog Default Theme", + "Version": "2.1.0", + "Templates": [ + { + "Name": "single-post", + "Text": "{%- assign post = model.posts | first -%}\n

{{ post.title }}

\n

\n {% if post.published_on -%}\n Published {{ post.published_on | date: \"dddd, MMMM d, yyyy\" }}\n at {{ post.published_on | date: \"h:mm tt\" | downcase }}\n {%- else -%}\n **DRAFT**\n {% endif %}\n by {{ model.authors | value: post.author_id }}\n {%- if is_editor or is_author and user_id == post.author_id %}\n • Edit Post\n {%- endif %}\n

\n
\n
\n
{{ post.text }}
\n {%- assign cat_count = post.category_ids | size -%}\n {% if cat_count > 0 -%}\n

\n Categorized under\n {% for cat_id in post.category_ids -%}\n {% assign cat = categories | where: \"Id\", cat_id | first %}\n \n \n {{ cat.name }}\n \n \n {% unless forloop.last %} • {% endunless %}\n {%- endfor %}\n

\n {%- endif %}\n {%- assign tag_count = post.tags | size -%}\n {% if tag_count > 0 -%}\n

\n Tagged\n {% for tag in post.tags %}\n \n {{ tag }}\n \n {% unless forloop.last %} • {% endunless %}\n {%- endfor %}\n

\n {%- endif %}\n
\n
\n \n
\n
\n" + }, + { + "Name": "single-page", + "Text": "

{{ page.title }}

\n
{{ page.text }}
\n" + }, + { + "Name": "layout", + "Text": "\n\n \n \n \n {{ page_title | strip_html }}{% if page_title %} « {% endif %}{{ web_log.name | strip_html }}\n {% page_head -%}\n
\n \n
\n
\n {% if messages %}\n
\n {% for msg in messages %}\n \n {% endfor %}\n
\n {% endif %}\n {{ content }} \n
\n
\n
\n
\n myWebLog\n
\n
\n \n\n" + }, + { + "Name": "index", + "Text": "{%- if is_category or is_tag %}\n

{{ page_title }}

\n {%- if subtitle %}

{{ subtitle }}

{% endif -%}\n{% endif %}\n{%- assign post_count = model.posts | size -%}\n{%- if post_count > 0 %}\n
\n {%- for post in model.posts %}\n
\n

\n \n {{ post.title }}\n \n

\n

\n Published on {{ post.published_on | date: \"MMMM d, yyyy\" }}\n at {{ post.published_on | date: \"h:mmtt\" | downcase }}\n by {{ model.authors | value: post.author_id }}\n {{ post.text }}\n {%- assign category_count = post.category_ids | size -%}\n {%- assign tag_count = post.tags | size -%}\n {% if category_count > 0 or tag_count > 0 %}\n

\n

\n {%- if category_count > 0 -%}\n Categorized under:\n {% for cat in post.category_ids -%}\n {%- assign this_cat = categories | where: \"Id\", cat | first -%}\n {{ this_cat.name }}{% unless forloop.last %}, {% endunless %}\n {%- assign cat_names = this_cat.name | concat: cat_names -%}\n {%- endfor -%}\n {%- assign cat_names = \"\" -%}\n
\n {% endif -%}\n {%- if tag_count > 0 %}\n Tagged: {{ post.tags | join: \", \" }}\n {% endif -%}\n

\n {% endif %}\n
\n
\n {% endfor %}\n
\n \n{%- else %}\n

No posts found

\n{%- endif %}\n" + } + ] + }, + "Assets": [ + { + "Id": "default/style.css", + "UpdatedOn": "2023-07-02T20:36:28Z", + "Data": "Lm1lc3NhZ2VzIHsKICBtYXgtd2lkdGg6IDYwcmVtOwogIG1hcmdpbjogYXV0bzsKfQpibG9ja3F1b3RlIHsKICBib3JkZXItbGVmdDogc29saWQgNHB4IGxpZ2h0Z3JheTsKICBwYWRkaW5nLWxlZnQ6IDFyZW07Cn0KLml0ZW0tbWV0YSB7CiAgZm9udC1zaXplOiAxLjFyZW07CiAgZm9udC13ZWlnaHQ6IG5vcm1hbDs7Cn0KLml0ZW0tbWV0YTo6YmVmb3JlIHsKICBjb250ZW50OiAiwrsiOwogIHZlcnRpY2FsLWFsaWduOiB0ZXh0LXRvcDsKfQphOmxpbmssIGE6dmlzaXRlZCB7CiAgdGV4dC1kZWNvcmF0aW9uOiBub25lOwp9CmE6aG92ZXIgewogIHRleHQtZGVjb3JhdGlvbjogdW5kZXJsaW5lOwp9Cg==" + } + ], + "Categories": [ + { + "Id": "S5JflPsJ9EG7gA2LD4m92A", + "WebLogId": "uSitJEuD3UyzWC9jgOHc8g", + "Name": "Favorites", + "Slug": "favorites", + "Description": "Favorite posts" + }, + { + "Id": "jw6N69YtTEWVHAO33jHU-w", + "WebLogId": "uSitJEuD3UyzWC9jgOHc8g", + "Name": "Spitball", + "Slug": "spitball", + "Description": "Posts that may or may not work" + }, + { + "Id": "ScVpyu1e7UiP7bDdge3ZEw", + "WebLogId": "uSitJEuD3UyzWC9jgOHc8g", + "Name": "Moonshot", + "Slug": "moonshot", + "ParentId": "jw6N69YtTEWVHAO33jHU-w" + } + ], + "TagMappings": [ + { + "Id": "Icm027noqE-rPHKZA98vAw", + "WebLogId": "uSitJEuD3UyzWC9jgOHc8g", + "Tag": "f#", + "UrlValue": "f-sharp" + }, + { + "Id": "GdryXh-S0kGsNBs2RIacGA", + "WebLogId": "uSitJEuD3UyzWC9jgOHc8g", + "Tag": "ghoti", + "UrlValue": "fish" + } + ], + "Pages": [ + { + "Id": "hgc_BLEZ50SoAWLuPNISvA", + "WebLogId": "uSitJEuD3UyzWC9jgOHc8g", + "AuthorId": "5EM2rimH9kONpmd2zQkiVA", + "Title": "Page Title", + "Permalink": "a-cool-page.html", + "PublishedOn": "2024-01-20T22:14:28Z", + "UpdatedOn": "2024-01-20T22:14:28Z", + "IsInPageList": false, + "Text": "

A Cool Page

\n

It really is cool!

\n", + "Metadata": [ + { + "Name": "Cool", + "Value": "true" + }, + { + "Name": "Warm", + "Value": "false" + } + ], + "PriorPermalinks": [ + "a-cool-pg.html" + ], + "Revisions": [ + { + "AsOf": "2024-01-20T22:14:28Z", + "Text": "Markdown: # A Cool Page\n\nIt really is cool!" + } + ] + }, + { + "Id": "KouRjvSmm0Wz6TMD8xf67A", + "WebLogId": "uSitJEuD3UyzWC9jgOHc8g", + "AuthorId": "5EM2rimH9kONpmd2zQkiVA", + "Title": "Yet Another Page", + "Permalink": "this-again.html", + "PublishedOn": "2024-01-20T22:15:08Z", + "UpdatedOn": "2024-01-20T22:15:08Z", + "IsInPageList": true, + "Text": "

Page 2

\n\n

It's a trip.", + "Metadata": [], + "PriorPermalinks": [], + "Revisions": [ + { + "AsOf": "2024-01-20T22:15:08Z", + "Text": "HTML:

Page 2

\n\n

It's a trip." + } + ] + } + ], + "Posts": [ + { + "Id": "RCsCU2puYEmkpzotoi8p4g", + "WebLogId": "uSitJEuD3UyzWC9jgOHc8g", + "AuthorId": "5EM2rimH9kONpmd2zQkiVA", + "Status": "Published", + "Title": "Test Post 1", + "Permalink": "2024/test-post-1.html", + "PublishedOn": "2024-01-20T22:17:29Z", + "UpdatedOn": "2024-01-20T22:17:29Z", + "Text": "

Introduction

\n

Visit my web site or my local page for more information.

\n", + "CategoryIds": [ + "ScVpyu1e7UiP7bDdge3ZEw" + ], + "Tags": [ + "f#", + "howdy", + "intro" + ], + "Metadata": [], + "PriorPermalinks": [], + "Revisions": [ + { + "AsOf": "2024-01-20T22:17:29Z", + "Text": "Markdown: ## Introduction\n\nVisit [my web site](https://example.com) or [my local page](/a-fine-page.html) for more information." + } + ] + }, + { + "Id": "osxMfWGlAkyugUbJ1-xD1g", + "WebLogId": "uSitJEuD3UyzWC9jgOHc8g", + "AuthorId": "5EM2rimH9kONpmd2zQkiVA", + "Status": "Published", + "Title": "Episode 1", + "Permalink": "2024/episode-1.html", + "PublishedOn": "2024-01-20T22:24:01Z", + "UpdatedOn": "2024-01-20T22:24:01Z", + "Text": "

It's the launch of my new podcast - y'all come listen!", + "CategoryIds": [ + "S5JflPsJ9EG7gA2LD4m92A" + ], + "Tags": [ + "general", + "podcast" + ], + "Episode": { + "Media": "episode-1.mp3", + "Length": 124302, + "Duration": "0:12:22", + "ImageUrl": "images/ep1-cover.png", + "Subtitle": "An introduction to this podcast", + "Explicit": "clean", + "ChapterFile": "uploads/chapters.json", + "TranscriptUrl": "uploads/transcript.srt", + "TranscriptType": "application/srt", + "TranscriptLang": "en", + "TranscriptCaptions": true, + "SeasonNumber": 1, + "SeasonDescription": "The First Season", + "EpisodeNumber": 1.0, + "EpisodeDescription": "The first episode ever!" + }, + "Metadata": [ + { + "Name": "Density", + "Value": "Non-existent" + }, + { + "Name": "Intensity", + "Value": "Low" + } + ], + "PriorPermalinks": [ + "2024/ep-1.html" + ], + "Revisions": [ + { + "AsOf": "2024-01-20T22:24:01Z", + "Text": "HTML:

It's the launch of my new podcast - y'all come listen!" + } + ] + }, + { + "Id": "l4_Eh4aFO06SqqJjOymNzA", + "WebLogId": "uSitJEuD3UyzWC9jgOHc8g", + "AuthorId": "iIRNLSeY0EanxRPyqGuwVg", + "Status": "Published", + "Title": "Episode 2", + "Permalink": "2024/episode-2.html", + "PublishedOn": "2024-01-20T22:31:32Z", + "UpdatedOn": "2024-01-20T22:31:32Z", + "Text": "

m i n i m a l", + "CategoryIds": [], + "Tags": [ + "podcast" + ], + "Episode": { + "Media": "episode-2.mp3", + "Length": 12873952, + "Duration": "1:03:24", + "SeasonNumber": 1, + "SeasonDescription": "The First Season", + "EpisodeNumber": 2.0, + "EpisodeDescription": "A long update" + }, + "Metadata": [], + "PriorPermalinks": [], + "Revisions": [ + { + "AsOf": "2024-01-20T22:31:32Z", + "Text": "HTML:

m i n i m a l" + } + ] + }, + { + "Id": "QweKbWQiOkqqrjEdgP9wwg", + "WebLogId": "uSitJEuD3UyzWC9jgOHc8g", + "AuthorId": "5EM2rimH9kONpmd2zQkiVA", + "Status": "Published", + "Title": "Something May Happen", + "Permalink": "2024/something.html", + "PublishedOn": "2024-01-20T22:32:59Z", + "UpdatedOn": "2024-01-20T22:32:59Z", + "Text": "

Hmm

", + "CategoryIds": [ + "jw6N69YtTEWVHAO33jHU-w" + ], + "Tags": [ + "f#", + "ghoti", + "speculation" + ], + "Metadata": [], + "PriorPermalinks": [ + "2024/some-thing.html" + ], + "Revisions": [ + { + "AsOf": "2024-01-20T22:32:59Z", + "Text": "HTML:

Hmm

" + } + ] + }, + { + "Id": "VweKbWQiOkqqrjEdgP9wwg", + "WebLogId": "uSitJEuD3UyzWC9jgOHc8g", + "AuthorId": "5EM2rimH9kONpmd2zQkiVA", + "Status": "Draft", + "Title": "An Incomplete Thought", + "Permalink": "2024/still-cooking.html", + "UpdatedOn": "2024-01-24T22:35:00Z", + "Text": "

Think think think", + "CategoryIds": [ + "jw6N69YtTEWVHAO33jHU-w" + ], + "Tags": [ + "speculation" + ], + "Metadata": [], + "PriorPermalinks": [], + "Revisions": [ + { + "AsOf": "2024-01-24T22:35:00Z", + "Text": "HTML:

Think think think" + } + ] + } + ], + "Uploads": [ + { + "Id": "XweKbWQiOkqqrjEdgP9wwg", + "WebLogId": "uSitJEuD3UyzWC9jgOHc8g", + "Path": "2022/06/favicon.ico", + "UpdatedOn": "2022-06-23T21:15:40Z", + "Data": "AAABAAQAEBAAAAEACACrAgAARgAAABgYAAABABgA8QQAAPECAAAgIAAAAQAYAJ8HAADiBwAAQEAAAAEAGAC3FQAAgQ8AAIlQTkcNChoKAAAADUlIRFIAAAAQAAAAEAgGAAAAH/P/YQAAAnJJREFUOI2Nkb1rU2EUxn/nvDc3uTGxqdaPtipVo2L8GLQUcRAVRVAEEfwLFFyquLgI0sHNQZFuDoIUCi46iCgqrh0Lgk5KbWuTkFTz1SSmt/e+DrcVpUM98JyXl8N5eM7zyMRE3qoKooIAD0efk93rsGdgPXuyA6xVTr7QRo0gIqiAtS6eF6daraC6a22CQqmDqkQQwQ8cPC9OvVpERNYmKJZ8RAWjkYpFP0Y87lILLKr6HwrKS6jIHxWtTgw37hKErCKo1Wv4vk/Pxp6/TwgxKqiCqLDQdoi7MYIQrLVUKj8pFOZoNBf48O4tmUyG02fOUa/XeP/2NU5x3mKWl1Us7uJHEvGTdDqLlMslxseeUirk8X2fhJcglU7z4tk4jXqDzq82TnF+OQEFYyxnuyaYm06zb3cPjx7cZ+j4cbLZLO12m2IxT35mllq1yoq9YrZPWpFIAQBBhdGRr1y5fImu9RmMMQBYLKUfRX7O/6BaqzHzbZowDBDZ8dlGVFGzfpM3Yz5fvkzxfWqSwPfp6s4QBAH92/oZOnYMz/Ow1hKGIQ4msbId1ZJgTIWDh4/Qv9kjl8v9Gb15/Yrhq9e4fvMGBw4dolGr4FiTWLkmeoyh9avOppRLs9n6J8rzFy5y5Ogg9+7ewVrLho0bUNTjH5gUzZbixl0skcF/o7e3j9HHT/CSSebLJVS1RXe6ys6+IoO5AqdO5PGSHq7rImJQ1VWIOQ53Ru6RTK3D+fTSx5gYYjxEDCKKvyQYJ4aog6gigLUgEqUhCFu3bGX41m0cqylCMdjAkp+bZXZminK5TLPZondTetkDiyJYLEr0xwr79x/gNzoU7W4mtXWQAAAAAElFTkSuQmCCiVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAEuElEQVRIiZ2Uy4scVRTGf+fcW1X9mGmT6WRIJqPmpdFEBUMwIELEB1kEdZFFcOlCUBE3gq5cCAE3+Qtc+EREd6IbcSPBB75FUYwzmUwmiZlJMv2Yqe7prqp7r4vumYxGBVNwoYq6nN/9vvudI19/fTGICCIgIthImZ1t88ZbJ9m9p0PwGZvrCYfuO8D1PLbZyhARVARRMEbpdBzGxjjXpVqtEEJBURREUfT/Aa1WhuhVBcYInY5HNcZ5oVKpglui3+8Tx/F1KGgXiA6Kq4JRJe16EIt3QqVSpru0TK/Xo1arXQdgqRjYI6AqqHrSjscT4ZwZ2CJCv99HRP4/oNV26NAeFUGM0Ot5nDM4b7DWEkRwzl0nYMkPAas2CVkeyJzivCWKDAQhBPlXQHelS/CeanXkb39kAFgtLhpQEZyDLDc4b7GRBRF8CGuANE1RI4BwaWGBH374lhAChw8fIQSP94E8zzh9egrbHCpQ0QFIIQToZYrzhsiagbwA3W4HBC5fnmdu7ixpmrK42KDZuEJSKjE19RuI0Gw0mJ2Z5szpaWxrGRQQCcO4BjT0oD9L1nXE0TYExQdPo9lg6tRvXDg/x+zpGVa6XcqVChvGNlIql/n1l5/ppintZosL58+hKtjW0tCe1SQJlLXF7fEnjJst9Hqb2TZxA520wffffMHcubPMzcxgjSXr9ymKAjOrVEZrVMol0qUlBEFVcR5sc0nWADq8g5SEejlm
nMssXJim3WwxdeoUCwvzHLz3Xo4de5yxsTpFXjC/8AfT01PMTE9zfu4cnW4Xo2btviS++fsgg/ta6wfnHdaf55FDZ3jwUIXdO3cxtrFOqVSivmkT9fqmtZz08y5n584yf3GexStXaCwu0lhs4JwDAjb3djVRgCBACBFZuJE01Ng6qVxaXOannz5lud0ispakVGK0NsrWyW3cvf9uNtbrJEmJiclJXFHQ7/cJIRBCwKLx+tgSYJB7B7lLKJcjen1FTJUtE1W6nQ7GGHwQvvr8S95+7XX23rmP+w7dz/bt2/HOkSQJIQS8c1g0Yp2Eq6+FkLsIY4Sx+mY67QV2bt/Chg0bCMOeyPYfYH7+Ih99+AEnjh/nrv37eezoUcbHx+l2OqTLbWz4JwACquQuxntHFFsQJc9zkiRZ25UkCSOjozz9zLN8dvIk77/7Nj9+9x0Tk5PsuW0P45vHsEi8rvY6iBqKEFP4HpXIIqp4H64ZFwKUyxUeeOhhduzcyYlXjjNz6nfO/D6FsRZFI5C/LY3AxDg/mKg2sojoun65dllr2X3Lrbz40suYyA7SWOQoGnN1DYqLRqAJLsQUhRJFFtHByPgviKqyY8cOnnv+BTSyJKUEO76xSTXJqZQdI+WC0aqjNhKolj27bhJGayMYtaiaNRWr1oS/GDX4Msawb+8dPPHkU7z3zpvYj1/NUGMwJkLUoqoABIQQwHtQoxhjBn0ybMbhpnW15Wo+xHLgwEEuXVrA2qT2F+nOe5xzeF/gnSMEj6oly3JEysjwAITBkEUCghAkIEHWOEkSc+TIo1hrDVmW02q3WF5u0Wq2SNNlVlZWWOn1yPoZBAeh4OaJe1CR4YHDsO8HxRUdFh/KEiFJSvwJAgAKftfyeo4AAAAASUVORK5CYIKJUE5HDQoaCgAAAA1JSERSAAAAIAAAACAIBgAAAHN6evQAAAdmSURBVFiFpZdrjF1VFcd/a+99z3105k5n2qGBIa1t0UBFAn3ED9AgtrQJoRCI8UGUT4ookRgNglETEhITP2iCMUJMIOKjgIiKgFS0RNsglnFKFZsUMh2gpe1M27kz0/uYe89r+eGce8+9M1N87eRmn7Pv3mv/13/999rryOjoKRURjAEQREAk6X0/ZutHHuHzXyiwfHCAXM7jwMu/4+v33p3MAZS0db2IgGr3uKTjmo2nf7paLUTSzY3Q86yAmDxhKFhrESv09Q8QhiGe53Xsd9nrPIhotrlkyDLYSe9q9RCEzHNSNgRyziLiEUYB1lqMGEp9ZaIoQqRna3rpyNCIgJJtnJAhnTmuWgt7aO/u83kolEoE/jmsNYgIff3nAbCIjqWGZdEUV6uHiEn+aG9OyoLvK/3lEn5QxVoLQN+ygfMD+B+aq9VjEDAdz+kY9zyl1LeMwKcDwFpLFMW0Bau0Q6xouk4UNI1zMqSoprro0QC4aj3CCKkODCKagEDI+VAsevihwVqLokiiUsQkBowqbQMdGYhiMF1+Zsy233sYyDzXJBQmYcFaxcvn8H2TMeBcEoKO7d5QtKH06PE9Q9CIOmdaDIhK5yg6J+Q8RyuwGGsQwPM8ZmZnGRm56D0Nd3C1T2CHHXpOi6vWQYgxRtIwaAeQtYq1Dj+wOGdRVfL5AmfPnPq3Ipybm+PdE8dQVdavu4RisZSAysJPo9FYGIL2SVAExVgBsfiBwZpEA17e48TJ6R4AYRhy+vQUMzMzjIyM0N9f5smnfs7xiQka802++KUvc8n69xOGIcYYxsff5MCrr/DawTFcrZHE3Qhg0hh2AYrU4gcO5yyxKoVCgbNnMwD79v+Za66+mpE1F3J6epIX9+5BEFavXsPWrVvxWz5RFHDs+Fv0lfs5efIkr42N8e7bb+ElGkg2b4u0EwIBYyCIWrQCEg3ESs45KtMVqtVz7N+/j8/cfjtP7N7N4z99DM85rEkSVhhHtIKA/sFBhoeHQZUzU1M0G3X6S32IEVQVV6snLpvUY7rzgU7y4dJjTBZj
Ziu3MrTyIqwxTM+cIV/IM7RikE987BZiP6BvWQmUznFzkqNYKBJHETt27uTRhx+mlC9QKpQI4wiiRKGyZvOoZrRLD/05jnHzBY9SzAlsuJxLNlyOs5bLLt3A0089yQvPPcvOm27i4OgB5qYr9PeXqddrNOoNcjmHcy4JlbUQhonROBMhgKzYMKrSThQpE+3LyRplvfkqHygrb4YtNFa8nMOJEEUxcRxTazb55G2f5o477qRQKIIqlZkKv//DC+x98UUO/+N18s5hTHdi6gLgrRnTtsemKwRGIAgBqgyX9vPAvSNct+1agmbExNGjzDcaDK9axcaNm8h7+UWG54M6zVaL2coMR944wltHJzhx7Dinp6bwfb+TJEQuPrQoabUV3i4eyqWz/Og7wugrf+TGXddz1ZUbsZhML8ZgJMmWsUT4fotWq8n8fJNWq0UYhqgqJhVo234cRYis/ud5smYWqLyd5iffCyj1Lefc7CyHDu7ntdE/YTTGWYvneTjnyBeLlAeXc9327Xx02zaq1RqtZpM4jpfcIY4jRNYc0YUbLp45yxPfr1IeWAnAbGWaWmWCwcEBTk1NUqtWKQ8MYIxhbPRV3jh8mDCO+Phtn2LTli2sWrVqEQiN4/Raf994L4AlcGjzHL/44RkGhy5EAedyHNj3a2695dbk2k2LQElLsPlWk2ef+RXP//YZSvk8F69byz333YeXzxNFEVEUEQY+vt/CyuBX7kcMKjYtCBf8xEAQc9P1cwwsX55USoUCfz/4Mhs3XpVdZNI+PpBzjg9e/iGu3LSZ8fFx3hkf5/lnn6NRbxDFMYW8R9BqcnZqEqfGLRn3nmY85urCamfRWHHGoJqJ9Xxt3dp13P/At3nooR8w9spfeGnPHl7as4cgjlm7fi1bNl+BQ1xPNbtksx5z1RjnDFGkGGtRNf9RWSYi3HXX3ey99DJ2//gRjBjyRjg+8TbvTLydAli0asGDUeaq4KyD9OrO5wvZcT0/9E7bvn0HQRDw9OM/Q8TgxKAoDrMQwBKmrOFczWCtoGIRY8gXSx0PF69YGtINN9xIFEU8/5tfJlWTxjjEy0oVVSBOem33CnFEtW4wxuDSj5ZisYSqnicMSbJRtPsbBAR27bqZycmTHD40Bqq45cWjDPW36F/WYrAcMViOGRqIGR6yDA85Vg4ZigVHubwKMRYjYMRQLC3rMLCQu/aegkHTIlfblaKBz332Th588LtMnXgHOfj6GyrGYo1DjEOMQcSCQqwZCYkXWdL821/3c+01V6XFalJFaRuGauZ1G1ynhE+YCcOQb33zazibG+jMUBLms0Vd13PbSuqxcW4BA11aaI8tiE6n6EHI5XLcc883cO1yO1uXxM9YizXJBWOM6Vw4URRwemqSE+8eR+SK/+sLaeXwMM5ag6oyPz9Po1GnOd+gUjnLqZMnOD01xZkzU1SmK8zMzNBo1EFhxYoL2LljK866/+ooLjXrXzMv8sJi1rtuAAAAAElFTkSuQmCCiVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAVfklEQVR4nL2beZRlRX3HP1V3eUu/fv1ezwxMz0JkURhgGGBmUHQgJiyGgB5jgnpOEojE9ahDxEMAVyIqUTxoAggHEnCJEkhU0CRoouYYhkEGFJgVRgaYpRdmuqf3fstdKn/UXere97pnjCfWnJp7X9Wv6tZv+/5+Vfe2+OUvX1EAQkB6FUf1W0rB29/5A3bv3sqfXVlj+cploBRxEUIwPLSPY+ohl156Kb/tIgB1BAK73Q7S30IvOr7PM66UZlpfNW2p5AAOga+wpESp7CN7eiocOrQnkbCI+pXQCwQ9r4gfED0XZdAozUhKEtGpiMWkQ8SXhHGhcs8ivtcddqsdGkzqCWPijBDiSYRI1iiFoFB0UVgEQYgQAqUUQggEAoXCth1GR8eSORNBmIoQ+ZvoefFqDQaSh2fuI65RqQLNecxnJXcShNIWkNV63grmcwNt
DYWihcIh8EFKmXmaRGLZNuPjExlmVay5aE6lTC1G/CAizalUJUIhI412Y8m8jxWgrUklLdq4onsEdrsdpprNMXhkwQhc1wEsvKCNlAJCmVmPZVuMj08kQjQFmgol/2DzIjJs5TXaKQSzNTdvMmMsBLDbnopoVPahkfNlta+yQhIC17VR2KjQ05Pm1mJbNuPj4/Mu8kglC2Sp5eThrRvgdQdB3SpjF9UuEHfkzf7IVuAWLBQ2oRKgQu0GRrEdh6np6cTK4qJQkY8nkAQoRIxYKVqRozD0mM6mW1JKkaPuFIKmt9tt1aHt7oKIr7HF6IaCaxMqmzCUmCAUz2NJG8uyyZuGyNlywlR2eJdFdyv5ubu3dyt22wtT8gRJ499meycoAriuBTi0PQ1mQsrMY6W0cWyHiYkJ6rUaSggdmmLtiOgustUY4FIIixpUBIYiig6oFAyT0BdRqzQq6O4udhDNEYXBmEEdcIXUyKmvecZVRiCWo12g1dLIakkZxW0VCUBSKvcwNjZGvV5PIleMwwmLWfwzWU8eHiN3PECkpNmxXUDP5Dt9nsD2PJVIyhTEkd1C9zmOBcKh3YokLjS5REa0kr5aP2NjY5x00kmZ5chYQ8lQgRIqXaW52nyGk7CW6jcZa4B2EkKVyopD6Da71dYuECnbEIJJroxkSNtgvC7LtgBJs6UxQIpcLiAEtXo/o6OjBj50YSBZ1zw4kAuRmTliwzBTPnKm3yUBQyQgaBheAnLMc00TGADbkoCk1ZYoEWq3SZYmEEJQqy1mdHQ0v/IuXHbvXTCfN6cQ3ZvnHyN0HtDBtPGjOxCmQpOxAFoSoSILMJ4uhKDev5jR0X2JCcdzJhYe5/G5RXfG9TxFF2/J9ySbgu5ysNttZeKKZkpoDJYmshpmbwpKCAFC0ogwQOY2RDISwN49v8w8I3PbzTyN36KjZSGaXM/CBobdivKAvKmn92pBKxAydgFLQ46IhZJO1L9oCZNTU8mChNJgl5p3NK9SOkzSLd9LQUxFCZOKgCwGT5HggdL7DWFYR2JlKgVcwPb8MMOcvjcQ07COzqvCtgUIi2ZLJ5dJHmCYXr1/Ca1mk1AprAgku+VzyU7TaO+mfZFZl0jbErJsSt5BZ5DabS9l2hwYm31Mrd1Bu0WiMYEGPdui0bQQQpt8nrNyuUyj0YAwBMvKHpqQ7vUh05X2mqYQz2seEHQhTYbE61XpBCK2EJ0IdWZ+CXPRWNN3RfT0dAenKJUcGlMSKUVi/qZmhQDLdgiVwjYflDusyK8hw3Gu3XEdtm3bSrPR5KRXn0Sl0ttBmh1iPihWtMBut03/N6wg0xYDXjpt3I5QFEsOjcOSIABt4bnVKnALZcIwzLaLDr6OqmzdupUf/Nt3WbtuHZVKhX+8727OP+/3OOvMs+cflPU1PK/N4bGxOAzGzOp8XsYpsNTEWcvIuopSioJrg7RoNBVCSCOJSgcWi6WUaUPtca6mkixXm6tKcCI1EwV8/+HvIS3FkmOO5T8feQTbtvnTK67gW9/8JmeuORMprXhiMjE3Ki+/9CL/8p0H2bv3JXzPj84DTPDJxf/OMwHDKoR2a9e1AUmjGSCif1m7hlKphzAMO/oSMBTw4ot72LlzBxOT4ziOQ+AH+L5HrVanUumlXu/nZz/7b0589Yns3r6dWrUCCHZs38HAsmVZ/BBgWTZDg4MMDg2yc8c2JqYnOPbYpWw4bwOokOED+2MQjHw6QnZTACmCdg+HQQi2Y4G0aTYDjQPGmYD2JkWpp4JSYWRlgjivUEowNTXJLbfczCmnnsJ1N1yPEIJWs0Xb8/B9HxUqdu3cya9+9SvOXreW3c8/T1vB3pFDWFIysWULrVaLb3zjPprNBgcPHuTQoYM0Gg1OX72aDRs28K53X4Xnedx155386JH/YGDRInqKhXwUoIu2IwGZoYc0eZIW2JYWQKPRRghhRIKEmr5qPzMzs/T2VjNYcuedt7Fs+TLuuOMuVq48joA2jz76KJs3
b2ZkZISp8Un8IKBYKtLb20t/vc6fX3klK1esYMXKlbRaLaamppiZmUEpvRstFAr0VqvU63X279vHk089xS1f/CJ7du/GFnBMrYbv+wDZvUB35smAlZARR5GSrUDpdFjYEQYIhMieCgHU+hcxPDzMwMAAAIcOHeSrd97ODTd8jHVrz2F2dpaHv/89PnvT3+C1WlSKJWx99q6tKIowfhjy7w89RDOyjv5FixgYGGDFccdpRURlcHCQF3bvZm5ujmpPmUqpTNlxUCh8z0+iqt32RYdpZ10g9YUkLBo+nJi8dJlr6oVKY0MUE1erfQwNDbF27VoefPB+/NDj3nu/Tl+1j/u+9g/cf/+3mR4bo2w7iB690Ew8j1DSlhKnVKK3XCYIQ6bm5hgfH2f7tm0sqddTABeC3mKRarGIQhEEAUqphPH4UEa22pBWRasNbY/06inavqLtQdvXbboKfF8QTH2b1Ytv5NyT/5WpiWGkEJEVRDViolgssX//Pu644+9ZedxKbv7cLezbt5e3/tFl3P7lW/FnZim6biaC5LIJ4mMWpRRhVC0hWHXqKv7pgQewSyUE2gUFEIYhQRASBtE1VKgwRIWKQIX6BOuUN2xJEybDCrKhL23L5ALtRzi//1EGesqEwFMjhygtfxVvvPCt9FYXJ+NtC+7/1p1UKzZf+fJt1Gp1bvnSF/jOg/+MJSWzzSYC6Cv3pGwamZ8wbpJ74xIqRX3pUjZu3Mg1GzdSr1RIjCYqmXBrivWEc55UyfEXhpknV5H5bQrH9b7H+YueYEW1igJavs8+x2X1urWMjo9T7lmMLSEIW1xwwYWcs/51jI6NcuPffIItmzbp7FBKbrvjdp7b9Ry33XorlWIpk+123c9mCRACZnyfguMgggCZHHpnS3IOaZTovYCZB8RXkfutn2z+tpw/ZN/Ej1lSKuM6NpYQLF16LO945zvo6eujt9wPKMIwwPN8vvmtr/HlL32JeqVCpVyhiaI1O8vTTz/D3XfdRbVYRhHSVirZUscRxYp3n5EWE2wAVAhFIZFBSMZVYksyiPNiEUtOe1JlmDYRPy8MQ/rxEdrxhb9mVcGj4DrUK2V+PjqKVSrQ01PBth1CFK1mi7FDB1FBSKlYjFaiaIUhQRCAUszMznLcq36HK/7iKtacsYZKpYoKQ5rNJodGD3H48GF27NjG1mefYce2bfieT6lQoFQooLe2YZJNHvkIyRBA32ueVPkXiok75Ew+7x4gCFtPcHr1bvqlRVPAopNfzcTEBIHvoYIQv90i8IN0c4VAhSGBUoRBgF0ssmRggI0fvprXn7vhiAsOCfDDNlueeIJNjz7G5s2bmZ2epjkzC2GIFKLj5cyCAigd/5Ra2Oy7mH/U2GgEeAH0lfZRlT+mrzLMu654A+//4AeYmpjUyUZgMfLKEI9t2sSWJ3/O+OFx+mp9nHbaai688CJOPOEkFi1azNGWtt9krjmH53v4no/veUxMTjI5Mclzu3bx+OOPs+XxnyNQuLZDwbGxpBW5kuiYT9grf2GcX+XNXxi7wKyA/EDheeZMFsf0Pstl5z1BqRjwnve+l9ec/Bpcq5SeE/6GRSnFXGuatufhtds6VfZ8giAgCAKklFi2heu47Nq5k+HhYfbs2cP44cO02u2sZygVnWCteHpBj0nDYVZ6+XACFot6tnHjR0v016t898HbWbPmDK772PWEPhSc4m/EfNtv0vZaBBGg+p6H53n4vmY+DMMo0VFHhQEqDPXmTKx4Jj1S6bAQMf9k0R4hLRa10i4+/RHBCSe8CpRi1/an2LZ1M2eeeRoXXXwxfjtI0uRioYi0JK5boFwqUa/Xcd1C5hEhPs12Ez/wUKFObMIgwA98PM8n8H18XzMfqlDvnDs108m8UqB0dmgjrIWpO91mng5JEDoEQRspJWEYsmr1ek5dvZ6DI3u59pprGRkZoug6CAVBGKKAUOkMLYgiQqFYZGDpUk49/XROOvFE1p2zjoGBZTiugxCSwI9M
3g8IgkjrSh0183rpIomlQhy349cJGgv0Scr2Hm68eprTTj853ftHxbItvvPAfcxMDrH7+V0UbJuC4+h3iXFRsUB06uoHAX4QUCiVOGZgKedueAOXX345rlug0WhozUemD0cvAC00PdYSfR+6Md616y2e+D9WiRTTnL9+lmXLjo0+eyGSst4hzsxMI8IWn/nM53CKJeZaTYTrYrkFhG1jFQr0VPso9vbSU6tRKJf0mUOrTXNmhm1PP8O9997LyCuvsGjxYnp6yli2nVrBUTAe3SQWc2QX6CjzWYEgCPTBJ3SJxUKxdOlyXnz+KRzX5c2XvYU3X/YWnbEFAUEY6peVjpNEmyAIaDab7N+/l9tu+wpzh9ss6avxi02PsWXTYzilIn9wySW87fI/QQrZeeaYZz4SlFIqspwQS9Q23pjXpKG6X6NKlJrjgtdOMzCwOLsjjGq5VOaJzT/hwgsuyIhTSIllWViWldmSSylxXZfFi5dw6aVvZvnKlewfGmRiYgLXshBBwNZnt/L9hx8mCEOWr1hBpVLpEETMfKjinWFAEPj4vo9EWGSrTCvGfQdd57gwdFHRoZqUMlOFEFTrdSYnJyOZRRvexEW0ONL8QySyjW9e99rXc/Pnb+Hqa65lxvMIlKJcKOAi+OFDD/Geq67ih4880sG8UoogDJIzxna7SavZpDk3h1RCkq1WthJXaVSR/R2N1V+KSFBRSir0gUlcBYJWq526kbm5ICeEpC89U4hv1q5bzz333Mv6Decx227Fo6mVynzt7nv49Cc/yU9/8hPCMKRY0KE1DEJ8v007Ynx2ZoqZ6UlkRuNCHqW5d7EUJAiH6Tm9G4mBTyTalDr65PbyuYOHDMRkdyimqKC30sv73/dBPnXjZ2kLkQBcuVDglZf38vV77uHtb/tjPnvTTWzb+ixTU1Na660mM9PTTE1OMDE+hp0c7plF5LdUC4U/k8xmtmFpAcj4C5E8TfzewDizzxClQtBwar74zu7nBbBq1al89at3c8cdf8f2Z5+haDsopSg6LkXH5fmtW9nx9DMUesr0VqusWXMGa1avotVoMDs73c0CYhA0NNsl5HWtwmF2zkJ/YhMfTcUWkJp4p1C7CzhvAd3oBNDT08N1132cN15wESoC0hhMLcui4DjQ9pgZHeO/HvkRf/uFW/npzzYxfvgwNke9UTkKKxAW03MRBuS+FImvth2d+xnfL4j4UC7zCEPzxldf5meucd4vhUQJ+Mt3v4/168/h5s/fRG/B2HsY85YKLih4Yddudu9+Id7/ddOqZdSjSZAkSMnMnE2KAQIZmXx82FKp9OJ5XicOdsg3hw05USa/jEMcAaxZcxbXXf8JGr4fvazNz6rXYkmJFSozDC4Ehl3cpCM0aqHNRBiQhD8p0oqgr1ZnaGgoy70wbQQDDKMoIOahTZg330oLzjprLZ/81GeYarUQ+XWI1B2lFPNgwBGZnUdo0mZ6NisAsyKg2ldjcHAwmwdETMcxXySaIsGNrrSJFXTIk1NWreL6Gz5Bw/OwbUsfikiJtGT07iJa17yA1lEXsoS0zsxZQGgYUDYb7OurceDAgYzddzusSZ0rC5qiS1/yf2wF0ZCzzzqbK696Lw3Pw7J1timjrDO2jBwIHmW4m68Ii6k5GxFZQCdL0NdXY/DlnR0HLcnHKLnPvbJH2Z0EWSHE4JgK4eKLLkaFAQ98+xv0FAqEoQ7vYfThZM4FxBHqkVzDZmrWikzVDH8pEPbV6hwYPNBFeLlrjsFOwm4thjAMsje96RLOWvdamr6H7dhYtoUTXY/sAgZzXTGAAFQL1AyEk4RzUktXSoQV+3+aDvfVDAwQIhFWDGoJoCE63Cehj3wgblNGlEnGmuMRbPzwRzjn9eejBDiOjR1XwhaEPoQBqEC/ZVBh9DsEO8Ap+JSLPuWCT6mkr+WCT7kYUCqGlAoK14FSUeE6AiEXRS8pU3SKtVKr9UcYYGhNGAqLh8zrjabJg5kdJtfoDbbOGFRC
/K4rr+KvnnmaQAW4jq0d5sxL/kf194XUq4r+WkilpKhXFfU+RX9VUXBTn7NsB9t2sC0Hy3GwLP23AJZt4ziubrd1FDAPKEwznp2d4fc3nM5zzz2XLtAUUv4TL2G8IFPxTNn0OD7XF2S/D8x+YJ0mU9d89GqKMiAIFeKxJ3cq/W1f7KtRrp7cSyyZfveTAagupzCdH0Rn+6S0WHv6crZv347jODmKHAL+P5WXXn6Rj99wLcevXIYsl/soFasUi70UCj24hRKOW9QatW0sS8dv05RMhpKMz9j3z1dB5+1SSmZnZ7ssLW/e8xQhOmlM1zHCqsiTCDjh+BP46LXXMzkzixRGliStzuQlf6hhMnu0fZlkCFg6sJxGo6HXKUzAIsno6Ap+cWyhs98ETwMEySkgBty1Z6+nUu3H7jDZbtLNSHABE++iqXxRKJYtX6ktoMOt8gGty+y/iYcY6xFC8IEPfAg7n7CIeQYcYb6OlS3Ut2zZSuYac9kFRZ8Dpq94DfxK+rOv8rtuGOnS2VF03/HHn4Cdbluz+CoSSI77FgI38wzQ0tWSWNJCSMH01CRjo4cYPfQKExPjTEwcZnpqmryMulmYyPR3ZEmZNczbuUC7HftNXGR+AZH/KaUIAp8gCKO3Mj5+EBCGPr7nMzR4gP0H9jJ4YC8v73mB/fv2Mji4j5HhQUBg2w69vTWWDizjjb97LmecsSZZTvTZYLLXV/qBKWcq+kYxyStU7g/Dos/l049KDdWJ5A+ulSFNgSYWz710OIll09PTjAwPcfDgMAdHhjl4cISDr4wwMjzI+PgYjUZDv0+zXWzHxZIujlPALRTprdbp6enFcQv09JSp12v01/uo1ar09vZSq/ZSqZQ49pg6PWV3Hu389sv/AjBdvhBnmBuqAAAAAElFTkSuQmCC" + } + ] +} \ No newline at end of file diff --git a/src/MyWebLog.sln b/src/MyWebLog.sln index a594b6e..973bd50 100644 --- a/src/MyWebLog.sln +++ b/src/MyWebLog.sln @@ -9,6 +9,8 @@ Project("{6EC3EE1D-3C4E-46DD-8F32-0CC8E7565705}") = "MyWebLog.Data", "MyWebLog.D EndProject Project("{F2A71F9B-5D33-465A-A702-920D77279786}") = "MyWebLog", "MyWebLog\MyWebLog.fsproj", "{5655B63D-429F-4CCD-A14C-FBD74D987ECB}" EndProject +Project("{F2A71F9B-5D33-465A-A702-920D77279786}") = "MyWebLog.Tests", "MyWebLog.Tests\MyWebLog.Tests.fsproj", "{D927D39F-26EC-4A54-989A-9D474F232398}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -27,6 +29,10 @@ Global {5655B63D-429F-4CCD-A14C-FBD74D987ECB}.Debug|Any CPU.Build.0 = Debug|Any CPU {5655B63D-429F-4CCD-A14C-FBD74D987ECB}.Release|Any CPU.ActiveCfg = Release|Any CPU {5655B63D-429F-4CCD-A14C-FBD74D987ECB}.Release|Any CPU.Build.0 = Release|Any CPU + {D927D39F-26EC-4A54-989A-9D474F232398}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D927D39F-26EC-4A54-989A-9D474F232398}.Debug|Any 
CPU.Build.0 = Debug|Any CPU + {D927D39F-26EC-4A54-989A-9D474F232398}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D927D39F-26EC-4A54-989A-9D474F232398}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/src/MyWebLog/Caches.fs b/src/MyWebLog/Caches.fs index 2c4e74b..c459920 100644 --- a/src/MyWebLog/Caches.fs +++ b/src/MyWebLog/Caches.fs @@ -13,25 +13,25 @@ module Extensions = open Microsoft.Extensions.DependencyInjection /// Hold variable for the configured generator string - let mutable private generatorString : string option = None + let mutable private generatorString: string option = None type HttpContext with /// The anti-CSRF service - member this.AntiForgery = this.RequestServices.GetRequiredService () + member this.AntiForgery = this.RequestServices.GetRequiredService() /// The cross-site request forgery token set for this request member this.CsrfTokenSet = this.AntiForgery.GetAndStoreTokens this /// The data implementation - member this.Data = this.RequestServices.GetRequiredService () + member this.Data = this.RequestServices.GetRequiredService() /// The generator string member this.Generator = match generatorString with | Some gen -> gen | None -> - let cfg = this.RequestServices.GetRequiredService () + let cfg = this.RequestServices.GetRequiredService() generatorString <- match Option.ofObj cfg["Generator"] with | Some gen -> Some gen @@ -42,7 +42,7 @@ module Extensions = member this.UserAccessLevel = this.User.Claims |> Seq.tryFind (fun claim -> claim.Type = ClaimTypes.Role) - |> Option.map (fun claim -> AccessLevel.parse claim.Value) + |> Option.map (fun claim -> AccessLevel.Parse claim.Value) /// The user ID for the current request member this.UserId = @@ -53,7 +53,7 @@ module Extensions = /// Does the current user have the requested level of access? 
member this.HasAccessLevel level = - defaultArg (this.UserAccessLevel |> Option.map (AccessLevel.hasAccess level)) false + defaultArg (this.UserAccessLevel |> Option.map _.HasAccess(level)) false open System.Collections.Concurrent @@ -65,30 +65,56 @@ open System.Collections.Concurrent /// settings update page module WebLogCache = + open System.Text.RegularExpressions + + /// A redirect rule that caches compiled regular expression rules + type CachedRedirectRule = + /// A straight text match rule + | Text of string * string + /// A regular expression match rule + | RegEx of Regex * string + /// The cache of web log details let mutable private _cache : WebLog list = [] + /// Redirect rules with compiled regular expressions + let mutable private _redirectCache = ConcurrentDictionary () + /// Try to get the web log for the current request (longest matching URL base wins) let tryGet (path : string) = _cache |> List.filter (fun wl -> path.StartsWith wl.UrlBase) - |> List.sortByDescending (fun wl -> wl.UrlBase.Length) + |> List.sortByDescending _.UrlBase.Length |> List.tryHead /// Cache the web log for a particular host let set webLog = _cache <- webLog :: (_cache |> List.filter (fun wl -> wl.Id <> webLog.Id)) + _redirectCache[webLog.Id] <- + webLog.RedirectRules + |> List.map (fun it -> + let relUrl = Permalink >> webLog.RelativeUrl + let urlTo = if it.To.Contains "://" then it.To else relUrl it.To + if it.IsRegex then + let pattern = if it.From.StartsWith "^" then $"^{relUrl it.From[1..]}" else it.From + RegEx(Regex(pattern, RegexOptions.Compiled ||| RegexOptions.IgnoreCase), urlTo) + else + Text(relUrl it.From, urlTo)) /// Get all cached web logs let all () = _cache /// Fill the web log cache from the database - let fill (data : IData) = backgroundTask { - let! webLogs = data.WebLog.All () - _cache <- webLogs + let fill (data: IData) = backgroundTask { + let! 
webLogs = data.WebLog.All() + webLogs |> List.iter set } + /// Get the cached redirect rules for the given web log + let redirectRules webLogId = + _redirectCache[webLogId] + /// Is the given theme in use by any web logs? let isThemeInUse themeId = _cache |> List.exists (fun wl -> wl.ThemeId = themeId) @@ -100,28 +126,28 @@ module PageListCache = open MyWebLog.ViewModels /// Cache of displayed pages - let private _cache = ConcurrentDictionary () + let private _cache = ConcurrentDictionary () - let private fillPages (webLog : WebLog) pages = + let private fillPages (webLog: WebLog) pages = _cache[webLog.Id] <- pages - |> List.map (fun pg -> DisplayPage.fromPage webLog { pg with Text = "" }) + |> List.map (fun pg -> DisplayPage.FromPage webLog { pg with Text = "" }) |> Array.ofList /// Are there pages cached for this web log? - let exists (ctx : HttpContext) = _cache.ContainsKey ctx.WebLog.Id + let exists (ctx: HttpContext) = _cache.ContainsKey ctx.WebLog.Id /// Get the pages for the web log for this request - let get (ctx : HttpContext) = _cache[ctx.WebLog.Id] + let get (ctx: HttpContext) = _cache[ctx.WebLog.Id] /// Update the pages for the current web log - let update (ctx : HttpContext) = backgroundTask { + let update (ctx: HttpContext) = backgroundTask { let! pages = ctx.Data.Page.FindListed ctx.WebLog.Id fillPages ctx.WebLog pages } /// Refresh the pages for the given web log - let refresh (webLog : WebLog) (data : IData) = backgroundTask { + let refresh (webLog: WebLog) (data: IData) = backgroundTask { let! pages = data.Page.FindListed webLog.Id fillPages webLog pages } @@ -133,22 +159,22 @@ module CategoryCache = open MyWebLog.ViewModels /// The cache itself - let private _cache = ConcurrentDictionary () + let private _cache = ConcurrentDictionary () /// Are there categories cached for this web log? 
- let exists (ctx : HttpContext) = _cache.ContainsKey ctx.WebLog.Id + let exists (ctx: HttpContext) = _cache.ContainsKey ctx.WebLog.Id /// Get the categories for the web log for this request - let get (ctx : HttpContext) = _cache[ctx.WebLog.Id] + let get (ctx: HttpContext) = _cache[ctx.WebLog.Id] /// Update the cache with fresh data - let update (ctx : HttpContext) = backgroundTask { + let update (ctx: HttpContext) = backgroundTask { let! cats = ctx.Data.Category.FindAllForView ctx.WebLog.Id _cache[ctx.WebLog.Id] <- cats } /// Refresh the category cache for the given web log - let refresh webLogId (data : IData) = backgroundTask { + let refresh webLogId (data: IData) = backgroundTask { let! cats = data.Category.FindAllForView webLogId _cache[webLogId] <- cats } @@ -165,11 +191,11 @@ module TemplateCache = let private _cache = ConcurrentDictionary () /// Custom include parameter pattern - let private hasInclude = Regex ("""{% include_template \"(.*)\" %}""", RegexOptions.None, TimeSpan.FromSeconds 2) + let private hasInclude = Regex("""{% include_template \"(.*)\" %}""", RegexOptions.None, TimeSpan.FromSeconds 2) /// Get a template for the given theme and template name - let get (themeId : ThemeId) (templateName : string) (data : IData) = backgroundTask { - let templatePath = $"{ThemeId.toString themeId}/{templateName}" + let get (themeId: ThemeId) (templateName: string) (data: IData) = backgroundTask { + let templatePath = $"{themeId}/{templateName}" match _cache.ContainsKey templatePath with | true -> return Ok _cache[templatePath] | false -> @@ -189,16 +215,16 @@ module TemplateCache = if childNotFound = "" then child.Groups[1].Value else $"{childNotFound}; {child.Groups[1].Value}" "" - text <- text.Replace (child.Value, childText) + text <- text.Replace(child.Value, childText) if childNotFound <> "" then let s = if childNotFound.IndexOf ";" >= 0 then "s" else "" return Error $"Could not find the child template{s} {childNotFound} required by {templateName}" else 
- _cache[templatePath] <- Template.Parse (text, SyntaxCompatibility.DotLiquid22) + _cache[templatePath] <- Template.Parse(text, SyntaxCompatibility.DotLiquid22) return Ok _cache[templatePath] | None -> - return Error $"Theme ID {ThemeId.toString themeId} does not have a template named {templateName}" - | None -> return Result.Error $"Theme ID {ThemeId.toString themeId} does not exist" + return Error $"Theme ID {themeId} does not have a template named {templateName}" + | None -> return Error $"Theme ID {themeId} does not exist" } /// Get all theme/template names currently cached @@ -206,16 +232,16 @@ module TemplateCache = _cache.Keys |> Seq.sort |> Seq.toList /// Invalidate all template cache entries for the given theme ID - let invalidateTheme (themeId : ThemeId) = - let keyPrefix = ThemeId.toString themeId + let invalidateTheme (themeId: ThemeId) = + let keyPrefix = string themeId _cache.Keys - |> Seq.filter (fun key -> key.StartsWith keyPrefix) + |> Seq.filter _.StartsWith(keyPrefix) |> List.ofSeq |> List.iter (fun key -> match _cache.TryRemove key with _, _ -> ()) /// Remove all entries from the template cache let empty () = - _cache.Clear () + _cache.Clear() /// A cache of asset names by themes @@ -228,14 +254,14 @@ module ThemeAssetCache = let get themeId = _cache[themeId] /// Refresh the list of assets for the given theme - let refreshTheme themeId (data : IData) = backgroundTask { + let refreshTheme themeId (data: IData) = backgroundTask { let! assets = data.ThemeAsset.FindByTheme themeId _cache[themeId] <- assets |> List.map (fun a -> match a.Id with ThemeAssetId (_, path) -> path) } /// Fill the theme asset cache - let fill (data : IData) = backgroundTask { - let! assets = data.ThemeAsset.All () + let fill (data: IData) = backgroundTask { + let! 
assets = data.ThemeAsset.All() for asset in assets do let (ThemeAssetId (themeId, path)) = asset.Id if not (_cache.ContainsKey themeId) then _cache[themeId] <- [] diff --git a/src/MyWebLog/DotLiquidBespoke.fs b/src/MyWebLog/DotLiquidBespoke.fs index 4cbd799..e68f448 100644 --- a/src/MyWebLog/DotLiquidBespoke.fs +++ b/src/MyWebLog/DotLiquidBespoke.fs @@ -7,6 +7,7 @@ open System.Web open DotLiquid open Giraffe.ViewEngine open MyWebLog.ViewModels +open MyWebLog.Views /// Extensions on the DotLiquid Context object type Context with @@ -17,11 +18,11 @@ type Context with /// Does an asset exist for the current theme? -let assetExists fileName (webLog : WebLog) = +let assetExists fileName (webLog: WebLog) = ThemeAssetCache.get webLog.ThemeId |> List.exists (fun it -> it = fileName) /// Obtain the link from known types -let permalink (ctx : Context) (item : obj) (linkFunc : WebLog -> Permalink -> string) = +let permalink (item: obj) (linkFunc: Permalink -> string) = match item with | :? String as link -> Some link | :? DisplayPage as page -> Some page.Permalink @@ -29,130 +30,130 @@ let permalink (ctx : Context) (item : obj) (linkFunc : WebLog -> Permalink -> st | :? 
DropProxy as proxy -> Option.ofObj proxy["Permalink"] |> Option.map string | _ -> None |> function - | Some link -> linkFunc ctx.WebLog (Permalink link) + | Some link -> linkFunc (Permalink link) | None -> $"alert('unknown item type {item.GetType().Name}')" /// A filter to generate an absolute link -type AbsoluteLinkFilter () = - static member AbsoluteLink (ctx : Context, item : obj) = - permalink ctx item WebLog.absoluteUrl +type AbsoluteLinkFilter() = + static member AbsoluteLink(ctx: Context, item: obj) = + permalink item ctx.WebLog.AbsoluteUrl /// A filter to generate a link with posts categorized under the given category -type CategoryLinkFilter () = - static member CategoryLink (ctx : Context, catObj : obj) = +type CategoryLinkFilter() = + static member CategoryLink(ctx: Context, catObj: obj) = match catObj with | :? DisplayCategory as cat -> Some cat.Slug | :? DropProxy as proxy -> Option.ofObj proxy["Slug"] |> Option.map string | _ -> None |> function - | Some slug -> WebLog.relativeUrl ctx.WebLog (Permalink $"category/{slug}/") + | Some slug -> ctx.WebLog.RelativeUrl(Permalink $"category/{slug}/") | None -> $"alert('unknown category object type {catObj.GetType().Name}')" - + /// A filter to generate a link that will edit a page -type EditPageLinkFilter () = - static member EditPageLink (ctx : Context, pageObj : obj) = +type EditPageLinkFilter() = + static member EditPageLink(ctx: Context, pageObj: obj) = match pageObj with | :? DisplayPage as page -> Some page.Id | :? DropProxy as proxy -> Option.ofObj proxy["Id"] |> Option.map string | :? 
String as theId -> Some theId | _ -> None |> function - | Some pageId -> WebLog.relativeUrl ctx.WebLog (Permalink $"admin/page/{pageId}/edit") + | Some pageId -> ctx.WebLog.RelativeUrl(Permalink $"admin/page/{pageId}/edit") | None -> $"alert('unknown page object type {pageObj.GetType().Name}')" - - + + /// A filter to generate a link that will edit a post -type EditPostLinkFilter () = - static member EditPostLink (ctx : Context, postObj : obj) = +type EditPostLinkFilter() = + static member EditPostLink(ctx: Context, postObj: obj) = match postObj with | :? PostListItem as post -> Some post.Id | :? DropProxy as proxy -> Option.ofObj proxy["Id"] |> Option.map string | :? String as theId -> Some theId | _ -> None |> function - | Some postId -> WebLog.relativeUrl ctx.WebLog (Permalink $"admin/post/{postId}/edit") + | Some postId -> ctx.WebLog.RelativeUrl(Permalink $"admin/post/{postId}/edit") | None -> $"alert('unknown post object type {postObj.GetType().Name}')" /// A filter to generate nav links, highlighting the active link (exact match) -type NavLinkFilter () = - static member NavLink (ctx : Context, url : string, text : string) = - let _, path = WebLog.hostAndPath ctx.WebLog - let path = if path = "" then path else $"{path.Substring 1}/" +type NavLinkFilter() = + static member NavLink(ctx: Context, url: string, text: string) = + let extraPath = ctx.WebLog.ExtraPath + let path = if extraPath = "" then "" else $"{extraPath[1..]}/" seq { - "

  • " text - "
  • " + "" } |> String.concat "" /// A filter to generate a link for theme asset (image, stylesheet, script, etc.) -type ThemeAssetFilter () = - static member ThemeAsset (ctx : Context, asset : string) = - WebLog.relativeUrl ctx.WebLog (Permalink $"themes/{ThemeId.toString ctx.WebLog.ThemeId}/{asset}") +type ThemeAssetFilter() = + static member ThemeAsset(ctx: Context, asset: string) = + ctx.WebLog.RelativeUrl(Permalink $"themes/{ctx.WebLog.ThemeId}/{asset}") /// Create various items in the page header based on the state of the page being generated -type PageHeadTag () = - inherit Tag () +type PageHeadTag() = + inherit Tag() - override this.Render (context : Context, result : TextWriter) = + override this.Render(context: Context, result: TextWriter) = let webLog = context.WebLog // spacer let s = " " let getBool name = defaultArg (context.Environments[0].[name] |> Option.ofObj |> Option.map Convert.ToBoolean) false - result.WriteLine $"""""" + result.WriteLine $"""""" // Theme assets if assetExists "style.css" webLog then - result.WriteLine $"""{s}""" + result.WriteLine $"""{s}""" if assetExists "favicon.ico" webLog then - result.WriteLine $"""{s}""" + result.WriteLine $"""{s}""" // RSS feeds and canonical URLs let feedLink title url = let escTitle = HttpUtility.HtmlAttributeEncode title - let relUrl = WebLog.relativeUrl webLog (Permalink url) - $"""{s}""" + let relUrl = webLog.RelativeUrl(Permalink url) + $"""{s}""" if webLog.Rss.IsFeedEnabled && getBool "is_home" then - result.WriteLine (feedLink webLog.Name webLog.Rss.FeedName) - result.WriteLine $"""{s}""" + result.WriteLine(feedLink webLog.Name webLog.Rss.FeedName) + result.WriteLine $"""{s}""" if webLog.Rss.IsCategoryEnabled && getBool "is_category_home" then let slug = context.Environments[0].["slug"] :?> string - result.WriteLine (feedLink webLog.Name $"category/{slug}/{webLog.Rss.FeedName}") + result.WriteLine(feedLink webLog.Name $"category/{slug}/{webLog.Rss.FeedName}") if webLog.Rss.IsTagEnabled && 
getBool "is_tag_home" then let slug = context.Environments[0].["slug"] :?> string - result.WriteLine (feedLink webLog.Name $"tag/{slug}/{webLog.Rss.FeedName}") + result.WriteLine(feedLink webLog.Name $"tag/{slug}/{webLog.Rss.FeedName}") if getBool "is_post" then let post = context.Environments[0].["model"] :?> PostDisplay - let url = WebLog.absoluteUrl webLog (Permalink post.Posts[0].Permalink) - result.WriteLine $"""{s}""" + let url = webLog.AbsoluteUrl (Permalink post.Posts[0].Permalink) + result.WriteLine $"""{s}""" if getBool "is_page" then let page = context.Environments[0].["page"] :?> DisplayPage - let url = WebLog.absoluteUrl webLog (Permalink page.Permalink) - result.WriteLine $"""{s}""" + let url = webLog.AbsoluteUrl (Permalink page.Permalink) + result.WriteLine $"""{s}""" /// Create various items in the page header based on the state of the page being generated -type PageFootTag () = - inherit Tag () +type PageFootTag() = + inherit Tag() - override this.Render (context : Context, result : TextWriter) = + override this.Render(context: Context, result: TextWriter) = let webLog = context.WebLog // spacer let s = " " @@ -161,48 +162,48 @@ type PageFootTag () = result.WriteLine $"{s}{RenderView.AsString.htmlNode Htmx.Script.minified}" if assetExists "script.js" webLog then - result.WriteLine $"""{s}""" + result.WriteLine $"""{s}""" + - /// A filter to generate a relative link -type RelativeLinkFilter () = - static member RelativeLink (ctx : Context, item : obj) = - permalink ctx item WebLog.relativeUrl +type RelativeLinkFilter() = + static member RelativeLink(ctx: Context, item: obj) = + permalink item ctx.WebLog.RelativeUrl /// A filter to generate a link with posts tagged with the given tag -type TagLinkFilter () = - static member TagLink (ctx : Context, tag : string) = +type TagLinkFilter() = + static member TagLink(ctx: Context, tag: string) = ctx.Environments[0].["tag_mappings"] :?> TagMap list |> List.tryFind (fun it -> it.Tag = tag) |> function | Some 
tagMap -> tagMap.UrlValue - | None -> tag.Replace (" ", "+") - |> function tagUrl -> WebLog.relativeUrl ctx.WebLog (Permalink $"tag/{tagUrl}/") + | None -> tag.Replace(" ", "+") + |> function tagUrl -> ctx.WebLog.RelativeUrl(Permalink $"tag/{tagUrl}/") /// Create links for a user to log on or off, and a dashboard link if they are logged off -type UserLinksTag () = - inherit Tag () +type UserLinksTag() = + inherit Tag() - override this.Render (context : Context, result : TextWriter) = - let link it = WebLog.relativeUrl context.WebLog (Permalink it) + override this.Render(context: Context, result: TextWriter) = + let link it = context.WebLog.RelativeUrl(Permalink it) seq { """" } |> Seq.iter result.WriteLine /// A filter to retrieve the value of a meta item from a list // (shorter than `{% assign item = list | where: "Name", [name] | first %}{{ item.value }}`) -type ValueFilter () = - static member Value (_ : Context, items : MetaItem list, name : string) = +type ValueFilter() = + static member Value(_: Context, items: MetaItem list, name: string) = match items |> List.tryFind (fun it -> it.Name = name) with | Some item -> item.Value | None -> $"-- {name} not found --" @@ -224,15 +225,11 @@ let register () = Template.RegisterTag "user_links" [ // Domain types - typeof; typeof; typeof; typeof; typeof - typeof; typeof; typeof; typeof + typeof; typeof; typeof; typeof; typeof; typeof + typeof; typeof // View models - typeof; typeof; typeof; typeof - typeof; typeof; typeof; typeof - typeof; typeof; typeof; typeof - typeof; typeof; typeof; typeof - typeof; typeof; typeof; typeof - typeof; typeof; typeof + typeof; typeof; typeof; typeof; typeof + typeof; typeof // Framework types typeof; typeof; typeof; typeof typeof; typeof; typeof; typeof diff --git a/src/MyWebLog/Handlers/Admin.fs b/src/MyWebLog/Handlers/Admin.fs index 30ebac4..a482305 100644 --- a/src/MyWebLog/Handlers/Admin.fs +++ b/src/MyWebLog/Handlers/Admin.fs @@ -3,16 +3,17 @@ module MyWebLog.Handlers.Admin open 
System.Threading.Tasks open Giraffe +open Giraffe.Htmx open MyWebLog open MyWebLog.ViewModels open NodaTime -/// ~~ DASHBOARDS ~~ +/// ~~~ DASHBOARDS ~~~ module Dashboard = // GET /admin/dashboard let user : HttpHandler = requireAccess Author >=> fun next ctx -> task { - let getCount (f : WebLogId -> Task) = f ctx.WebLog.Id + let getCount (f: WebLogId -> Task) = f ctx.WebLog.Id let data = ctx.Data let! posts = getCount (data.Post.CountByStatus Published) let! drafts = getCount (data.Post.CountByStatus Draft) @@ -20,62 +21,27 @@ module Dashboard = let! listed = getCount data.Page.CountListed let! cats = getCount data.Category.CountAll let! topCats = getCount data.Category.CountTopLevel - return! - hashForPage "Dashboard" - |> addToHash ViewContext.Model { - Posts = posts - Drafts = drafts - Pages = pages - ListedPages = listed - Categories = cats - TopLevelCategories = topCats - } - |> adminView "dashboard" next ctx + let model = + { Posts = posts + Drafts = drafts + Pages = pages + ListedPages = listed + Categories = cats + TopLevelCategories = topCats } + return! adminPage "Dashboard" false next ctx (Views.WebLog.dashboard model) } // GET /admin/administration let admin : HttpHandler = requireAccess Administrator >=> fun next ctx -> task { - match! TemplateCache.get adminTheme "theme-list-body" ctx.Data with - | Ok bodyTemplate -> - let! themes = ctx.Data.Theme.All () - let cachedTemplates = TemplateCache.allNames () - let! 
hash = - hashForPage "myWebLog Administration" - |> withAntiCsrf ctx - |> addToHash "themes" ( - themes - |> List.map (DisplayTheme.fromTheme WebLogCache.isThemeInUse) - |> Array.ofList) - |> addToHash "cached_themes" ( - themes - |> Seq.ofList - |> Seq.map (fun it -> [| - ThemeId.toString it.Id - it.Name - cachedTemplates - |> List.filter (fun n -> n.StartsWith (ThemeId.toString it.Id)) - |> List.length - |> string - |]) - |> Array.ofSeq) - |> addToHash "web_logs" ( - WebLogCache.all () - |> Seq.ofList - |> Seq.sortBy (fun it -> it.Name) - |> Seq.map (fun it -> [| WebLogId.toString it.Id; it.Name; it.UrlBase |]) - |> Array.ofSeq) - |> addViewContext ctx - return! - addToHash "theme_list" (bodyTemplate.Render hash) hash - |> adminView "admin-dashboard" next ctx - | Error message -> return! Error.server message next ctx + let! themes = ctx.Data.Theme.All() + return! adminPage "myWebLog Administration" true next ctx (Views.Admin.dashboard themes) } /// Redirect the user to the admin dashboard let toAdminDashboard : HttpHandler = redirectToGet "admin/administration" -/// ~~ CACHES ~~ +/// ~~~ CACHES ~~~ module Cache = // POST /admin/cache/web-log/{id}/refresh @@ -87,17 +53,17 @@ module Cache = do! PageListCache.refresh webLog data do! CategoryCache.refresh webLog.Id data do! addMessage ctx - { UserMessage.success with Message = "Successfully refresh web log cache for all web logs" } + { UserMessage.Success with Message = "Successfully refresh web log cache for all web logs" } else - match! data.WebLog.FindById (WebLogId webLogId) with + match! data.WebLog.FindById(WebLogId webLogId) with | Some webLog -> WebLogCache.set webLog do! PageListCache.refresh webLog data do! CategoryCache.refresh webLog.Id data do! addMessage ctx - { UserMessage.success with Message = $"Successfully refreshed web log cache for {webLog.Name}" } + { UserMessage.Success with Message = $"Successfully refreshed web log cache for {webLog.Name}" } | None -> - do! 
addMessage ctx { UserMessage.error with Message = $"No web log exists with ID {webLogId}" } + do! addMessage ctx { UserMessage.Error with Message = $"No web log exists with ID {webLogId}" } return! toAdminDashboard next ctx } @@ -108,55 +74,38 @@ module Cache = TemplateCache.empty () do! ThemeAssetCache.fill data do! addMessage ctx - { UserMessage.success with - Message = "Successfully cleared template cache and refreshed theme asset cache" - } + { UserMessage.Success with + Message = "Successfully cleared template cache and refreshed theme asset cache" } else - match! data.Theme.FindById (ThemeId themeId) with + match! data.Theme.FindById(ThemeId themeId) with | Some theme -> TemplateCache.invalidateTheme theme.Id do! ThemeAssetCache.refreshTheme theme.Id data do! addMessage ctx - { UserMessage.success with - Message = $"Successfully cleared template cache and refreshed theme asset cache for {theme.Name}" - } + { UserMessage.Success with + Message = $"Successfully cleared template cache and refreshed theme asset cache for {theme.Name}" } | None -> - do! addMessage ctx { UserMessage.error with Message = $"No theme exists with ID {themeId}" } + do! addMessage ctx { UserMessage.Error with Message = $"No theme exists with ID {themeId}" } return! toAdminDashboard next ctx } -/// ~~ CATEGORIES ~~ +/// ~~~ CATEGORIES ~~~ module Category = open MyWebLog.Data // GET /admin/categories - let all : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { - match! TemplateCache.get adminTheme "category-list-body" ctx.Data with - | Ok catListTemplate -> - let! hash = - hashForPage "Categories" - |> withAntiCsrf ctx - |> addViewContext ctx - return! - addToHash "category_list" (catListTemplate.Render hash) hash - |> adminView "category-list" next ctx - | Error message -> return! 
Error.server message next ctx - } - - // GET /admin/categories/bare - let bare : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> - hashForPage "Categories" - |> withAntiCsrf ctx - |> adminBareView "category-list-body" next ctx - + let all : HttpHandler = fun next ctx -> + let response = fun next ctx -> + adminPage "Categories" true next ctx (Views.WebLog.categoryList (ctx.Request.Query.ContainsKey "new")) + (withHxPushUrl (ctx.WebLog.RelativeUrl (Permalink "admin/categories")) >=> response) next ctx // GET /admin/category/{id}/edit - let edit catId : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { + let edit catId : HttpHandler = fun next ctx -> task { let! result = task { match catId with - | "new" -> return Some ("Add a New Category", { Category.empty with Id = CategoryId "new" }) + | "new" -> return Some ("Add a New Category", { Category.Empty with Id = CategoryId "new" }) | _ -> match! ctx.Data.Category.FindById (CategoryId catId) ctx.WebLog.Id with | Some cat -> return Some ("Edit Category", cat) @@ -165,19 +114,17 @@ module Category = match result with | Some (title, cat) -> return! - hashForPage title - |> withAntiCsrf ctx - |> addToHash ViewContext.Model (EditCategoryModel.fromCategory cat) - |> adminBareView "category-edit" next ctx + Views.WebLog.categoryEdit (EditCategoryModel.FromCategory cat) + |> adminBarePage title true next ctx | None -> return! Error.notFound next ctx } // POST /admin/category/save - let save : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { + let save : HttpHandler = fun next ctx -> task { let data = ctx.Data - let! model = ctx.BindFormAsync () + let! model = ctx.BindFormAsync() let category = - if model.IsNew then someTask { Category.empty with Id = CategoryId.create (); WebLogId = ctx.WebLog.Id } + if model.IsNew then someTask { Category.Empty with Id = CategoryId.Create(); WebLogId = ctx.WebLog.Id } else data.Category.FindById (CategoryId model.CategoryId) ctx.WebLog.Id match! 
category with | Some cat -> @@ -186,16 +133,15 @@ module Category = Name = model.Name Slug = model.Slug Description = if model.Description = "" then None else Some model.Description - ParentId = if model.ParentId = "" then None else Some (CategoryId model.ParentId) - } + ParentId = if model.ParentId = "" then None else Some (CategoryId model.ParentId) } do! (if model.IsNew then data.Category.Add else data.Category.Update) updatedCat do! CategoryCache.update ctx - do! addMessage ctx { UserMessage.success with Message = "Category saved successfully" } - return! bare next ctx + do! addMessage ctx { UserMessage.Success with Message = "Category saved successfully" } + return! all next ctx | None -> return! Error.notFound next ctx } - // POST /admin/category/{id}/delete + // DELETE /admin/category/{id} let delete catId : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { let! result = ctx.Data.Category.Delete (CategoryId catId) ctx.WebLog.Id match result with @@ -207,78 +153,142 @@ module Category = | ReassignedChildCategories -> Some "(Its child categories were reassigned to its parent category)" | _ -> None - do! addMessage ctx { UserMessage.success with Message = "Category deleted successfully"; Detail = detail } + do! addMessage ctx { UserMessage.Success with Message = "Category deleted successfully"; Detail = detail } | CategoryNotFound -> - do! addMessage ctx { UserMessage.error with Message = "Category not found; cannot delete" } - return! bare next ctx - } - - -/// ~~ TAG MAPPINGS ~~ -module TagMapping = - - open Microsoft.AspNetCore.Http - - /// Add tag mappings to the given hash - let withTagMappings (ctx : HttpContext) hash = task { - let! 
mappings = ctx.Data.TagMap.FindByWebLog ctx.WebLog.Id - return - addToHash "mappings" mappings hash - |> addToHash "mapping_ids" ( - mappings - |> List.map (fun it -> { Name = it.Tag; Value = TagMapId.toString it.Id })) - } - - // GET /admin/settings/tag-mappings - let all : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { - let! hash = - hashForPage "" - |> withAntiCsrf ctx - |> withTagMappings ctx - return! adminBareView "tag-mapping-list-body" next ctx hash - } - - // GET /admin/settings/tag-mapping/{id}/edit - let edit tagMapId : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { - let isNew = tagMapId = "new" - let tagMap = - if isNew then someTask { TagMap.empty with Id = TagMapId "new" } - else ctx.Data.TagMap.FindById (TagMapId tagMapId) ctx.WebLog.Id - match! tagMap with - | Some tm -> - return! - hashForPage (if isNew then "Add Tag Mapping" else $"Mapping for {tm.Tag} Tag") - |> withAntiCsrf ctx - |> addToHash ViewContext.Model (EditTagMapModel.fromMapping tm) - |> adminBareView "tag-mapping-edit" next ctx - | None -> return! Error.notFound next ctx - } - - // POST /admin/settings/tag-mapping/save - let save : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { - let data = ctx.Data - let! model = ctx.BindFormAsync () - let tagMap = - if model.IsNew then someTask { TagMap.empty with Id = TagMapId.create (); WebLogId = ctx.WebLog.Id } - else data.TagMap.FindById (TagMapId model.Id) ctx.WebLog.Id - match! tagMap with - | Some tm -> - do! data.TagMap.Save { tm with Tag = model.Tag.ToLower (); UrlValue = model.UrlValue.ToLower () } - do! addMessage ctx { UserMessage.success with Message = "Tag mapping saved successfully" } - return! all next ctx - | None -> return! Error.notFound next ctx - } - - // POST /admin/settings/tag-mapping/{id}/delete - let delete tagMapId : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { - match! 
ctx.Data.TagMap.Delete (TagMapId tagMapId) ctx.WebLog.Id with - | true -> do! addMessage ctx { UserMessage.success with Message = "Tag mapping deleted successfully" } - | false -> do! addMessage ctx { UserMessage.error with Message = "Tag mapping not found; nothing deleted" } + do! addMessage ctx { UserMessage.Error with Message = "Category not found; cannot delete" } return! all next ctx } -/// ~~ THEMES ~~ +/// ~~~ REDIRECT RULES ~~~ +module RedirectRules = + + open Microsoft.AspNetCore.Http + + // GET /admin/settings/redirect-rules + let all : HttpHandler = fun next ctx -> + adminPage "Redirect Rules" true next ctx (Views.WebLog.redirectList ctx.WebLog.RedirectRules) + + // GET /admin/settings/redirect-rules/[index] + let edit idx : HttpHandler = fun next ctx -> + let titleAndView = + if idx = -1 then + Some ("Add", Views.WebLog.redirectEdit (EditRedirectRuleModel.FromRule -1 RedirectRule.Empty)) + else + let rules = ctx.WebLog.RedirectRules + if rules.Length < idx || idx < 0 then + None + else + Some + ("Edit", (Views.WebLog.redirectEdit (EditRedirectRuleModel.FromRule idx (List.item idx rules)))) + match titleAndView with + | Some (title, view) -> adminBarePage $"{title} Redirect Rule" true next ctx view + | None -> Error.notFound next ctx + + /// Update the web log's redirect rules in the database, the request web log, and the web log cache + let private updateRedirectRules (ctx: HttpContext) webLog = backgroundTask { + do! ctx.Data.WebLog.UpdateRedirectRules webLog + ctx.Items["webLog"] <- webLog + WebLogCache.set webLog + } + + // POST /admin/settings/redirect-rules/[index] + let save idx : HttpHandler = fun next ctx -> task { + let! model = ctx.BindFormAsync() + let rule = model.ToRule() + let rules = + ctx.WebLog.RedirectRules + |> match idx with + | -1 when model.InsertAtTop -> List.insertAt 0 rule + | -1 -> List.insertAt ctx.WebLog.RedirectRules.Length rule + | _ -> List.removeAt idx >> List.insertAt idx rule + do! 
updateRedirectRules ctx { ctx.WebLog with RedirectRules = rules } + do! addMessage ctx { UserMessage.Success with Message = "Redirect rule saved successfully" } + return! all next ctx + } + + // POST /admin/settings/redirect-rules/[index]/up + let moveUp idx : HttpHandler = fun next ctx -> task { + if idx < 1 || idx >= ctx.WebLog.RedirectRules.Length then + return! Error.notFound next ctx + else + let toMove = List.item idx ctx.WebLog.RedirectRules + let newRules = ctx.WebLog.RedirectRules |> List.removeAt idx |> List.insertAt (idx - 1) toMove + do! updateRedirectRules ctx { ctx.WebLog with RedirectRules = newRules } + return! all next ctx + } + + // POST /admin/settings/redirect-rules/[index]/down + let moveDown idx : HttpHandler = fun next ctx -> task { + if idx < 0 || idx >= ctx.WebLog.RedirectRules.Length - 1 then + return! Error.notFound next ctx + else + let toMove = List.item idx ctx.WebLog.RedirectRules + let newRules = ctx.WebLog.RedirectRules |> List.removeAt idx |> List.insertAt (idx + 1) toMove + do! updateRedirectRules ctx { ctx.WebLog with RedirectRules = newRules } + return! all next ctx + } + + // DELETE /admin/settings/redirect-rules/[index] + let delete idx : HttpHandler = fun next ctx -> task { + if idx < 0 || idx >= ctx.WebLog.RedirectRules.Length then + return! Error.notFound next ctx + else + let rules = ctx.WebLog.RedirectRules |> List.removeAt idx + do! updateRedirectRules ctx { ctx.WebLog with RedirectRules = rules } + do! addMessage ctx { UserMessage.Success with Message = "Redirect rule deleted successfully" } + return! all next ctx + } + + +/// ~~~ TAG MAPPINGS ~~~ +module TagMapping = + + // GET /admin/settings/tag-mappings + let all : HttpHandler = fun next ctx -> task { + let! mappings = ctx.Data.TagMap.FindByWebLog ctx.WebLog.Id + return! 
adminBarePage "Tag Mapping List" true next ctx (Views.WebLog.tagMapList mappings) + } + + // GET /admin/settings/tag-mapping/{id}/edit + let edit tagMapId : HttpHandler = fun next ctx -> task { + let isNew = tagMapId = "new" + let tagMap = + if isNew then someTask { TagMap.Empty with Id = TagMapId "new" } + else ctx.Data.TagMap.FindById (TagMapId tagMapId) ctx.WebLog.Id + match! tagMap with + | Some tm -> + return! + Views.WebLog.tagMapEdit (EditTagMapModel.FromMapping tm) + |> adminBarePage (if isNew then "Add Tag Mapping" else $"Mapping for {tm.Tag} Tag") true next ctx + | None -> return! Error.notFound next ctx + } + + // POST /admin/settings/tag-mapping/save + let save : HttpHandler = fun next ctx -> task { + let data = ctx.Data + let! model = ctx.BindFormAsync() + let tagMap = + if model.IsNew then someTask { TagMap.Empty with Id = TagMapId.Create(); WebLogId = ctx.WebLog.Id } + else data.TagMap.FindById (TagMapId model.Id) ctx.WebLog.Id + match! tagMap with + | Some tm -> + do! data.TagMap.Save { tm with Tag = model.Tag.ToLower(); UrlValue = model.UrlValue.ToLower() } + do! addMessage ctx { UserMessage.Success with Message = "Tag mapping saved successfully" } + return! all next ctx + | None -> return! Error.notFound next ctx + } + + // DELETE /admin/settings/tag-mapping/{id} + let delete tagMapId : HttpHandler = fun next ctx -> task { + match! ctx.Data.TagMap.Delete (TagMapId tagMapId) ctx.WebLog.Id with + | true -> do! addMessage ctx { UserMessage.Success with Message = "Tag mapping deleted successfully" } + | false -> do! addMessage ctx { UserMessage.Error with Message = "Tag mapping not found; nothing deleted" } + return! all next ctx + } + + +/// ~~~ THEMES ~~~ module Theme = open System @@ -291,30 +301,26 @@ module Theme = let all : HttpHandler = requireAccess Administrator >=> fun next ctx -> task { let! themes = ctx.Data.Theme.All () return! 
- hashForPage "Themes" - |> withAntiCsrf ctx - |> addToHash "themes" (themes |> List.map (DisplayTheme.fromTheme WebLogCache.isThemeInUse) |> Array.ofList) - |> adminBareView "theme-list-body" next ctx + Views.Admin.themeList (List.map (DisplayTheme.FromTheme WebLogCache.isThemeInUse) themes) + |> adminBarePage "Themes" true next ctx } // GET /admin/theme/new let add : HttpHandler = requireAccess Administrator >=> fun next ctx -> - hashForPage "Upload a Theme File" - |> withAntiCsrf ctx - |> adminBareView "theme-upload" next ctx + adminBarePage "Upload a Theme File" true next ctx Views.Admin.themeUpload /// Update the name and version for a theme based on the version.txt file, if present - let private updateNameAndVersion (theme : Theme) (zip : ZipArchive) = backgroundTask { + let private updateNameAndVersion (theme: Theme) (zip: ZipArchive) = backgroundTask { let now () = DateTime.UtcNow.ToString "yyyyMMdd.HHmm" match zip.Entries |> Seq.filter (fun it -> it.FullName = "version.txt") |> Seq.tryHead with | Some versionItem -> - use versionFile = new StreamReader(versionItem.Open ()) - let! versionText = versionFile.ReadToEndAsync () + use versionFile = new StreamReader(versionItem.Open()) + let! 
versionText = versionFile.ReadToEndAsync() let parts = versionText.Trim().Replace("\r", "").Split "\n" - let displayName = if parts[0] > "" then parts[0] else ThemeId.toString theme.Id + let displayName = if parts[0] > "" then parts[0] else string theme.Id let version = if parts.Length > 1 && parts[1] > "" then parts[1] else now () return { theme with Name = displayName; Version = version } - | None -> return { theme with Name = ThemeId.toString theme.Id; Version = now () } + | None -> return { theme with Name = string theme.Id; Version = now () } } /// Update the theme with all templates from the ZIP archive @@ -323,9 +329,9 @@ module Theme = zip.Entries |> Seq.filter (fun it -> it.Name.EndsWith ".liquid") |> Seq.map (fun templateItem -> backgroundTask { - use templateFile = new StreamReader (templateItem.Open ()) - let! template = templateFile.ReadToEndAsync () - return { Name = templateItem.Name.Replace (".liquid", ""); Text = template } + use templateFile = new StreamReader(templateItem.Open()) + let! template = templateFile.ReadToEndAsync() + return { Name = templateItem.Name.Replace(".liquid", ""); Text = template } }) let! templates = Task.WhenAll tasks return @@ -336,37 +342,37 @@ module Theme = } /// Update theme assets from the ZIP archive - let private updateAssets themeId (zip : ZipArchive) (data : IData) = backgroundTask { - for asset in zip.Entries |> Seq.filter (fun it -> it.FullName.StartsWith "wwwroot") do - let assetName = asset.FullName.Replace ("wwwroot/", "") + let private updateAssets themeId (zip: ZipArchive) (data: IData) = backgroundTask { + for asset in zip.Entries |> Seq.filter _.FullName.StartsWith("wwwroot") do + let assetName = asset.FullName.Replace("wwwroot/", "") if assetName <> "" && not (assetName.EndsWith "/") then - use stream = new MemoryStream () + use stream = new MemoryStream() do! asset.Open().CopyToAsync stream do! 
data.ThemeAsset.Save - { Id = ThemeAssetId (themeId, assetName) + { Id = ThemeAssetId(themeId, assetName) UpdatedOn = LocalDateTime.FromDateTime(asset.LastWriteTime.DateTime) - .InZoneLeniently(DateTimeZone.Utc).ToInstant () - Data = stream.ToArray () + .InZoneLeniently(DateTimeZone.Utc).ToInstant() + Data = stream.ToArray() } } /// Derive the theme ID from the file name given - let deriveIdFromFileName (fileName : string) = - let themeName = fileName.Split(".").[0].ToLowerInvariant().Replace (" ", "-") + let deriveIdFromFileName (fileName: string) = + let themeName = fileName.Split(".").[0].ToLowerInvariant().Replace(" ", "-") if themeName.EndsWith "-theme" then - if Regex.IsMatch (themeName, """^[a-z0-9\-]+$""") then - Ok (ThemeId (themeName.Substring (0, themeName.Length - 6))) + if Regex.IsMatch(themeName, """^[a-z0-9\-]+$""") then + Ok(ThemeId(themeName[..themeName.Length - 7])) else Error $"Theme ID {fileName} is invalid" else Error "Theme .zip file name must end in \"-theme.zip\"" /// Load a theme from the given stream, which should contain a ZIP archive - let loadFromZip themeId file (data : IData) = backgroundTask { + let loadFromZip themeId file (data: IData) = backgroundTask { let! isNew, theme = backgroundTask { match! data.Theme.FindById themeId with | Some t -> return false, t - | None -> return true, { Theme.empty with Id = themeId } + | None -> return true, { Theme.Empty with Id = themeId } } - use zip = new ZipArchive (file, ZipArchiveMode.Read) + use zip = new ZipArchive(file, ZipArchiveMode.Read) let! theme = updateNameAndVersion theme zip if not isNew then do! data.ThemeAsset.DeleteByTheme theme.Id let! 
theme = updateTemplates { theme with Templates = [] } zip @@ -381,37 +387,35 @@ module Theme = if ctx.Request.HasFormContentType && ctx.Request.Form.Files.Count > 0 then let themeFile = Seq.head ctx.Request.Form.Files match deriveIdFromFileName themeFile.FileName with - | Ok themeId when themeId <> adminTheme -> + | Ok themeId when themeId <> ThemeId "admin" -> let data = ctx.Data let! exists = data.Theme.Exists themeId let isNew = not exists - let! model = ctx.BindFormAsync () + let! model = ctx.BindFormAsync() if isNew || model.DoOverwrite then // Load the theme to the database - use stream = new MemoryStream () + use stream = new MemoryStream() do! themeFile.CopyToAsync stream let! _ = loadFromZip themeId stream data do! ThemeAssetCache.refreshTheme themeId data TemplateCache.invalidateTheme themeId // Save the .zip file - use file = new FileStream ($"{ThemeId.toString themeId}-theme.zip", FileMode.Create) + use file = new FileStream($"./themes/{themeId}-theme.zip", FileMode.Create) do! themeFile.CopyToAsync file do! addMessage ctx - { UserMessage.success with - Message = $"""Theme {if isNew then "add" else "updat"}ed successfully""" - } + { UserMessage.Success with + Message = $"""Theme {if isNew then "add" else "updat"}ed successfully""" } return! toAdminDashboard next ctx else do! addMessage ctx - { UserMessage.error with - Message = "Theme exists and overwriting was not requested; nothing saved" - } + { UserMessage.Error with + Message = "Theme exists and overwriting was not requested; nothing saved" } return! toAdminDashboard next ctx | Ok _ -> - do! addMessage ctx { UserMessage.error with Message = "You may not replace the admin theme" } + do! addMessage ctx { UserMessage.Error with Message = "You may not replace the admin theme" } return! toAdminDashboard next ctx | Error message -> - do! addMessage ctx { UserMessage.error with Message = message } + do! addMessage ctx { UserMessage.Error with Message = message } return! 
toAdminDashboard next ctx else return! RequestErrors.BAD_REQUEST "Bad request" next ctx } @@ -421,87 +425,53 @@ module Theme = let data = ctx.Data match themeId with | "admin" | "default" -> - do! addMessage ctx { UserMessage.error with Message = $"You may not delete the {themeId} theme" } + do! addMessage ctx { UserMessage.Error with Message = $"You may not delete the {themeId} theme" } return! all next ctx | it when WebLogCache.isThemeInUse (ThemeId it) -> do! addMessage ctx - { UserMessage.error with - Message = $"You may not delete the {themeId} theme, as it is currently in use" - } + { UserMessage.Error with + Message = $"You may not delete the {themeId} theme, as it is currently in use" } return! all next ctx | _ -> match! data.Theme.Delete (ThemeId themeId) with | true -> - let zippedTheme = $"{themeId}-theme.zip" + let zippedTheme = $"./themes/{themeId}-theme.zip" if File.Exists zippedTheme then File.Delete zippedTheme - do! addMessage ctx { UserMessage.success with Message = $"Theme ID {themeId} deleted successfully" } + do! addMessage ctx { UserMessage.Success with Message = $"Theme ID {themeId} deleted successfully" } return! all next ctx | false -> return! Error.notFound next ctx } -/// ~~ WEB LOG SETTINGS ~~ +/// ~~~ WEB LOG SETTINGS ~~~ module WebLog = - open System.Collections.Generic open System.IO // GET /admin/settings - let settings : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { - let data = ctx.Data - match! TemplateCache.get adminTheme "user-list-body" data with - | Ok userTemplate -> - match! TemplateCache.get adminTheme "tag-mapping-list-body" ctx.Data with - | Ok tagMapTemplate -> - let! allPages = data.Page.All ctx.WebLog.Id - let! themes = data.Theme.All () - let! users = data.WebLogUser.FindByWebLog ctx.WebLog.Id - let! 
hash = - hashForPage "Web Log Settings" - |> withAntiCsrf ctx - |> addToHash ViewContext.Model (SettingsModel.fromWebLog ctx.WebLog) - |> addToHash "pages" ( - seq { - KeyValuePair.Create ("posts", "- First Page of Posts -") - yield! allPages - |> List.sortBy (fun p -> p.Title.ToLower ()) - |> List.map (fun p -> KeyValuePair.Create (PageId.toString p.Id, p.Title)) - } - |> Array.ofSeq) - |> addToHash "themes" ( - themes - |> Seq.ofList - |> Seq.map (fun it -> - KeyValuePair.Create (ThemeId.toString it.Id, $"{it.Name} (v{it.Version})")) - |> Array.ofSeq) - |> addToHash "upload_values" [| - KeyValuePair.Create (UploadDestination.toString Database, "Database") - KeyValuePair.Create (UploadDestination.toString Disk, "Disk") - |] - |> addToHash "users" (users |> List.map (DisplayUser.fromUser ctx.WebLog) |> Array.ofList) - |> addToHash "rss_model" (EditRssModel.fromRssOptions ctx.WebLog.Rss) - |> addToHash "custom_feeds" ( - ctx.WebLog.Rss.CustomFeeds - |> List.map (DisplayCustomFeed.fromFeed (CategoryCache.get ctx)) - |> Array.ofList) - |> addViewContext ctx - let! hash' = TagMapping.withTagMappings ctx hash - return! - addToHash "user_list" (userTemplate.Render hash') hash' - |> addToHash "tag_mapping_list" (tagMapTemplate.Render hash') - |> adminView "settings" next ctx - | Error message -> return! Error.server message next ctx - | Error message -> return! Error.server message next ctx + let settings : HttpHandler = fun next ctx -> task { + let data = ctx.Data + let! allPages = data.Page.All ctx.WebLog.Id + let pages = + allPages + |> List.sortBy _.Title.ToLower() + |> List.append [ { Page.Empty with Id = PageId "posts"; Title = "- First Page of Posts -" } ] + let! themes = data.Theme.All() + let uploads = [ Database; Disk ] + return! 
+ Views.WebLog.webLogSettings + (SettingsModel.FromWebLog ctx.WebLog) themes pages uploads (EditRssModel.FromRssOptions ctx.WebLog.Rss) + |> adminPage "Web Log Settings" true next ctx } // POST /admin/settings - let saveSettings : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { + let saveSettings : HttpHandler = fun next ctx -> task { let data = ctx.Data - let! model = ctx.BindFormAsync () + let! model = ctx.BindFormAsync() match! data.WebLog.FindById ctx.WebLog.Id with | Some webLog -> let oldSlug = webLog.Slug - let webLog = model.update webLog + let webLog = model.Update webLog do! data.WebLog.UpdateSettings webLog // Update cache @@ -509,11 +479,11 @@ module WebLog = if oldSlug <> webLog.Slug then // Rename disk directory if it exists - let uploadRoot = Path.Combine ("wwwroot", "upload") - let oldDir = Path.Combine (uploadRoot, oldSlug) - if Directory.Exists oldDir then Directory.Move (oldDir, Path.Combine (uploadRoot, webLog.Slug)) + let uploadRoot = Path.Combine("wwwroot", "upload") + let oldDir = Path.Combine(uploadRoot, oldSlug) + if Directory.Exists oldDir then Directory.Move(oldDir, Path.Combine(uploadRoot, webLog.Slug)) - do! addMessage ctx { UserMessage.success with Message = "Web log settings saved successfully" } + do! addMessage ctx { UserMessage.Success with Message = "Web log settings saved successfully" } return! redirectToGet "admin/settings" next ctx | None -> return! 
Error.notFound next ctx } diff --git a/src/MyWebLog/Handlers/Feed.fs b/src/MyWebLog/Handlers/Feed.fs index 7db1dd9..25f055f 100644 --- a/src/MyWebLog/Handlers/Feed.fs +++ b/src/MyWebLog/Handlers/Feed.fs @@ -2,7 +2,6 @@ module MyWebLog.Handlers.Feed open System -open System.Collections.Generic open System.IO open System.Net open System.ServiceModel.Syndication @@ -23,7 +22,7 @@ type FeedType = | Custom of CustomFeed * string /// Derive the type of RSS feed requested -let deriveFeedType (ctx : HttpContext) feedPath : (FeedType * int) option = +let deriveFeedType (ctx: HttpContext) feedPath : (FeedType * int) option = let webLog = ctx.WebLog let debug = debug "Feed" ctx let name = $"/{webLog.Rss.FeedName}" @@ -33,23 +32,22 @@ let deriveFeedType (ctx : HttpContext) feedPath : (FeedType * int) option = match webLog.Rss.IsFeedEnabled && feedPath = name with | true -> debug (fun () -> "Found standard feed") - Some (StandardFeed feedPath, postCount) + Some(StandardFeed feedPath, postCount) | false -> // Category and tag feeds are handled by defined routes; check for custom feed match webLog.Rss.CustomFeeds - |> List.tryFind (fun it -> feedPath.EndsWith (Permalink.toString it.Path)) with + |> List.tryFind (fun it -> feedPath.EndsWith(string it.Path)) with | Some feed -> debug (fun () -> "Found custom feed") - Some (Custom (feed, feedPath), - feed.Podcast |> Option.map (fun p -> p.ItemsInFeed) |> Option.defaultValue postCount) + Some(Custom(feed, feedPath), feed.Podcast |> Option.map _.ItemsInFeed |> Option.defaultValue postCount) | None -> - debug (fun () -> $"No matching feed found") + debug (fun () -> "No matching feed found") None /// Determine the function to retrieve posts for the given feed let private getFeedPosts ctx feedType = - let childIds catId = - let cat = CategoryCache.get ctx |> Array.find (fun c -> c.Id = CategoryId.toString catId) + let childIds (catId: CategoryId) = + let cat = CategoryCache.get ctx |> Array.find (fun c -> c.Id = string catId) 
getCategoryIds cat.Slug ctx let data = ctx.Data match feedType with @@ -62,7 +60,7 @@ let private getFeedPosts ctx feedType = | Tag tag -> data.Post.FindPageOfTaggedPosts ctx.WebLog.Id tag 1 /// Strip HTML from a string -let private stripHtml text = WebUtility.HtmlDecode <| Regex.Replace (text, "<(.|\n)*?>", "") +let private stripHtml text = WebUtility.HtmlDecode <| Regex.Replace(text, "<(.|\n)*?>", "") /// XML namespaces for building RSS feeds [] @@ -87,108 +85,113 @@ module private Namespace = let rawVoice = "http://www.rawvoice.com/rawvoiceRssModule/" /// Create a feed item from the given post -let private toFeedItem webLog (authors : MetaItem list) (cats : DisplayCategory[]) (tagMaps : TagMap list) - (post : Post) = +let private toFeedItem (webLog: WebLog) (authors: MetaItem list) (cats: DisplayCategory array) (tagMaps: TagMap list) + (post: Post) = let plainText = let endingP = post.Text.IndexOf "

    " stripHtml <| if endingP >= 0 then post.Text[..(endingP - 1)] else post.Text - let item = SyndicationItem ( - Id = WebLog.absoluteUrl webLog post.Permalink, + let item = SyndicationItem( + Id = webLog.AbsoluteUrl post.Permalink, Title = TextSyndicationContent.CreateHtmlContent post.Title, - PublishDate = post.PublishedOn.Value.ToDateTimeOffset (), - LastUpdatedTime = post.UpdatedOn.ToDateTimeOffset (), + PublishDate = post.PublishedOn.Value.ToDateTimeOffset(), + LastUpdatedTime = post.UpdatedOn.ToDateTimeOffset(), Content = TextSyndicationContent.CreatePlaintextContent plainText) item.AddPermalink (Uri item.Id) - let xmlDoc = XmlDocument () + let xmlDoc = XmlDocument() let encoded = let txt = post.Text .Replace("src=\"/", $"src=\"{webLog.UrlBase}/") - .Replace ("href=\"/", $"href=\"{webLog.UrlBase}/") - let it = xmlDoc.CreateElement ("content", "encoded", Namespace.content) - let _ = it.AppendChild (xmlDoc.CreateCDataSection txt) + .Replace("href=\"/", $"href=\"{webLog.UrlBase}/") + let it = xmlDoc.CreateElement("content", "encoded", Namespace.content) + let _ = it.AppendChild(xmlDoc.CreateCDataSection txt) it item.ElementExtensions.Add encoded - item.Authors.Add (SyndicationPerson ( - Name = (authors |> List.find (fun a -> a.Name = WebLogUserId.toString post.AuthorId)).Value)) + item.Authors.Add(SyndicationPerson(Name = (authors |> List.find (fun a -> a.Name = string post.AuthorId)).Value)) [ post.CategoryIds |> List.map (fun catId -> - let cat = cats |> Array.find (fun c -> c.Id = CategoryId.toString catId) - SyndicationCategory (cat.Name, WebLog.absoluteUrl webLog (Permalink $"category/{cat.Slug}/"), cat.Name)) + let cat = cats |> Array.find (fun c -> c.Id = string catId) + SyndicationCategory(cat.Name, webLog.AbsoluteUrl(Permalink $"category/{cat.Slug}/"), cat.Name)) post.Tags |> List.map (fun tag -> let urlTag = match tagMaps |> List.tryFind (fun tm -> tm.Tag = tag) with | Some tm -> tm.UrlValue | None -> tag.Replace (" ", "+") - SyndicationCategory (tag, 
WebLog.absoluteUrl webLog (Permalink $"tag/{urlTag}/"), $"{tag} (tag)")) + SyndicationCategory(tag, webLog.AbsoluteUrl(Permalink $"tag/{urlTag}/"), $"{tag} (tag)")) ] |> List.concat |> List.iter item.Categories.Add item /// Convert non-absolute URLs to an absolute URL for this web log -let toAbsolute webLog (link : string) = - if link.StartsWith "http" then link else WebLog.absoluteUrl webLog (Permalink link) +let toAbsolute (webLog: WebLog) (link: string) = + if link.StartsWith "http" then link else webLog.AbsoluteUrl(Permalink link) /// Add episode information to a podcast feed item -let private addEpisode webLog (podcast : PodcastOptions) (episode : Episode) (post : Post) (item : SyndicationItem) = +let private addEpisode (webLog: WebLog) (podcast: PodcastOptions) (episode: Episode) (post: Post) + (item: SyndicationItem) = let epMediaUrl = match episode.Media with | link when link.StartsWith "http" -> link | link when Option.isSome podcast.MediaBaseUrl -> $"{podcast.MediaBaseUrl.Value}{link}" - | link -> WebLog.absoluteUrl webLog (Permalink link) + | link -> webLog.AbsoluteUrl(Permalink link) let epMediaType = [ episode.MediaType; podcast.DefaultMediaType ] |> List.tryFind Option.isSome |> Option.flatten - let epImageUrl = defaultArg episode.ImageUrl (Permalink.toString podcast.ImageUrl) |> toAbsolute webLog - let epExplicit = defaultArg episode.Explicit podcast.Explicit |> ExplicitRating.toString + let epImageUrl = defaultArg episode.ImageUrl (string podcast.ImageUrl) |> toAbsolute webLog + let epExplicit = string (defaultArg episode.Explicit podcast.Explicit) - let xmlDoc = XmlDocument () + let xmlDoc = XmlDocument() let enclosure = let it = xmlDoc.CreateElement "enclosure" - it.SetAttribute ("url", epMediaUrl) - it.SetAttribute ("length", string episode.Length) - epMediaType |> Option.iter (fun typ -> it.SetAttribute ("type", typ)) + it.SetAttribute("url", epMediaUrl) + it.SetAttribute("length", string episode.Length) + epMediaType |> Option.iter (fun typ -> 
it.SetAttribute("type", typ)) it let image = - let it = xmlDoc.CreateElement ("itunes", "image", Namespace.iTunes) - it.SetAttribute ("href", epImageUrl) + let it = xmlDoc.CreateElement("itunes", "image", Namespace.iTunes) + it.SetAttribute("href", epImageUrl) it item.ElementExtensions.Add enclosure item.ElementExtensions.Add image - item.ElementExtensions.Add ("creator", Namespace.dc, podcast.DisplayedAuthor) - item.ElementExtensions.Add ("author", Namespace.iTunes, podcast.DisplayedAuthor) - item.ElementExtensions.Add ("explicit", Namespace.iTunes, epExplicit) - episode.Subtitle |> Option.iter (fun it -> item.ElementExtensions.Add ("subtitle", Namespace.iTunes, it)) - Episode.formatDuration episode - |> Option.iter (fun it -> item.ElementExtensions.Add ("duration", Namespace.iTunes, it)) + item.ElementExtensions.Add("creator", Namespace.dc, podcast.DisplayedAuthor) + item.ElementExtensions.Add("author", Namespace.iTunes, podcast.DisplayedAuthor) + item.ElementExtensions.Add("explicit", Namespace.iTunes, epExplicit) + episode.Subtitle |> Option.iter (fun it -> item.ElementExtensions.Add("subtitle", Namespace.iTunes, it)) + episode.FormatDuration() |> Option.iter (fun it -> item.ElementExtensions.Add("duration", Namespace.iTunes, it)) - match episode.ChapterFile with - | Some chapters -> - let url = toAbsolute webLog chapters - let typ = - match episode.ChapterType with - | Some mime -> Some mime - | None when chapters.EndsWith ".json" -> Some "application/json+chapters" - | None -> None - let elt = xmlDoc.CreateElement ("podcast", "chapters", Namespace.podcast) - elt.SetAttribute ("url", url) - typ |> Option.iter (fun it -> elt.SetAttribute ("type", it)) + let chapterUrl, chapterMimeType = + match episode.Chapters, episode.ChapterFile with + | Some _, _ -> + Some $"{webLog.AbsoluteUrl post.Permalink}?chapters", Some JSON_CHAPTERS + | None, Some chapters -> + let typ = + match episode.ChapterType with + | Some mime -> Some mime + | None when chapters.EndsWith 
".json" -> Some JSON_CHAPTERS + | None -> None + Some (toAbsolute webLog chapters), typ + | None, None -> None, None + + match chapterUrl with + | Some url -> + let elt = xmlDoc.CreateElement("podcast", "chapters", Namespace.podcast) + elt.SetAttribute("url", url) + chapterMimeType |> Option.iter (fun it -> elt.SetAttribute("type", it)) item.ElementExtensions.Add elt | None -> () match episode.TranscriptUrl with | Some transcript -> let url = toAbsolute webLog transcript - let elt = xmlDoc.CreateElement ("podcast", "transcript", Namespace.podcast) - elt.SetAttribute ("url", url) - elt.SetAttribute ("type", Option.get episode.TranscriptType) - episode.TranscriptLang |> Option.iter (fun it -> elt.SetAttribute ("language", it)) - if defaultArg episode.TranscriptCaptions false then - elt.SetAttribute ("rel", "captions") + let elt = xmlDoc.CreateElement("podcast", "transcript", Namespace.podcast) + elt.SetAttribute("url", url) + elt.SetAttribute("type", Option.get episode.TranscriptType) + episode.TranscriptLang |> Option.iter (fun it -> elt.SetAttribute("language", it)) + if defaultArg episode.TranscriptCaptions false then elt.SetAttribute("rel", "captions") item.ElementExtensions.Add elt | None -> () @@ -196,38 +199,37 @@ let private addEpisode webLog (podcast : PodcastOptions) (episode : Episode) (po | Some season -> match episode.SeasonDescription with | Some desc -> - let elt = xmlDoc.CreateElement ("podcast", "season", Namespace.podcast) - elt.SetAttribute ("name", desc) + let elt = xmlDoc.CreateElement("podcast", "season", Namespace.podcast) + elt.SetAttribute("name", desc) elt.InnerText <- string season item.ElementExtensions.Add elt - | None -> item.ElementExtensions.Add ("season", Namespace.podcast, string season) + | None -> item.ElementExtensions.Add("season", Namespace.podcast, string season) | None -> () match episode.EpisodeNumber with | Some epNumber -> match episode.EpisodeDescription with | Some desc -> - let elt = xmlDoc.CreateElement ("podcast", 
"episode", Namespace.podcast) - elt.SetAttribute ("name", desc) + let elt = xmlDoc.CreateElement("podcast", "episode", Namespace.podcast) + elt.SetAttribute("name", desc) elt.InnerText <- string epNumber item.ElementExtensions.Add elt - | None -> item.ElementExtensions.Add ("episode", Namespace.podcast, string epNumber) + | None -> item.ElementExtensions.Add("episode", Namespace.podcast, string epNumber) | None -> () if post.Metadata |> List.exists (fun it -> it.Name = "chapter") then try - let chapters = xmlDoc.CreateElement ("psc", "chapters", Namespace.psc) - chapters.SetAttribute ("version", "1.2") + let chapters = xmlDoc.CreateElement("psc", "chapters", Namespace.psc) + chapters.SetAttribute("version", "1.2") post.Metadata |> List.filter (fun it -> it.Name = "chapter") - |> List.map (fun it -> - TimeSpan.Parse (it.Value.Split(" ")[0]), it.Value.Substring (it.Value.IndexOf(" ") + 1)) + |> List.map (fun it -> TimeSpan.Parse(it.Value.Split(" ")[0]), it.Value[it.Value.IndexOf(" ") + 1..]) |> List.sortBy fst |> List.iter (fun chap -> - let chapter = xmlDoc.CreateElement ("psc", "chapter", Namespace.psc) - chapter.SetAttribute ("start", (fst chap).ToString "hh:mm:ss") - chapter.SetAttribute ("title", snd chap) + let chapter = xmlDoc.CreateElement("psc", "chapter", Namespace.psc) + chapter.SetAttribute("start", (fst chap).ToString "hh:mm:ss") + chapter.SetAttribute("title", snd chap) chapters.AppendChild chapter |> ignore) item.ElementExtensions.Add chapters @@ -235,26 +237,26 @@ let private addEpisode webLog (podcast : PodcastOptions) (episode : Episode) (po item /// Add a namespace to the feed -let private addNamespace (feed : SyndicationFeed) alias nsUrl = - feed.AttributeExtensions.Add (XmlQualifiedName (alias, "http://www.w3.org/2000/xmlns/"), nsUrl) +let private addNamespace (feed: SyndicationFeed) alias nsUrl = + feed.AttributeExtensions.Add(XmlQualifiedName(alias, "http://www.w3.org/2000/xmlns/"), nsUrl) /// Add items to the top of the feed required for 
podcasts -let private addPodcast webLog (rssFeed : SyndicationFeed) (feed : CustomFeed) = - let addChild (doc : XmlDocument) ns prefix name value (elt : XmlElement) = +let private addPodcast (webLog: WebLog) (rssFeed: SyndicationFeed) (feed: CustomFeed) = + let addChild (doc: XmlDocument) ns prefix name value (elt: XmlElement) = let child = - if ns = "" then doc.CreateElement name else doc.CreateElement (prefix, name, ns) + if ns = "" then doc.CreateElement name else doc.CreateElement(prefix, name, ns) |> elt.AppendChild child.InnerText <- value elt let podcast = Option.get feed.Podcast - let feedUrl = WebLog.absoluteUrl webLog feed.Path + let feedUrl = webLog.AbsoluteUrl feed.Path let imageUrl = match podcast.ImageUrl with | Permalink link when link.StartsWith "http" -> link - | Permalink _ -> WebLog.absoluteUrl webLog podcast.ImageUrl + | Permalink _ -> webLog.AbsoluteUrl podcast.ImageUrl - let xmlDoc = XmlDocument () + let xmlDoc = XmlDocument() [ "dc", Namespace.dc "itunes", Namespace.iTunes @@ -265,12 +267,12 @@ let private addPodcast webLog (rssFeed : SyndicationFeed) (feed : CustomFeed) = |> List.iter (fun (alias, nsUrl) -> addNamespace rssFeed alias nsUrl) let categorization = - let it = xmlDoc.CreateElement ("itunes", "category", Namespace.iTunes) - it.SetAttribute ("text", podcast.AppleCategory) + let it = xmlDoc.CreateElement("itunes", "category", Namespace.iTunes) + it.SetAttribute("text", podcast.AppleCategory) podcast.AppleSubcategory |> Option.iter (fun subCat -> - let subCatElt = xmlDoc.CreateElement ("itunes", "category", Namespace.iTunes) - subCatElt.SetAttribute ("text", subCat) + let subCatElt = xmlDoc.CreateElement("itunes", "category", Namespace.iTunes) + subCatElt.SetAttribute("text", subCat) it.AppendChild subCatElt |> ignore) it let image = @@ -280,19 +282,19 @@ let private addPodcast webLog (rssFeed : SyndicationFeed) (feed : CustomFeed) = ] |> List.fold (fun elt (name, value) -> addChild xmlDoc "" "" name value elt) (xmlDoc.CreateElement 
"image") let iTunesImage = - let it = xmlDoc.CreateElement ("itunes", "image", Namespace.iTunes) - it.SetAttribute ("href", imageUrl) + let it = xmlDoc.CreateElement("itunes", "image", Namespace.iTunes) + it.SetAttribute("href", imageUrl) it let owner = [ "name", podcast.DisplayedAuthor "email", podcast.Email ] |> List.fold (fun elt (name, value) -> addChild xmlDoc Namespace.iTunes "itunes" name value elt) - (xmlDoc.CreateElement ("itunes", "owner", Namespace.iTunes)) + (xmlDoc.CreateElement("itunes", "owner", Namespace.iTunes)) let rawVoice = - let it = xmlDoc.CreateElement ("rawvoice", "subscribe", Namespace.rawVoice) - it.SetAttribute ("feed", feedUrl) - it.SetAttribute ("itunes", "") + let it = xmlDoc.CreateElement("rawvoice", "subscribe", Namespace.rawVoice) + it.SetAttribute("feed", feedUrl) + it.SetAttribute("itunes", "") it rssFeed.ElementExtensions.Add image @@ -300,25 +302,24 @@ let private addPodcast webLog (rssFeed : SyndicationFeed) (feed : CustomFeed) = rssFeed.ElementExtensions.Add categorization rssFeed.ElementExtensions.Add iTunesImage rssFeed.ElementExtensions.Add rawVoice - rssFeed.ElementExtensions.Add ("summary", Namespace.iTunes, podcast.Summary) - rssFeed.ElementExtensions.Add ("author", Namespace.iTunes, podcast.DisplayedAuthor) - rssFeed.ElementExtensions.Add ("explicit", Namespace.iTunes, ExplicitRating.toString podcast.Explicit) - podcast.Subtitle |> Option.iter (fun sub -> rssFeed.ElementExtensions.Add ("subtitle", Namespace.iTunes, sub)) + rssFeed.ElementExtensions.Add("summary", Namespace.iTunes, podcast.Summary) + rssFeed.ElementExtensions.Add("author", Namespace.iTunes, podcast.DisplayedAuthor) + rssFeed.ElementExtensions.Add("explicit", Namespace.iTunes, string podcast.Explicit) + podcast.Subtitle |> Option.iter (fun sub -> rssFeed.ElementExtensions.Add("subtitle", Namespace.iTunes, sub)) podcast.FundingUrl |> Option.iter (fun url -> - let funding = xmlDoc.CreateElement ("podcast", "funding", Namespace.podcast) - 
funding.SetAttribute ("url", toAbsolute webLog url) + let funding = xmlDoc.CreateElement("podcast", "funding", Namespace.podcast) + funding.SetAttribute("url", toAbsolute webLog url) funding.InnerText <- defaultArg podcast.FundingText "Support This Podcast" rssFeed.ElementExtensions.Add funding) podcast.PodcastGuid |> Option.iter (fun guid -> - rssFeed.ElementExtensions.Add ("guid", Namespace.podcast, guid.ToString().ToLowerInvariant ())) - podcast.Medium - |> Option.iter (fun med -> rssFeed.ElementExtensions.Add ("medium", Namespace.podcast, PodcastMedium.toString med)) + rssFeed.ElementExtensions.Add("guid", Namespace.podcast, guid.ToString().ToLowerInvariant())) + podcast.Medium |> Option.iter (fun med -> rssFeed.ElementExtensions.Add("medium", Namespace.podcast, string med)) /// Get the feed's self reference and non-feed link let private selfAndLink webLog feedType ctx = - let withoutFeed (it : string) = Permalink (it.Replace ($"/{webLog.Rss.FeedName}", "")) + let withoutFeed (it: string) = Permalink(it.Replace($"/{webLog.Rss.FeedName}", "")) match feedType with | StandardFeed path | CategoryFeed (_, path) @@ -330,8 +331,8 @@ let private selfAndLink webLog feedType ctx = | Tag tag -> feed.Path, Permalink $"""tag/{tag.Replace(" ", "+")}/""" /// Set the title and description of the feed based on its source -let private setTitleAndDescription feedType (webLog : WebLog) (cats : DisplayCategory[]) (feed : SyndicationFeed) = - let cleanText opt def = TextSyndicationContent (stripHtml (defaultArg opt def)) +let private setTitleAndDescription feedType (webLog: WebLog) (cats: DisplayCategory[]) (feed: SyndicationFeed) = + let cleanText opt def = TextSyndicationContent(stripHtml (defaultArg opt def)) match feedType with | StandardFeed _ -> feed.Title <- cleanText None webLog.Name @@ -359,7 +360,7 @@ let private setTitleAndDescription feedType (webLog : WebLog) (cats : DisplayCat feed.Description <- cleanText None $"""Posts with the "{tag}" tag""" /// Create a feed with a 
known non-zero-length list of posts -let createFeed (feedType : FeedType) posts : HttpHandler = fun next ctx -> backgroundTask { +let createFeed (feedType: FeedType) posts : HttpHandler = fun next ctx -> backgroundTask { let webLog = ctx.WebLog let data = ctx.Data let! authors = getAuthors webLog posts data @@ -373,40 +374,40 @@ let createFeed (feedType : FeedType) posts : HttpHandler = fun next ctx -> backg match podcast, post.Episode with | Some feed, Some episode -> addEpisode webLog (Option.get feed.Podcast) episode post item | Some _, _ -> - warn "Feed" ctx $"[{webLog.Name} {Permalink.toString self}] \"{stripHtml post.Title}\" has no media" + warn "Feed" ctx $"[{webLog.Name} {self}] \"{stripHtml post.Title}\" has no media" item | _ -> item - let feed = SyndicationFeed () + let feed = SyndicationFeed() addNamespace feed "content" Namespace.content setTitleAndDescription feedType webLog cats feed - feed.LastUpdatedTime <- (List.head posts).UpdatedOn.ToDateTimeOffset () + feed.LastUpdatedTime <- (List.head posts).UpdatedOn.ToDateTimeOffset() feed.Generator <- ctx.Generator feed.Items <- posts |> Seq.ofList |> Seq.map toItem feed.Language <- "en" - feed.Id <- WebLog.absoluteUrl webLog link + feed.Id <- webLog.AbsoluteUrl link webLog.Rss.Copyright |> Option.iter (fun copy -> feed.Copyright <- TextSyndicationContent copy) - feed.Links.Add (SyndicationLink (Uri (WebLog.absoluteUrl webLog self), "self", "", "application/rss+xml", 0L)) - feed.ElementExtensions.Add ("link", "", WebLog.absoluteUrl webLog link) + feed.Links.Add(SyndicationLink(Uri(webLog.AbsoluteUrl self), "self", "", "application/rss+xml", 0L)) + feed.ElementExtensions.Add("link", "", webLog.AbsoluteUrl link) podcast |> Option.iter (addPodcast webLog feed) - use mem = new MemoryStream () + use mem = new MemoryStream() use xml = XmlWriter.Create mem feed.SaveAsRss20 xml - xml.Close () + xml.Close() - let _ = mem.Seek (0L, SeekOrigin.Begin) + let _ = mem.Seek(0L, SeekOrigin.Begin) let rdr = new 
StreamReader(mem) - let! output = rdr.ReadToEndAsync () + let! output = rdr.ReadToEndAsync() return! (setHttpHeader "Content-Type" "text/xml" >=> setStatusCode 200 >=> setBodyFromString output) next ctx } // GET {any-prescribed-feed} -let generate (feedType : FeedType) postCount : HttpHandler = fun next ctx -> backgroundTask { +let generate (feedType: FeedType) postCount : HttpHandler = fun next ctx -> backgroundTask { match! getFeedPosts ctx feedType postCount with | posts when List.length posts > 0 -> return! createFeed feedType posts next ctx | _ -> return! Error.notFound next ctx @@ -417,13 +418,13 @@ let generate (feedType : FeedType) postCount : HttpHandler = fun next ctx -> bac // POST /admin/settings/rss let saveSettings : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { let data = ctx.Data - let! model = ctx.BindFormAsync () + let! model = ctx.BindFormAsync() match! data.WebLog.FindById ctx.WebLog.Id with | Some webLog -> let webLog = { webLog with Rss = model.UpdateOptions webLog.Rss } do! data.WebLog.UpdateRssOptions webLog WebLogCache.set webLog - do! addMessage ctx { UserMessage.success with Message = "RSS settings updated successfully" } + do! addMessage ctx { UserMessage.Success with Message = "RSS settings updated successfully" } return! redirectToGet "admin/settings#rss-settings" next ctx | None -> return! 
Error.notFound next ctx } @@ -432,24 +433,27 @@ let saveSettings : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> t let editCustomFeed feedId : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> let customFeed = match feedId with - | "new" -> Some { CustomFeed.empty with Id = CustomFeedId "new" } + | "new" -> Some { CustomFeed.Empty with Id = CustomFeedId "new" } | _ -> ctx.WebLog.Rss.CustomFeeds |> List.tryFind (fun f -> f.Id = CustomFeedId feedId) match customFeed with | Some f -> - hashForPage $"""{if feedId = "new" then "Add" else "Edit"} Custom RSS Feed""" - |> withAntiCsrf ctx - |> addToHash ViewContext.Model (EditCustomFeedModel.fromFeed f) - |> addToHash "medium_values" [| - KeyValuePair.Create ("", "– Unspecified –") - KeyValuePair.Create (PodcastMedium.toString Podcast, "Podcast") - KeyValuePair.Create (PodcastMedium.toString Music, "Music") - KeyValuePair.Create (PodcastMedium.toString Video, "Video") - KeyValuePair.Create (PodcastMedium.toString Film, "Film") - KeyValuePair.Create (PodcastMedium.toString Audiobook, "Audiobook") - KeyValuePair.Create (PodcastMedium.toString Newsletter, "Newsletter") - KeyValuePair.Create (PodcastMedium.toString Blog, "Blog") - |] - |> adminView "custom-feed-edit" next ctx + let ratings = [ + { Name = string Yes; Value = "Yes" } + { Name = string No; Value = "No" } + { Name = string Clean; Value = "Clean" } + ] + let mediums = [ + { Name = ""; Value = "– Unspecified –" } + { Name = string Podcast; Value = "Podcast" } + { Name = string Music; Value = "Music" } + { Name = string Video; Value = "Video" } + { Name = string Film; Value = "Film" } + { Name = string Audiobook; Value = "Audiobook" } + { Name = string Newsletter; Value = "Newsletter" } + { Name = string Blog; Value = "Blog" } + ] + Views.WebLog.feedEdit (EditCustomFeedModel.FromFeed f) ratings mediums + |> adminPage $"""{if feedId = "new" then "Add" else "Edit"} Custom RSS Feed""" true next ctx | None -> Error.notFound next ctx // POST 
/admin/settings/rss/save @@ -457,45 +461,42 @@ let saveCustomFeed : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> let data = ctx.Data match! data.WebLog.FindById ctx.WebLog.Id with | Some webLog -> - let! model = ctx.BindFormAsync () + let! model = ctx.BindFormAsync() let theFeed = match model.Id with - | "new" -> Some { CustomFeed.empty with Id = CustomFeedId.create () } - | _ -> webLog.Rss.CustomFeeds |> List.tryFind (fun it -> CustomFeedId.toString it.Id = model.Id) + | "new" -> Some { CustomFeed.Empty with Id = CustomFeedId.Create() } + | _ -> webLog.Rss.CustomFeeds |> List.tryFind (fun it -> string it.Id = model.Id) match theFeed with | Some feed -> let feeds = model.UpdateFeed feed :: (webLog.Rss.CustomFeeds |> List.filter (fun it -> it.Id <> feed.Id)) - let webLog = { webLog with Rss = { webLog.Rss with CustomFeeds = feeds } } + let webLog = { webLog with Rss.CustomFeeds = feeds } do! data.WebLog.UpdateRssOptions webLog WebLogCache.set webLog - do! addMessage ctx { - UserMessage.success with - Message = $"""Successfully {if model.Id = "new" then "add" else "sav"}ed custom feed""" - } - return! redirectToGet $"admin/settings/rss/{CustomFeedId.toString feed.Id}/edit" next ctx + do! addMessage ctx + { UserMessage.Success with + Message = $"""Successfully {if model.Id = "new" then "add" else "sav"}ed custom feed""" } + return! redirectToGet $"admin/settings/rss/{feed.Id}/edit" next ctx | None -> return! Error.notFound next ctx | None -> return! Error.notFound next ctx } -// POST /admin/settings/rss/{id}/delete +// DELETE /admin/settings/rss/{id} let deleteCustomFeed feedId : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { let data = ctx.Data match! 
data.WebLog.FindById ctx.WebLog.Id with | Some webLog -> let customId = CustomFeedId feedId if webLog.Rss.CustomFeeds |> List.exists (fun f -> f.Id = customId) then - let webLog = { - webLog with - Rss = { - webLog.Rss with - CustomFeeds = webLog.Rss.CustomFeeds |> List.filter (fun f -> f.Id <> customId) - } - } + let webLog = + { webLog with + Rss = + { webLog.Rss with + CustomFeeds = webLog.Rss.CustomFeeds |> List.filter (fun f -> f.Id <> customId) } } do! data.WebLog.UpdateRssOptions webLog WebLogCache.set webLog - do! addMessage ctx { UserMessage.success with Message = "Custom feed deleted successfully" } + do! addMessage ctx { UserMessage.Success with Message = "Custom feed deleted successfully" } else - do! addMessage ctx { UserMessage.warning with Message = "Custom feed not found; no action taken" } + do! addMessage ctx { UserMessage.Warning with Message = "Custom feed not found; no action taken" } return! redirectToGet "admin/settings#rss-settings" next ctx | None -> return! Error.notFound next ctx } diff --git a/src/MyWebLog/Handlers/Helpers.fs b/src/MyWebLog/Handlers/Helpers.fs index 2edefe8..1a26bea 100644 --- a/src/MyWebLog/Handlers/Helpers.fs +++ b/src/MyWebLog/Handlers/Helpers.fs @@ -3,13 +3,14 @@ module private MyWebLog.Handlers.Helpers open System.Text.Json open Microsoft.AspNetCore.Http +open MyWebLog.Views /// Session extensions to get and set objects type ISession with /// Set an item in the session - member this.Set<'T> (key, item : 'T) = - this.SetString (key, JsonSerializer.Serialize item) + member this.Set<'T>(key, item: 'T) = + this.SetString(key, JsonSerializer.Serialize item) /// Get an item from the session member this.TryGet<'T> key = @@ -25,6 +26,10 @@ module ViewContext = [] let AntiCsrfTokens = "csrf" + /// The unified application view context + [] + let AppViewContext = "app" + /// The categories for this web log [] let Categories = "categories" @@ -126,28 +131,28 @@ module ViewContext = let private sessionLoadedKey = 
"session-loaded" /// Load the session if it has not been loaded already; ensures async access but not excessive loading -let private loadSession (ctx : HttpContext) = task { +let private loadSession (ctx: HttpContext) = task { if not (ctx.Items.ContainsKey sessionLoadedKey) then - do! ctx.Session.LoadAsync () - ctx.Items.Add (sessionLoadedKey, "yes") + do! ctx.Session.LoadAsync() + ctx.Items.Add(sessionLoadedKey, "yes") } /// Ensure that the session is committed -let private commitSession (ctx : HttpContext) = task { - if ctx.Items.ContainsKey sessionLoadedKey then do! ctx.Session.CommitAsync () +let private commitSession (ctx: HttpContext) = task { + if ctx.Items.ContainsKey sessionLoadedKey then do! ctx.Session.CommitAsync() } open MyWebLog.ViewModels /// Add a message to the user's session -let addMessage (ctx : HttpContext) message = task { +let addMessage (ctx: HttpContext) message = task { do! loadSession ctx let msg = match ctx.Session.TryGet ViewContext.Messages with Some it -> it | None -> [] - ctx.Session.Set (ViewContext.Messages, message :: msg) + ctx.Session.Set(ViewContext.Messages, message :: msg) } /// Get any messages from the user's session, removing them in the process -let messages (ctx : HttpContext) = task { +let messages (ctx: HttpContext) = task { do! 
loadSession ctx match ctx.Session.TryGet ViewContext.Messages with | Some msg -> @@ -160,23 +165,19 @@ open MyWebLog open DotLiquid /// Shorthand for creating a DotLiquid hash from an anonymous object -let makeHash (values : obj) = +let makeHash (values: obj) = Hash.FromAnonymousObject values /// Create a hash with the page title filled -let hashForPage (title : string) = +let hashForPage (title: string) = makeHash {| page_title = title |} /// Add a key to the hash, returning the modified hash // (note that the hash itself is mutated; this is only used to make it pipeable) -let addToHash key (value : obj) (hash : Hash) = - if hash.ContainsKey key then hash[key] <- value else hash.Add (key, value) +let addToHash key (value: obj) (hash: Hash) = + if hash.ContainsKey key then hash[key] <- value else hash.Add(key, value) hash -/// Add anti-CSRF tokens to the given hash -let withAntiCsrf (ctx : HttpContext) = - addToHash ViewContext.AntiCsrfTokens ctx.CsrfTokenSet - open System.Security.Claims open Giraffe open Giraffe.Htmx @@ -185,40 +186,70 @@ open Giraffe.ViewEngine /// htmx script tag let private htmxScript = RenderView.AsString.htmlNode Htmx.Script.minified -/// Populate the DotLiquid hash with standard information -let addViewContext ctx (hash : Hash) = task { +/// Get the current user messages, and commit the session so that they are preserved +let private getCurrentMessages ctx = task { let! messages = messages ctx do! 
commitSession ctx - return - if hash.ContainsKey ViewContext.HtmxScript && hash.ContainsKey ViewContext.Messages then - // We have already populated everything; just update messages - hash[ViewContext.Messages] <- Array.concat [ hash[ViewContext.Messages] :?> UserMessage[]; messages ] + return messages +} + +/// Generate the view context for a response +let private generateViewContext pageTitle messages includeCsrf (ctx: HttpContext) = + { WebLog = ctx.WebLog + UserId = ctx.User.Claims + |> Seq.tryFind (fun claim -> claim.Type = ClaimTypes.NameIdentifier) + |> Option.map (fun claim -> WebLogUserId claim.Value) + PageTitle = pageTitle + Csrf = if includeCsrf then Some ctx.CsrfTokenSet else None + PageList = PageListCache.get ctx + Categories = CategoryCache.get ctx + CurrentPage = ctx.Request.Path.Value[1..] + Messages = messages + Generator = ctx.Generator + HtmxScript = htmxScript + IsAuthor = ctx.HasAccessLevel Author + IsEditor = ctx.HasAccessLevel Editor + IsWebLogAdmin = ctx.HasAccessLevel WebLogAdmin + IsAdministrator = ctx.HasAccessLevel Administrator } + + +/// Populate the DotLiquid hash with standard information +let addViewContext ctx (hash: Hash) = task { + let! messages = getCurrentMessages ctx + if hash.ContainsKey ViewContext.AppViewContext then + let oldApp = hash[ViewContext.AppViewContext] :?> AppViewContext + let newApp = { oldApp with Messages = Array.concat [ oldApp.Messages; messages ] } + return hash - else - ctx.User.Claims - |> Seq.tryFind (fun claim -> claim.Type = ClaimTypes.NameIdentifier) - |> Option.map (fun claim -> addToHash ViewContext.UserId claim.Value hash) - |> Option.defaultValue hash - |> addToHash ViewContext.WebLog ctx.WebLog - |> addToHash ViewContext.PageList (PageListCache.get ctx) - |> addToHash ViewContext.Categories (CategoryCache.get ctx) - |> addToHash ViewContext.CurrentPage ctx.Request.Path.Value[1..] 
- |> addToHash ViewContext.Messages messages - |> addToHash ViewContext.Generator ctx.Generator - |> addToHash ViewContext.HtmxScript htmxScript - |> addToHash ViewContext.IsLoggedOn ctx.User.Identity.IsAuthenticated - |> addToHash ViewContext.IsAuthor (ctx.HasAccessLevel Author) - |> addToHash ViewContext.IsEditor (ctx.HasAccessLevel Editor) - |> addToHash ViewContext.IsWebLogAdmin (ctx.HasAccessLevel WebLogAdmin) - |> addToHash ViewContext.IsAdministrator (ctx.HasAccessLevel Administrator) + |> addToHash ViewContext.AppViewContext newApp + |> addToHash ViewContext.Messages newApp.Messages + else + let app = + generateViewContext (string hash[ViewContext.PageTitle]) messages + (hash.ContainsKey ViewContext.AntiCsrfTokens) ctx + return + hash + |> addToHash ViewContext.UserId (app.UserId |> Option.map string |> Option.defaultValue "") + |> addToHash ViewContext.WebLog app.WebLog + |> addToHash ViewContext.PageList app.PageList + |> addToHash ViewContext.Categories app.Categories + |> addToHash ViewContext.CurrentPage app.CurrentPage + |> addToHash ViewContext.Messages app.Messages + |> addToHash ViewContext.Generator app.Generator + |> addToHash ViewContext.HtmxScript app.HtmxScript + |> addToHash ViewContext.IsLoggedOn app.IsLoggedOn + |> addToHash ViewContext.IsAuthor app.IsAuthor + |> addToHash ViewContext.IsEditor app.IsEditor + |> addToHash ViewContext.IsWebLogAdmin app.IsWebLogAdmin + |> addToHash ViewContext.IsAdministrator app.IsAdministrator } /// Is the request from htmx? -let isHtmx (ctx : HttpContext) = +let isHtmx (ctx: HttpContext) = ctx.Request.IsHtmx && not ctx.Request.IsHtmxRefresh /// Convert messages to headers (used for htmx responses) -let messagesToHeaders (messages : UserMessage array) : HttpHandler = +let messagesToHeaders (messages: UserMessage array) : HttpHandler = seq { yield! 
messages @@ -234,9 +265,12 @@ let messagesToHeaders (messages : UserMessage array) : HttpHandler = /// Redirect after doing some action; commits session and issues a temporary redirect let redirectToGet url : HttpHandler = fun _ ctx -> task { do! commitSession ctx - return! redirectTo false (WebLog.relativeUrl ctx.WebLog (Permalink url)) earlyReturn ctx + return! redirectTo false (ctx.WebLog.RelativeUrl(Permalink url)) earlyReturn ctx } +/// The MIME type for podcast episode JSON chapters +let JSON_CHAPTERS = "application/json+chapters" + /// Handlers for error conditions module Error = @@ -247,24 +281,24 @@ module Error = let notAuthorized : HttpHandler = fun next ctx -> if ctx.Request.Method = "GET" then let redirectUrl = $"user/log-on?returnUrl={WebUtility.UrlEncode ctx.Request.Path}" - if isHtmx ctx then (withHxRedirect redirectUrl >=> redirectToGet redirectUrl) next ctx - else redirectToGet redirectUrl next ctx + (next, ctx) + ||> if isHtmx ctx then withHxRedirect redirectUrl >=> withHxRetarget "body" >=> redirectToGet redirectUrl + else redirectToGet redirectUrl else if isHtmx ctx then let messages = [| - { UserMessage.error with - Message = $"You are not authorized to access the URL {ctx.Request.Path.Value}" - } + { UserMessage.Error with + Message = $"You are not authorized to access the URL {ctx.Request.Path.Value}" } |] (messagesToHeaders messages >=> setStatusCode 401) earlyReturn ctx else setStatusCode 401 earlyReturn ctx - /// Handle 404s from the API, sending known URL paths to the Vue app so that they can be handled there + /// Handle 404s let notFound : HttpHandler = handleContext (fun ctx -> if isHtmx ctx then let messages = [| - { UserMessage.error with Message = $"The URL {ctx.Request.Path.Value} was not found" } + { UserMessage.Error with Message = $"The URL {ctx.Request.Path.Value} was not found" } |] RequestErrors.notFound (messagesToHeaders messages) earlyReturn ctx else RequestErrors.NOT_FOUND "Not found" earlyReturn ctx) @@ -272,13 +306,13 
@@ module Error = let server message : HttpHandler = handleContext (fun ctx -> if isHtmx ctx then - let messages = [| { UserMessage.error with Message = message } |] + let messages = [| { UserMessage.Error with Message = message } |] ServerErrors.internalError (messagesToHeaders messages) earlyReturn ctx else ServerErrors.INTERNAL_ERROR message earlyReturn ctx) /// Render a view for the specified theme, using the specified template, layout, and hash -let viewForTheme themeId template next ctx (hash : Hash) = task { +let viewForTheme themeId template next ctx (hash: Hash) = task { let! hash = addViewContext ctx hash // NOTE: DotLiquid does not support {% render %} or {% include %} in its templates, so we will do a 2-pass render; @@ -296,13 +330,13 @@ let viewForTheme themeId template next ctx (hash : Hash) = task { } /// Render a bare view for the specified theme, using the specified template and hash -let bareForTheme themeId template next ctx (hash : Hash) = task { +let bareForTheme themeId template next ctx (hash: Hash) = task { let! hash = addViewContext ctx hash let withContent = task { if hash.ContainsKey ViewContext.Content then return Ok hash else match! TemplateCache.get themeId template ctx.Data with - | Ok contentTemplate -> return Ok (addToHash ViewContext.Content (contentTemplate.Render hash) hash) + | Ok contentTemplate -> return Ok(addToHash ViewContext.Content (contentTemplate.Render hash) hash) | Error message -> return Error message } match! withContent with @@ -311,7 +345,7 @@ let bareForTheme themeId template next ctx (hash : Hash) = task { match! TemplateCache.get themeId "layout-bare" ctx.Data with | Ok layoutTemplate -> return! - (messagesToHeaders (hash[ViewContext.Messages] :?> UserMessage[]) + (messagesToHeaders (hash[ViewContext.Messages] :?> UserMessage array) >=> htmlString (layoutTemplate.Render completeHash)) next ctx | Error message -> return! 
Error.server message next ctx @@ -324,16 +358,22 @@ let themedView template next ctx hash = task { return! viewForTheme (hash[ViewContext.WebLog] :?> WebLog).ThemeId template next ctx hash } -/// The ID for the admin theme -let adminTheme = ThemeId "admin" +/// Display a page for an admin endpoint +let adminPage pageTitle includeCsrf next ctx (content: AppViewContext -> XmlNode list) = task { + let! messages = getCurrentMessages ctx + let appCtx = generateViewContext pageTitle messages includeCsrf ctx + let layout = if isHtmx ctx then Layout.partial else Layout.full + return! htmlString (layout content appCtx |> RenderView.AsString.htmlDocument) next ctx +} -/// Display a view for the admin theme -let adminView template = - viewForTheme adminTheme template - -/// Display a bare view for the admin theme -let adminBareView template = - bareForTheme adminTheme template +/// Display a bare page for an admin endpoint +let adminBarePage pageTitle includeCsrf next ctx (content: AppViewContext -> XmlNode list) = task { + let! messages = getCurrentMessages ctx + let appCtx = generateViewContext pageTitle messages includeCsrf ctx + return! + ( messagesToHeaders appCtx.Messages + >=> htmlString (Layout.bare content appCtx |> RenderView.AsString.htmlDocument)) next ctx +} /// Validate the anti cross-site request forgery token in the current request let validateCsrf : HttpHandler = fun next ctx -> task { @@ -348,59 +388,61 @@ let requireUser : HttpHandler = requiresAuthentication Error.notAuthorized /// Require a specific level of access for a route let requireAccess level : HttpHandler = fun next ctx -> task { match ctx.UserAccessLevel with - | Some userLevel when AccessLevel.hasAccess level userLevel -> return! next ctx + | Some userLevel when userLevel.HasAccess level -> return! next ctx | Some userLevel -> do! 
addMessage ctx - { UserMessage.warning with - Message = $"The page you tried to access requires {AccessLevel.toString level} privileges" - Detail = Some $"Your account only has {AccessLevel.toString userLevel} privileges" - } + { UserMessage.Warning with + Message = $"The page you tried to access requires {level} privileges" + Detail = Some $"Your account only has {userLevel} privileges" } return! Error.notAuthorized next ctx | None -> do! addMessage ctx - { UserMessage.warning with Message = "The page you tried to access required you to be logged on" } + { UserMessage.Warning with Message = "The page you tried to access required you to be logged on" } return! Error.notAuthorized next ctx } /// Determine if a user is authorized to edit a page or post, given the author -let canEdit authorId (ctx : HttpContext) = +let canEdit authorId (ctx: HttpContext) = ctx.UserId = authorId || ctx.HasAccessLevel Editor open System.Threading.Tasks /// Create a Task with a Some result for the given object -let someTask<'T> (it : 'T) = Task.FromResult (Some it) +let someTask<'T> (it: 'T) = Task.FromResult(Some it) + +/// Create an absolute URL from a string that may already be an absolute URL +let absoluteUrl (url: string) (ctx: HttpContext) = + if url.StartsWith "http" then url else ctx.WebLog.AbsoluteUrl(Permalink url) + -open System.Collections.Generic open MyWebLog.Data -/// Get the templates available for the current web log's theme (in a key/value pair list) -let templatesForTheme (ctx : HttpContext) (typ : string) = backgroundTask { +/// Get the templates available for the current web log's theme (in a meta item list) +let templatesForTheme (ctx: HttpContext) (typ: string) = backgroundTask { match! ctx.Data.Theme.FindByIdWithoutText ctx.WebLog.ThemeId with | Some theme -> return seq { - KeyValuePair.Create ("", $"- Default (single-{typ}) -") + { Name = ""; Value = $"- Default (single-{typ}) -" } yield! 
theme.Templates |> Seq.ofList |> Seq.filter (fun it -> it.Name.EndsWith $"-{typ}" && it.Name <> $"single-{typ}") - |> Seq.map (fun it -> KeyValuePair.Create (it.Name, it.Name)) + |> Seq.map (fun it -> { Name = it.Name; Value = it.Name }) } - |> Array.ofSeq - | None -> return [| KeyValuePair.Create ("", $"- Default (single-{typ}) -") |] + | None -> return seq { { Name = ""; Value = $"- Default (single-{typ}) -" } } } /// Get all authors for a list of posts as metadata items -let getAuthors (webLog : WebLog) (posts : Post list) (data : IData) = +let getAuthors (webLog: WebLog) (posts: Post list) (data: IData) = posts - |> List.map (fun p -> p.AuthorId) + |> List.map _.AuthorId |> List.distinct |> data.WebLogUser.FindNames webLog.Id /// Get all tag mappings for a list of posts as metadata items -let getTagMappings (webLog : WebLog) (posts : Post list) (data : IData) = +let getTagMappings (webLog: WebLog) (posts: Post list) (data: IData) = posts - |> List.map (fun p -> p.Tags) + |> List.map _.Tags |> List.concat |> List.distinct |> fun tags -> data.TagMap.FindMappingForTags tags webLog.Id @@ -416,13 +458,12 @@ let getCategoryIds slug ctx = |> Seq.map (fun c -> CategoryId c.Id) |> List.ofSeq -open System -open System.Globalization open NodaTime /// Parse a date/time to UTC -let parseToUtc (date : string) = - Instant.FromDateTimeUtc (DateTime.Parse (date, null, DateTimeStyles.AdjustToUniversal)) +let parseToUtc (date: string) : Instant = + let result = roundTrip.Parse date + if result.Success then result.Value else raise result.Exception open Microsoft.Extensions.DependencyInjection open Microsoft.Extensions.Logging @@ -431,25 +472,24 @@ open Microsoft.Extensions.Logging let mutable private debugEnabled : bool option = None /// Is debug enabled for handlers? 
-let private isDebugEnabled (ctx : HttpContext) = +let private isDebugEnabled (ctx: HttpContext) = match debugEnabled with | Some flag -> flag | None -> - let fac = ctx.RequestServices.GetRequiredService () + let fac = ctx.RequestServices.GetRequiredService() let log = fac.CreateLogger "MyWebLog.Handlers" - debugEnabled <- Some (log.IsEnabled LogLevel.Debug) + debugEnabled <- Some(log.IsEnabled LogLevel.Debug) debugEnabled.Value /// Log a debug message -let debug (name : string) ctx msg = +let debug (name: string) ctx msg = if isDebugEnabled ctx then - let fac = ctx.RequestServices.GetRequiredService () + let fac = ctx.RequestServices.GetRequiredService() let log = fac.CreateLogger $"MyWebLog.Handlers.{name}" - log.LogDebug (msg ()) + log.LogDebug(msg ()) /// Log a warning message -let warn (name : string) (ctx : HttpContext) msg = - let fac = ctx.RequestServices.GetRequiredService () +let warn (name: string) (ctx: HttpContext) msg = + let fac = ctx.RequestServices.GetRequiredService() let log = fac.CreateLogger $"MyWebLog.Handlers.{name}" log.LogWarning msg - \ No newline at end of file diff --git a/src/MyWebLog/Handlers/Page.fs b/src/MyWebLog/Handlers/Page.fs index 6ddeae8..f616375 100644 --- a/src/MyWebLog/Handlers/Page.fs +++ b/src/MyWebLog/Handlers/Page.fs @@ -9,26 +9,22 @@ open MyWebLog.ViewModels // GET /admin/pages/page/{pageNbr} let all pageNbr : HttpHandler = requireAccess Author >=> fun next ctx -> task { let! pages = ctx.Data.Page.FindPageOfPages ctx.WebLog.Id pageNbr + let displayPages = + pages + |> Seq.ofList + |> Seq.truncate 25 + |> Seq.map (DisplayPage.FromPageMinimal ctx.WebLog) + |> List.ofSeq return! 
- hashForPage "Pages" - |> withAntiCsrf ctx - |> addToHash "pages" (pages - |> Seq.ofList - |> Seq.truncate 25 - |> Seq.map (DisplayPage.fromPageMinimal ctx.WebLog) - |> List.ofSeq) - |> addToHash "page_nbr" pageNbr - |> addToHash "prev_page" (if pageNbr = 2 then "" else $"/page/{pageNbr - 1}") - |> addToHash "has_next" (List.length pages > 25) - |> addToHash "next_page" $"/page/{pageNbr + 1}" - |> adminView "page-list" next ctx + Views.Page.pageList displayPages pageNbr (pages.Length > 25) + |> adminPage "Pages" true next ctx } // GET /admin/page/{id}/edit let edit pgId : HttpHandler = requireAccess Author >=> fun next ctx -> task { let! result = task { match pgId with - | "new" -> return Some ("Add a New Page", { Page.empty with Id = PageId "new"; AuthorId = ctx.UserId }) + | "new" -> return Some ("Add a New Page", { Page.Empty with Id = PageId "new"; AuthorId = ctx.UserId }) | _ -> match! ctx.Data.Page.FindFullById (PageId pgId) ctx.WebLog.Id with | Some page -> return Some ("Edit Page", page) @@ -36,29 +32,21 @@ let edit pgId : HttpHandler = requireAccess Author >=> fun next ctx -> task { } match result with | Some (title, page) when canEdit page.AuthorId ctx -> - let model = EditPageModel.fromPage page + let model = EditPageModel.FromPage page let! templates = templatesForTheme ctx "page" - return! - hashForPage title - |> withAntiCsrf ctx - |> addToHash ViewContext.Model model - |> addToHash "metadata" ( - Array.zip model.MetaNames model.MetaValues - |> Array.mapi (fun idx (name, value) -> [| string idx; name; value |])) - |> addToHash "templates" templates - |> adminView "page-edit" next ctx + return! adminPage title true next ctx (Views.Page.pageEdit model templates) | Some _ -> return! Error.notAuthorized next ctx | None -> return! Error.notFound next ctx } -// POST /admin/page/{id}/delete +// DELETE /admin/page/{id} let delete pgId : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { match! 
ctx.Data.Page.Delete (PageId pgId) ctx.WebLog.Id with | true -> do! PageListCache.update ctx - do! addMessage ctx { UserMessage.success with Message = "Page deleted successfully" } - | false -> do! addMessage ctx { UserMessage.error with Message = "Page not found; nothing deleted" } - return! redirectToGet "admin/pages" next ctx + do! addMessage ctx { UserMessage.Success with Message = "Page deleted successfully" } + | false -> do! addMessage ctx { UserMessage.Error with Message = "Page not found; nothing deleted" } + return! all 1 next ctx } // GET /admin/page/{id}/permalinks @@ -66,24 +54,23 @@ let editPermalinks pgId : HttpHandler = requireAccess Author >=> fun next ctx -> match! ctx.Data.Page.FindFullById (PageId pgId) ctx.WebLog.Id with | Some pg when canEdit pg.AuthorId ctx -> return! - hashForPage "Manage Prior Permalinks" - |> withAntiCsrf ctx - |> addToHash ViewContext.Model (ManagePermalinksModel.fromPage pg) - |> adminView "permalinks" next ctx + ManagePermalinksModel.FromPage pg + |> Views.Helpers.managePermalinks + |> adminPage "Manage Prior Permalinks" true next ctx | Some _ -> return! Error.notAuthorized next ctx | None -> return! Error.notFound next ctx } // POST /admin/page/permalinks let savePermalinks : HttpHandler = requireAccess Author >=> fun next ctx -> task { - let! model = ctx.BindFormAsync () + let! model = ctx.BindFormAsync() let pageId = PageId model.Id match! ctx.Data.Page.FindById pageId ctx.WebLog.Id with | Some pg when canEdit pg.AuthorId ctx -> let links = model.Prior |> Array.map Permalink |> List.ofArray match! ctx.Data.Page.UpdatePriorPermalinks pageId ctx.WebLog.Id links with | true -> - do! addMessage ctx { UserMessage.success with Message = "Page permalinks saved successfully" } + do! addMessage ctx { UserMessage.Success with Message = "Page permalinks saved successfully" } return! redirectToGet $"admin/page/{model.Id}/permalinks" next ctx | false -> return! Error.notFound next ctx | Some _ -> return! 
Error.notAuthorized next ctx @@ -95,29 +82,28 @@ let editRevisions pgId : HttpHandler = requireAccess Author >=> fun next ctx -> match! ctx.Data.Page.FindFullById (PageId pgId) ctx.WebLog.Id with | Some pg when canEdit pg.AuthorId ctx -> return! - hashForPage "Manage Page Revisions" - |> withAntiCsrf ctx - |> addToHash ViewContext.Model (ManageRevisionsModel.fromPage ctx.WebLog pg) - |> adminView "revisions" next ctx + ManageRevisionsModel.FromPage pg + |> Views.Helpers.manageRevisions + |> adminPage "Manage Page Revisions" true next ctx | Some _ -> return! Error.notAuthorized next ctx | None -> return! Error.notFound next ctx } -// GET /admin/page/{id}/revisions/purge +// DELETE /admin/page/{id}/revisions let purgeRevisions pgId : HttpHandler = requireAccess Author >=> fun next ctx -> task { let data = ctx.Data match! data.Page.FindFullById (PageId pgId) ctx.WebLog.Id with | Some pg -> do! data.Page.Update { pg with Revisions = [ List.head pg.Revisions ] } - do! addMessage ctx { UserMessage.success with Message = "Prior revisions purged successfully" } - return! redirectToGet $"admin/page/{pgId}/revisions" next ctx + do! addMessage ctx { UserMessage.Success with Message = "Prior revisions purged successfully" } + return! editRevisions pgId next ctx | None -> return! Error.notFound next ctx } open Microsoft.AspNetCore.Http /// Find the page and the requested revision -let private findPageRevision pgId revDate (ctx : HttpContext) = task { +let private findPageRevision pgId revDate (ctx: HttpContext) = task { match! ctx.Data.Page.FindFullById (PageId pgId) ctx.WebLog.Id with | Some pg -> let asOf = parseToUtc revDate @@ -129,19 +115,9 @@ let private findPageRevision pgId revDate (ctx : HttpContext) = task { let previewRevision (pgId, revDate) : HttpHandler = requireAccess Author >=> fun next ctx -> task { match! findPageRevision pgId revDate ctx with | Some pg, Some rev when canEdit pg.AuthorId ctx -> - let _, extra = WebLog.hostAndPath ctx.WebLog - return! 
{| - content = - [ """
    """ - (MarkupText.toHtml >> addBaseToRelativeUrls extra) rev.Text - "
    " - ] - |> String.concat "" - |} - |> makeHash |> adminBareView "" next ctx + return! adminBarePage "" false next ctx (Views.Helpers.commonPreview rev) | Some _, Some _ -> return! Error.notAuthorized next ctx - | None, _ - | _, None -> return! Error.notFound next ctx + | None, _ | _, None -> return! Error.notFound next ctx } // POST /admin/page/{id}/revision/{revision-date}/restore @@ -151,22 +127,21 @@ let restoreRevision (pgId, revDate) : HttpHandler = requireAccess Author >=> fun do! ctx.Data.Page.Update { pg with Revisions = { rev with AsOf = Noda.now () } - :: (pg.Revisions |> List.filter (fun r -> r.AsOf <> rev.AsOf)) - } - do! addMessage ctx { UserMessage.success with Message = "Revision restored successfully" } + :: (pg.Revisions |> List.filter (fun r -> r.AsOf <> rev.AsOf)) } + do! addMessage ctx { UserMessage.Success with Message = "Revision restored successfully" } return! redirectToGet $"admin/page/{pgId}/revisions" next ctx | Some _, Some _ -> return! Error.notAuthorized next ctx | None, _ | _, None -> return! Error.notFound next ctx } -// POST /admin/page/{id}/revision/{revision-date}/delete +// DELETE /admin/page/{id}/revision/{revision-date} let deleteRevision (pgId, revDate) : HttpHandler = requireAccess Author >=> fun next ctx -> task { match! findPageRevision pgId revDate ctx with | Some pg, Some rev when canEdit pg.AuthorId ctx -> do! ctx.Data.Page.Update { pg with Revisions = pg.Revisions |> List.filter (fun r -> r.AsOf <> rev.AsOf) } - do! addMessage ctx { UserMessage.success with Message = "Revision deleted successfully" } - return! adminBareView "" next ctx (makeHash {| content = "" |}) + do! addMessage ctx { UserMessage.Success with Message = "Revision deleted successfully" } + return! adminBarePage "" false next ctx (fun _ -> []) | Some _, Some _ -> return! Error.notAuthorized next ctx | None, _ | _, None -> return! 
Error.notFound next ctx @@ -174,26 +149,26 @@ let deleteRevision (pgId, revDate) : HttpHandler = requireAccess Author >=> fun // POST /admin/page/save let save : HttpHandler = requireAccess Author >=> fun next ctx -> task { - let! model = ctx.BindFormAsync () + let! model = ctx.BindFormAsync() let data = ctx.Data let now = Noda.now () let tryPage = if model.IsNew then - { Page.empty with - Id = PageId.create () + { Page.Empty with + Id = PageId.Create() WebLogId = ctx.WebLog.Id AuthorId = ctx.UserId PublishedOn = now } |> someTask - else data.Page.FindFullById (PageId model.PageId) ctx.WebLog.Id + else data.Page.FindFullById (PageId model.Id) ctx.WebLog.Id match! tryPage with | Some page when canEdit page.AuthorId ctx -> let updateList = page.IsInPageList <> model.IsShownInPageList let updatedPage = model.UpdatePage page now do! (if model.IsNew then data.Page.Add else data.Page.Update) updatedPage if updateList then do! PageListCache.update ctx - do! addMessage ctx { UserMessage.success with Message = "Page saved successfully" } - return! redirectToGet $"admin/page/{PageId.toString page.Id}/edit" next ctx + do! addMessage ctx { UserMessage.Success with Message = "Page saved successfully" } + return! redirectToGet $"admin/page/{page.Id}/edit" next ctx | Some _ -> return! Error.notAuthorized next ctx | None -> return! 
Error.notFound next ctx } diff --git a/src/MyWebLog/Handlers/Post.fs b/src/MyWebLog/Handlers/Post.fs index c39dc86..b1ae54a 100644 --- a/src/MyWebLog/Handlers/Post.fs +++ b/src/MyWebLog/Handlers/Post.fs @@ -6,7 +6,7 @@ open System.Collections.Generic open MyWebLog /// Parse a slug and page number from an "everything else" URL -let private parseSlugAndPage webLog (slugAndPage : string seq) = +let private parseSlugAndPage webLog (slugAndPage: string seq) = let fullPath = slugAndPage |> Seq.head let slugPath = slugAndPage |> Seq.skip 1 |> Seq.head let slugs, isFeed = @@ -24,9 +24,10 @@ let private parseSlugAndPage webLog (slugAndPage : string seq) = | idx when idx + 2 = slugs.Length -> Some (int slugs[pageIdx + 1]) | _ -> None let slugParts = if pageIdx > 0 then Array.truncate pageIdx slugs else slugs - pageNbr, String.Join ("/", slugParts), isFeed + pageNbr, String.Join("/", slugParts), isFeed /// The type of post list being prepared +[] type ListType = | AdminList | CategoryList @@ -39,15 +40,15 @@ open MyWebLog.Data open MyWebLog.ViewModels /// Convert a list of posts into items ready to be displayed -let preparePostList webLog posts listType (url : string) pageNbr perPage (data : IData) = task { +let preparePostList webLog posts listType (url: string) pageNbr perPage (data: IData) = task { let! authors = getAuthors webLog posts data let! tagMappings = getTagMappings webLog posts data - let relUrl it = Some <| WebLog.relativeUrl webLog (Permalink it) + let relUrl it = Some <| webLog.RelativeUrl(Permalink it) let postItems = posts |> Seq.ofList |> Seq.truncate perPage - |> Seq.map (PostListItem.fromPost webLog) + |> Seq.map (PostListItem.FromPost webLog) |> Array.ofSeq let! 
olderPost, newerPost = match listType with @@ -55,10 +56,10 @@ let preparePostList webLog posts listType (url : string) pageNbr perPage (data : let post = List.head posts let target = defaultArg post.PublishedOn post.UpdatedOn data.Post.FindSurroundingPosts webLog.Id target - | _ -> Task.FromResult (None, None) + | _ -> Task.FromResult(None, None) let newerLink = match listType, pageNbr with - | SinglePost, _ -> newerPost |> Option.map (fun p -> Permalink.toString p.Permalink) + | SinglePost, _ -> newerPost |> Option.map (fun it -> string it.Permalink) | _, 1 -> None | PostList, 2 when webLog.DefaultPage = "posts" -> Some "" | PostList, _ -> relUrl $"page/{pageNbr - 1}" @@ -70,7 +71,7 @@ let preparePostList webLog posts listType (url : string) pageNbr perPage (data : | AdminList, _ -> relUrl $"admin/posts/page/{pageNbr - 1}" let olderLink = match listType, List.length posts > perPage with - | SinglePost, _ -> olderPost |> Option.map (fun p -> Permalink.toString p.Permalink) + | SinglePost, _ -> olderPost |> Option.map (fun it -> string it.Permalink) | _, false -> None | PostList, true -> relUrl $"page/{pageNbr + 1}" | CategoryList, true -> relUrl $"category/{url}/page/{pageNbr + 1}" @@ -81,9 +82,9 @@ let preparePostList webLog posts listType (url : string) pageNbr perPage (data : Authors = authors Subtitle = None NewerLink = newerLink - NewerName = newerPost |> Option.map (fun p -> p.Title) + NewerName = newerPost |> Option.map _.Title OlderLink = olderLink - OlderName = olderPost |> Option.map (fun p -> p.Title) + OlderName = olderPost |> Option.map _.Title } return makeHash {||} @@ -114,8 +115,8 @@ let pageOfPosts pageNbr : HttpHandler = fun next ctx -> task { } // GET /page/{pageNbr}/ -let redirectToPageOfPosts (pageNbr : int) : HttpHandler = fun next ctx -> - redirectTo true (WebLog.relativeUrl ctx.WebLog (Permalink $"page/{pageNbr}")) next ctx +let redirectToPageOfPosts (pageNbr: int) : HttpHandler = fun next ctx -> + redirectTo true 
(ctx.WebLog.RelativeUrl(Permalink $"page/{pageNbr}")) next ctx // GET /category/{slug}/ // GET /category/{slug}/page/{pageNbr} @@ -163,7 +164,7 @@ let pageOfTaggedPosts slugAndPage : HttpHandler = fun next ctx -> task { | None -> return urlTag } if isFeed then - return! Feed.generate (Feed.TagFeed (tag, $"tag/{rawTag}/{webLog.Rss.FeedName}")) + return! Feed.generate (Feed.TagFeed(tag, $"tag/{rawTag}/{webLog.Rss.FeedName}")) (defaultArg webLog.Rss.ItemsInFeed webLog.PostsPerPage) next ctx else match! data.Post.FindPageOfTaggedPosts webLog.Id tag pageNbr webLog.PostsPerPage with @@ -178,13 +179,13 @@ let pageOfTaggedPosts slugAndPage : HttpHandler = fun next ctx -> task { |> themedView "index" next ctx // Other systems use hyphens for spaces; redirect if this is an old tag link | _ -> - let spacedTag = tag.Replace ("-", " ") + let spacedTag = tag.Replace("-", " ") match! data.Post.FindPageOfTaggedPosts webLog.Id spacedTag pageNbr 1 with | posts when List.length posts > 0 -> let endUrl = if pageNbr = 1 then "" else $"page/{pageNbr}" return! redirectTo true - (WebLog.relativeUrl webLog (Permalink $"""tag/{spacedTag.Replace (" ", "+")}/{endUrl}""")) + (webLog.RelativeUrl(Permalink $"""tag/{spacedTag.Replace (" ", "+")}/{endUrl}""")) next ctx | _ -> return! Error.notFound next ctx | None, _, _ -> return! Error.notFound next ctx @@ -200,22 +201,60 @@ let home : HttpHandler = fun next ctx -> task { | Some page -> return! hashForPage page.Title - |> addToHash "page" (DisplayPage.fromPage webLog page) + |> addToHash "page" (DisplayPage.FromPage webLog page) |> addToHash ViewContext.IsHome true |> themedView (defaultArg page.Template "single-page") next ctx | None -> return! 
Error.notFound next ctx } +// GET /{post-permalink}?chapters +let chapters (post: Post) : HttpHandler = fun next ctx -> + match post.Episode with + | Some ep -> + match ep.Chapters with + | Some chapters -> + let chapterData = + chapters + |> Seq.ofList + |> Seq.map (fun it -> + let dic = Dictionary() + dic["startTime"] <- Math.Round(it.StartTime.TotalSeconds, 2) + it.Title |> Option.iter (fun ttl -> dic["title"] <- ttl) + it.ImageUrl |> Option.iter (fun img -> dic["img"] <- absoluteUrl img ctx) + it.Url |> Option.iter (fun url -> dic["url"] <- absoluteUrl url ctx) + it.IsHidden |> Option.iter (fun toc -> dic["toc"] <- not toc) + it.EndTime |> Option.iter (fun ent -> dic["endTime"] <- Math.Round(ent.TotalSeconds, 2)) + it.Location |> Option.iter (fun loc -> + let locData = Dictionary() + locData["name"] <- loc.Name + locData["geo"] <- loc.Geo + loc.Osm |> Option.iter (fun osm -> locData["osm"] <- osm) + dic["location"] <- locData) + dic) + |> ResizeArray + let jsonFile = Dictionary() + jsonFile["version"] <- "1.2.0" + jsonFile["title"] <- post.Title + jsonFile["fileName"] <- absoluteUrl ep.Media ctx + if defaultArg ep.ChapterWaypoints false then jsonFile["waypoints"] <- true + jsonFile["chapters"] <- chapterData + (setContentType JSON_CHAPTERS >=> json jsonFile) next ctx + | None -> + match ep.ChapterFile with + | Some file -> redirectTo true file next ctx + | None -> Error.notFound next ctx + | None -> Error.notFound next ctx + + +// ~~ ADMINISTRATION ~~ + // GET /admin/posts // GET /admin/posts/page/{pageNbr} let all pageNbr : HttpHandler = requireAccess Author >=> fun next ctx -> task { let data = ctx.Data let! posts = data.Post.FindPageOfPosts ctx.WebLog.Id pageNbr 25 let! hash = preparePostList ctx.WebLog posts AdminList "" pageNbr 25 data - return! - addToHash ViewContext.PageTitle "Posts" hash - |> withAntiCsrf ctx - |> adminView "post-list" next ctx + return! 
adminPage "Posts" true next ctx (Views.Post.list (hash[ViewContext.Model] :?> PostDisplay)) } // GET /admin/post/{id}/edit @@ -223,7 +262,7 @@ let edit postId : HttpHandler = requireAccess Author >=> fun next ctx -> task { let data = ctx.Data let! result = task { match postId with - | "new" -> return Some ("Write a New Post", { Post.empty with Id = PostId "new" }) + | "new" -> return Some ("Write a New Post", { Post.Empty with Id = PostId "new" }) | _ -> match! data.Post.FindFullById (PostId postId) ctx.WebLog.Id with | Some post -> return Some ("Edit Post", post) @@ -232,32 +271,25 @@ let edit postId : HttpHandler = requireAccess Author >=> fun next ctx -> task { match result with | Some (title, post) when canEdit post.AuthorId ctx -> let! templates = templatesForTheme ctx "post" - let model = EditPostModel.fromPost ctx.WebLog post - return! - hashForPage title - |> withAntiCsrf ctx - |> addToHash ViewContext.Model model - |> addToHash "metadata" ( - Array.zip model.MetaNames model.MetaValues - |> Array.mapi (fun idx (name, value) -> [| string idx; name; value |])) - |> addToHash "templates" templates - |> addToHash "explicit_values" [| - KeyValuePair.Create ("", "– Default –") - KeyValuePair.Create (ExplicitRating.toString Yes, "Yes") - KeyValuePair.Create (ExplicitRating.toString No, "No") - KeyValuePair.Create (ExplicitRating.toString Clean, "Clean") - |] - |> adminView "post-edit" next ctx + let model = EditPostModel.FromPost ctx.WebLog post + let ratings = [ + { Name = ""; Value = "– Default –" } + { Name = string Yes; Value = "Yes" } + { Name = string No; Value = "No" } + { Name = string Clean; Value = "Clean" } + ] + return! adminPage title true next ctx (Views.Post.postEdit model templates ratings) | Some _ -> return! Error.notAuthorized next ctx | None -> return! Error.notFound next ctx } -// POST /admin/post/{id}/delete +// DELETE /admin/post/{id} let delete postId : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { match! 
ctx.Data.Post.Delete (PostId postId) ctx.WebLog.Id with - | true -> do! addMessage ctx { UserMessage.success with Message = "Post deleted successfully" } - | false -> do! addMessage ctx { UserMessage.error with Message = "Post not found; nothing deleted" } - return! redirectToGet "admin/posts" next ctx + | true -> do! addMessage ctx { UserMessage.Success with Message = "Post deleted successfully" } + | false -> do! addMessage ctx { UserMessage.Error with Message = "Post not found; nothing deleted" } + //return! redirectToGet "admin/posts" next ctx + return! all 1 next ctx } // GET /admin/post/{id}/permalinks @@ -265,24 +297,23 @@ let editPermalinks postId : HttpHandler = requireAccess Author >=> fun next ctx match! ctx.Data.Post.FindFullById (PostId postId) ctx.WebLog.Id with | Some post when canEdit post.AuthorId ctx -> return! - hashForPage "Manage Prior Permalinks" - |> withAntiCsrf ctx - |> addToHash ViewContext.Model (ManagePermalinksModel.fromPost post) - |> adminView "permalinks" next ctx + ManagePermalinksModel.FromPost post + |> Views.Helpers.managePermalinks + |> adminPage "Manage Prior Permalinks" true next ctx | Some _ -> return! Error.notAuthorized next ctx | None -> return! Error.notFound next ctx } // POST /admin/post/permalinks let savePermalinks : HttpHandler = requireAccess Author >=> fun next ctx -> task { - let! model = ctx.BindFormAsync () + let! model = ctx.BindFormAsync() let postId = PostId model.Id match! ctx.Data.Post.FindById postId ctx.WebLog.Id with | Some post when canEdit post.AuthorId ctx -> let links = model.Prior |> Array.map Permalink |> List.ofArray match! ctx.Data.Post.UpdatePriorPermalinks postId ctx.WebLog.Id links with | true -> - do! addMessage ctx { UserMessage.success with Message = "Post permalinks saved successfully" } + do! addMessage ctx { UserMessage.Success with Message = "Post permalinks saved successfully" } return! redirectToGet $"admin/post/{model.Id}/permalinks" next ctx | false -> return! 
Error.notFound next ctx | Some _ -> return! Error.notAuthorized next ctx @@ -294,22 +325,21 @@ let editRevisions postId : HttpHandler = requireAccess Author >=> fun next ctx - match! ctx.Data.Post.FindFullById (PostId postId) ctx.WebLog.Id with | Some post when canEdit post.AuthorId ctx -> return! - hashForPage "Manage Post Revisions" - |> withAntiCsrf ctx - |> addToHash ViewContext.Model (ManageRevisionsModel.fromPost ctx.WebLog post) - |> adminView "revisions" next ctx + ManageRevisionsModel.FromPost post + |> Views.Helpers.manageRevisions + |> adminPage "Manage Post Revisions" true next ctx | Some _ -> return! Error.notAuthorized next ctx | None -> return! Error.notFound next ctx } -// GET /admin/post/{id}/revisions/purge +// DELETE /admin/post/{id}/revisions let purgeRevisions postId : HttpHandler = requireAccess Author >=> fun next ctx -> task { let data = ctx.Data match! data.Post.FindFullById (PostId postId) ctx.WebLog.Id with | Some post when canEdit post.AuthorId ctx -> do! data.Post.Update { post with Revisions = [ List.head post.Revisions ] } - do! addMessage ctx { UserMessage.success with Message = "Prior revisions purged successfully" } - return! redirectToGet $"admin/post/{postId}/revisions" next ctx + do! addMessage ctx { UserMessage.Success with Message = "Prior revisions purged successfully" } + return! editRevisions postId next ctx | Some _ -> return! Error.notAuthorized next ctx | None -> return! Error.notFound next ctx } @@ -317,7 +347,7 @@ let purgeRevisions postId : HttpHandler = requireAccess Author >=> fun next ctx open Microsoft.AspNetCore.Http /// Find the post and the requested revision -let private findPostRevision postId revDate (ctx : HttpContext) = task { +let private findPostRevision postId revDate (ctx: HttpContext) = task { match! 
ctx.Data.Post.FindFullById (PostId postId) ctx.WebLog.Id with | Some post -> let asOf = parseToUtc revDate @@ -329,19 +359,9 @@ let private findPostRevision postId revDate (ctx : HttpContext) = task { let previewRevision (postId, revDate) : HttpHandler = requireAccess Author >=> fun next ctx -> task { match! findPostRevision postId revDate ctx with | Some post, Some rev when canEdit post.AuthorId ctx -> - let _, extra = WebLog.hostAndPath ctx.WebLog - return! {| - content = - [ """
    """ - (MarkupText.toHtml >> addBaseToRelativeUrls extra) rev.Text - "
    " - ] - |> String.concat "" - |} - |> makeHash |> adminBareView "" next ctx + return! adminBarePage "" false next ctx (Views.Helpers.commonPreview rev) | Some _, Some _ -> return! Error.notAuthorized next ctx - | None, _ - | _, None -> return! Error.notFound next ctx + | None, _ | _, None -> return! Error.notFound next ctx } // POST /admin/post/{id}/revision/{revision-date}/restore @@ -351,39 +371,124 @@ let restoreRevision (postId, revDate) : HttpHandler = requireAccess Author >=> f do! ctx.Data.Post.Update { post with Revisions = { rev with AsOf = Noda.now () } - :: (post.Revisions |> List.filter (fun r -> r.AsOf <> rev.AsOf)) - } - do! addMessage ctx { UserMessage.success with Message = "Revision restored successfully" } + :: (post.Revisions |> List.filter (fun r -> r.AsOf <> rev.AsOf)) } + do! addMessage ctx { UserMessage.Success with Message = "Revision restored successfully" } return! redirectToGet $"admin/post/{postId}/revisions" next ctx | Some _, Some _ -> return! Error.notAuthorized next ctx | None, _ | _, None -> return! Error.notFound next ctx } -// POST /admin/post/{id}/revision/{revision-date}/delete +// DELETE /admin/post/{id}/revision/{revision-date} let deleteRevision (postId, revDate) : HttpHandler = requireAccess Author >=> fun next ctx -> task { match! findPostRevision postId revDate ctx with | Some post, Some rev when canEdit post.AuthorId ctx -> do! ctx.Data.Post.Update { post with Revisions = post.Revisions |> List.filter (fun r -> r.AsOf <> rev.AsOf) } - do! addMessage ctx { UserMessage.success with Message = "Revision deleted successfully" } - return! adminBareView "" next ctx (makeHash {| content = "" |}) + do! addMessage ctx { UserMessage.Success with Message = "Revision deleted successfully" } + return! adminBarePage "" false next ctx (fun _ -> []) | Some _, Some _ -> return! Error.notAuthorized next ctx | None, _ | _, None -> return! 
Error.notFound next ctx } +// GET /admin/post/{id}/chapters +let manageChapters postId : HttpHandler = requireAccess Author >=> fun next ctx -> task { + match! ctx.Data.Post.FindById (PostId postId) ctx.WebLog.Id with + | Some post + when Option.isSome post.Episode + && Option.isSome post.Episode.Value.Chapters + && canEdit post.AuthorId ctx -> + return! + Views.Post.chapters false (ManageChaptersModel.Create post) + |> adminPage "Manage Chapters" true next ctx + | Some _ | None -> return! Error.notFound next ctx +} + +// GET /admin/post/{id}/chapter/{idx} +let editChapter (postId, index) : HttpHandler = requireAccess Author >=> fun next ctx -> task { + match! ctx.Data.Post.FindById (PostId postId) ctx.WebLog.Id with + | Some post + when Option.isSome post.Episode + && Option.isSome post.Episode.Value.Chapters + && canEdit post.AuthorId ctx -> + let chapter = + if index = -1 then Some Chapter.Empty + else + let chapters = post.Episode.Value.Chapters.Value + if index < List.length chapters then Some chapters[index] else None + match chapter with + | Some chap -> + return! + Views.Post.chapterEdit (EditChapterModel.FromChapter post.Id index chap) + |> adminBarePage (if index = -1 then "Add a Chapter" else "Edit Chapter") true next ctx + | None -> return! Error.notFound next ctx + | Some _ | None -> return! Error.notFound next ctx +} + +// POST /admin/post/{id}/chapter/{idx} +let saveChapter (postId, index) : HttpHandler = requireAccess Author >=> fun next ctx -> task { + let data = ctx.Data + match! data.Post.FindFullById (PostId postId) ctx.WebLog.Id with + | Some post + when Option.isSome post.Episode + && Option.isSome post.Episode.Value.Chapters + && canEdit post.AuthorId ctx -> + let! 
form = ctx.BindFormAsync() + let chapters = post.Episode.Value.Chapters.Value + if index >= -1 && index < List.length chapters then + try + let chapter = form.ToChapter() + let existing = if index = -1 then chapters else List.removeAt index chapters + let updatedPost = + { post with + Episode = Some + { post.Episode.Value with + Chapters = Some (chapter :: existing |> List.sortBy _.StartTime) } } + do! data.Post.Update updatedPost + do! addMessage ctx { UserMessage.Success with Message = "Chapter saved successfully" } + return! + Views.Post.chapterList form.AddAnother (ManageChaptersModel.Create updatedPost) + |> adminBarePage "Manage Chapters" true next ctx + with + | ex -> return! Error.server ex.Message next ctx + else return! Error.notFound next ctx + | Some _ | None -> return! Error.notFound next ctx +} + +// DELETE /admin/post/{id}/chapter/{idx} +let deleteChapter (postId, index) : HttpHandler = requireAccess Author >=> fun next ctx -> task { + let data = ctx.Data + match! data.Post.FindById (PostId postId) ctx.WebLog.Id with + | Some post + when Option.isSome post.Episode + && Option.isSome post.Episode.Value.Chapters + && canEdit post.AuthorId ctx -> + let chapters = post.Episode.Value.Chapters.Value + if index >= 0 && index < List.length chapters then + let updatedPost = + { post with + Episode = Some { post.Episode.Value with Chapters = Some (List.removeAt index chapters) } } + do! data.Post.Update updatedPost + do! addMessage ctx { UserMessage.Success with Message = "Chapter deleted successfully" } + return! + Views.Post.chapterList false (ManageChaptersModel.Create updatedPost) + |> adminPage "Manage Chapters" true next ctx + else return! Error.notFound next ctx + | Some _ | None -> return! Error.notFound next ctx +} + // POST /admin/post/save let save : HttpHandler = requireAccess Author >=> fun next ctx -> task { - let! model = ctx.BindFormAsync () + let! 
model = ctx.BindFormAsync() let data = ctx.Data let tryPost = if model.IsNew then - { Post.empty with - Id = PostId.create () + { Post.Empty with + Id = PostId.Create() WebLogId = ctx.WebLog.Id - AuthorId = ctx.UserId - } |> someTask - else data.Post.FindFullById (PostId model.PostId) ctx.WebLog.Id + AuthorId = ctx.UserId } + |> someTask + else data.Post.FindFullById (PostId model.Id) ctx.WebLog.Id match! tryPost with | Some post when canEdit post.AuthorId ctx -> let priorCats = post.CategoryIds @@ -397,11 +502,10 @@ let save : HttpHandler = requireAccess Author >=> fun next ctx -> task { { post with PublishedOn = Some dt UpdatedOn = dt - Revisions = [ { (List.head post.Revisions) with AsOf = dt } ] - } + Revisions = [ { (List.head post.Revisions) with AsOf = dt } ] } else { post with PublishedOn = Some dt } else post - do! (if model.PostId = "new" then data.Post.Add else data.Post.Update) updatedPost + do! (if model.IsNew then data.Post.Add else data.Post.Update) updatedPost // If the post was published or its categories changed, refresh the category cache if model.DoPublish || not (priorCats @@ -409,8 +513,8 @@ let save : HttpHandler = requireAccess Author >=> fun next ctx -> task { |> List.distinct |> List.length = List.length priorCats) then do! CategoryCache.update ctx - do! addMessage ctx { UserMessage.success with Message = "Post saved successfully" } - return! redirectToGet $"admin/post/{PostId.toString post.Id}/edit" next ctx + do! addMessage ctx { UserMessage.Success with Message = "Post saved successfully" } + return! redirectToGet $"admin/post/{post.Id}/edit" next ctx | Some _ -> return! Error.notAuthorized next ctx | None -> return! 
Error.notFound next ctx } diff --git a/src/MyWebLog/Handlers/Routes.fs b/src/MyWebLog/Handlers/Routes.fs index e664a9d..733f29b 100644 --- a/src/MyWebLog/Handlers/Routes.fs +++ b/src/MyWebLog/Handlers/Routes.fs @@ -11,28 +11,33 @@ module CatchAll = open MyWebLog.ViewModels /// Sequence where the first returned value is the proper handler for the link - let private deriveAction (ctx : HttpContext) : HttpHandler seq = + let private deriveAction (ctx: HttpContext) : HttpHandler seq = let webLog = ctx.WebLog let data = ctx.Data let debug = debug "Routes.CatchAll" ctx let textLink = - let _, extra = WebLog.hostAndPath webLog - let url = string ctx.Request.Path - (if extra = "" then url else url.Substring extra.Length).ToLowerInvariant () + let extra = webLog.ExtraPath + let url = string ctx.Request.Path + (if extra = "" then url else url[extra.Length..]).ToLowerInvariant() let await it = (Async.AwaitTask >> Async.RunSynchronously) it seq { debug (fun () -> $"Considering URL {textLink}") // Home page directory without the directory slash - if textLink = "" then yield redirectTo true (WebLog.relativeUrl webLog Permalink.empty) - let permalink = Permalink (textLink.Substring 1) + if textLink = "" then yield redirectTo true (webLog.RelativeUrl Permalink.Empty) + let permalink = Permalink textLink[1..] 
// Current post match data.Post.FindByPermalink permalink webLog.Id |> await with | Some post -> debug (fun () -> "Found post by permalink") - let hash = Post.preparePostList webLog [ post ] Post.ListType.SinglePost "" 1 1 data |> await - yield fun next ctx -> - addToHash ViewContext.PageTitle post.Title hash - |> themedView (defaultArg post.Template "single-post") next ctx + if post.Status = Published || Option.isSome ctx.UserAccessLevel then + if ctx.Request.Query.ContainsKey "chapters" then + yield Post.chapters post + else + yield fun next ctx -> + Post.preparePostList webLog [ post ] Post.ListType.SinglePost "" 1 1 data + |> await + |> addToHash ViewContext.PageTitle post.Title + |> themedView (defaultArg post.Template "single-post") next ctx | None -> () // Current page match data.Page.FindByPermalink permalink webLog.Id |> await with @@ -40,7 +45,7 @@ module CatchAll = debug (fun () -> "Found page by permalink") yield fun next ctx -> hashForPage page.Title - |> addToHash "page" (DisplayPage.fromPage webLog page) + |> addToHash "page" (DisplayPage.FromPage webLog page) |> addToHash ViewContext.IsPage true |> themedView (defaultArg page.Template "single-page") next ctx | None -> () @@ -56,25 +61,25 @@ module CatchAll = match data.Post.FindByPermalink altLink webLog.Id |> await with | Some post -> debug (fun () -> "Found post by trailing-slash-agnostic permalink") - yield redirectTo true (WebLog.relativeUrl webLog post.Permalink) + yield redirectTo true (webLog.RelativeUrl post.Permalink) | None -> () // Page differing only by trailing slash match data.Page.FindByPermalink altLink webLog.Id |> await with | Some page -> debug (fun () -> "Found page by trailing-slash-agnostic permalink") - yield redirectTo true (WebLog.relativeUrl webLog page.Permalink) + yield redirectTo true (webLog.RelativeUrl page.Permalink) | None -> () // Prior post match data.Post.FindCurrentPermalink [ permalink; altLink ] webLog.Id |> await with | Some link -> debug (fun () -> "Found 
post by prior permalink") - yield redirectTo true (WebLog.relativeUrl webLog link) + yield redirectTo true (webLog.RelativeUrl link) | None -> () // Prior page match data.Page.FindCurrentPermalink [ permalink; altLink ] webLog.Id |> await with | Some link -> debug (fun () -> "Found page by prior permalink") - yield redirectTo true (WebLog.relativeUrl webLog link) + yield redirectTo true (webLog.RelativeUrl link) | None -> () debug (fun () -> "No content found") } @@ -88,13 +93,13 @@ module CatchAll = module Asset = // GET /theme/{theme}/{**path} - let serve (urlParts : string seq) : HttpHandler = fun next ctx -> task { + let serve (urlParts: string seq) : HttpHandler = fun next ctx -> task { let path = urlParts |> Seq.skip 1 |> Seq.head - match! ctx.Data.ThemeAsset.FindById (ThemeAssetId.ofString path) with + match! ctx.Data.ThemeAsset.FindById(ThemeAssetId.Parse path) with | Some asset -> match Upload.checkModified asset.UpdatedOn ctx with | Some threeOhFour -> return! threeOhFour next ctx - | None -> return! Upload.sendFile (asset.UpdatedOn.ToDateTimeUtc ()) path asset.Data next ctx + | None -> return! Upload.sendFile (asset.UpdatedOn.ToDateTimeUtc()) path asset.Data next ctx | None -> return! 
Error.notFound next ctx } @@ -107,9 +112,8 @@ let router : HttpHandler = choose [ subRoute "/admin" (requireUser >=> choose [ GET_HEAD >=> choose [ route "/administration" >=> Admin.Dashboard.admin - subRoute "/categor" (choose [ + subRoute "/categor" (requireAccess WebLogAdmin >=> choose [ route "ies" >=> Admin.Category.all - route "ies/bare" >=> Admin.Category.bare routef "y/%s/edit" Admin.Category.edit ]) route "/dashboard" >=> Admin.Dashboard.user @@ -129,18 +133,24 @@ let router : HttpHandler = choose [ routef "/%s/permalinks" Post.editPermalinks routef "/%s/revision/%s/preview" Post.previewRevision routef "/%s/revisions" Post.editRevisions + routef "/%s/chapter/%i" Post.editChapter + routef "/%s/chapters" Post.manageChapters ]) - subRoute "/settings" (choose [ - route "" >=> Admin.WebLog.settings - routef "/rss/%s/edit" Feed.editCustomFeed - subRoute "/user" (choose [ - route "s" >=> User.all - routef "/%s/edit" User.edit + subRoute "/settings" (requireAccess WebLogAdmin >=> choose [ + route "" >=> Admin.WebLog.settings + routef "/rss/%s/edit" Feed.editCustomFeed + subRoute "/redirect-rules" (choose [ + route "" >=> Admin.RedirectRules.all + routef "/%i" Admin.RedirectRules.edit ]) subRoute "/tag-mapping" (choose [ route "s" >=> Admin.TagMapping.all routef "/%s/edit" Admin.TagMapping.edit ]) + subRoute "/user" (choose [ + route "s" >=> User.all + routef "/%s/edit" User.edit + ]) ]) subRoute "/theme" (choose [ route "/list" >=> Admin.Theme.all @@ -156,7 +166,7 @@ let router : HttpHandler = choose [ routef "/theme/%s/refresh" Admin.Cache.refreshTheme routef "/web-log/%s/refresh" Admin.Cache.refreshWebLog ]) - subRoute "/category" (choose [ + subRoute "/category" (requireAccess WebLogAdmin >=> choose [ route "/save" >=> Admin.Category.save routef "/%s/delete" Admin.Category.delete ]) @@ -164,43 +174,56 @@ let router : HttpHandler = choose [ subRoute "/page" (choose [ route "/save" >=> Page.save route "/permalinks" >=> Page.savePermalinks - routef "/%s/delete" 
Page.delete - routef "/%s/revision/%s/delete" Page.deleteRevision routef "/%s/revision/%s/restore" Page.restoreRevision - routef "/%s/revisions/purge" Page.purgeRevisions ]) subRoute "/post" (choose [ route "/save" >=> Post.save route "/permalinks" >=> Post.savePermalinks - routef "/%s/delete" Post.delete - routef "/%s/revision/%s/delete" Post.deleteRevision + routef "/%s/chapter/%i" Post.saveChapter routef "/%s/revision/%s/restore" Post.restoreRevision - routef "/%s/revisions/purge" Post.purgeRevisions ]) - subRoute "/settings" (choose [ - route "" >=> Admin.WebLog.saveSettings + subRoute "/settings" (requireAccess WebLogAdmin >=> choose [ + route "" >=> Admin.WebLog.saveSettings subRoute "/rss" (choose [ - route "" >=> Feed.saveSettings - route "/save" >=> Feed.saveCustomFeed - routef "/%s/delete" Feed.deleteCustomFeed + route "" >=> Feed.saveSettings + route "/save" >=> Feed.saveCustomFeed ]) - subRoute "/tag-mapping" (choose [ - route "/save" >=> Admin.TagMapping.save - routef "/%s/delete" Admin.TagMapping.delete - ]) - subRoute "/user" (choose [ - route "/save" >=> User.save - routef "/%s/delete" User.delete + subRoute "/redirect-rules" (choose [ + routef "/%i" Admin.RedirectRules.save + routef "/%i/up" Admin.RedirectRules.moveUp + routef "/%i/down" Admin.RedirectRules.moveDown ]) + route "/tag-mapping/save" >=> Admin.TagMapping.save + route "/user/save" >=> User.save ]) subRoute "/theme" (choose [ route "/new" >=> Admin.Theme.save routef "/%s/delete" Admin.Theme.delete ]) - subRoute "/upload" (choose [ - route "/save" >=> Upload.save - routexp "/delete/(.*)" Upload.deleteFromDisk - routef "/%s/delete" Upload.deleteFromDb + route "/upload/save" >=> Upload.save + ] + DELETE >=> validateCsrf >=> choose [ + routef "/category/%s" Admin.Category.delete + subRoute "/page" (choose [ + routef "/%s" Page.delete + routef "/%s/revision/%s" Page.deleteRevision + routef "/%s/revisions" Page.purgeRevisions + ]) + subRoute "/post" (choose [ + routef "/%s" Post.delete + 
routef "/%s/chapter/%i" Post.deleteChapter + routef "/%s/revision/%s" Post.deleteRevision + routef "/%s/revisions" Post.purgeRevisions + ]) + subRoute "/settings" (requireAccess WebLogAdmin >=> choose [ + routef "/redirect-rules/%i" Admin.RedirectRules.delete + routef "/rss/%s" Feed.deleteCustomFeed + routef "/tag-mapping/%s" Admin.TagMapping.delete + routef "/user/%s" User.delete + ]) + subRoute "/upload" (requireAccess WebLogAdmin >=> choose [ + routexp "/disk/(.*)" Upload.deleteFromDisk + routef "/%s" Upload.deleteFromDb ]) ] ]) @@ -229,7 +252,7 @@ let routerWithPath extraPath : HttpHandler = /// Handler to apply Giraffe routing with a possible sub-route let handleRoute : HttpHandler = fun next ctx -> - let _, extraPath = WebLog.hostAndPath ctx.WebLog + let extraPath = ctx.WebLog.ExtraPath (if extraPath = "" then router else routerWithPath extraPath) next ctx diff --git a/src/MyWebLog/Handlers/Upload.fs b/src/MyWebLog/Handlers/Upload.fs index c1c840d..c992eda 100644 --- a/src/MyWebLog/Handlers/Upload.fs +++ b/src/MyWebLog/Handlers/Upload.fs @@ -12,7 +12,7 @@ module private Helpers = open Microsoft.AspNetCore.StaticFiles /// A MIME type mapper instance to use when serving files from the database - let mimeMap = FileExtensionContentTypeProvider () + let mimeMap = FileExtensionContentTypeProvider() /// A cache control header that instructs the browser to cache the result for no more than 30 days let cacheForThirtyDays = @@ -24,7 +24,7 @@ module private Helpers = let slash = Path.DirectorySeparatorChar /// The base directory where uploads are stored, relative to the executable - let uploadDir = Path.Combine ("wwwroot", "upload") + let uploadDir = Path.Combine("wwwroot", "upload") // ~~ SERVING UPLOADS ~~ @@ -35,10 +35,10 @@ open Microsoft.AspNetCore.Http open NodaTime /// Determine if the file has been modified since the date/time specified by the If-Modified-Since header -let checkModified since (ctx : HttpContext) : HttpHandler option = +let checkModified since 
(ctx: HttpContext) : HttpHandler option = match ctx.Request.Headers.IfModifiedSince with | it when it.Count < 1 -> None - | it when since > Instant.FromDateTimeUtc (DateTime.Parse (it[0], null, DateTimeStyles.AdjustToUniversal)) -> None + | it when since > Instant.FromDateTimeUtc(DateTime.Parse(it[0], null, DateTimeStyles.AdjustToUniversal)) -> None | _ -> Some (setStatusCode 304) @@ -53,29 +53,29 @@ let sendFile updatedOn path (data : byte[]) : HttpHandler = fun next ctx -> let headers = ResponseHeaders ctx.Response.Headers headers.ContentType <- (deriveMimeType >> MediaTypeHeaderValue) path headers.CacheControl <- cacheForThirtyDays - let stream = new MemoryStream (data) + let stream = new MemoryStream(data) streamData true stream None (Some (DateTimeOffset updatedOn)) next ctx open MyWebLog // GET /upload/{web-log-slug}/{**path} -let serve (urlParts : string seq) : HttpHandler = fun next ctx -> task { +let serve (urlParts: string seq) : HttpHandler = fun next ctx -> task { let webLog = ctx.WebLog let parts = (urlParts |> Seq.skip 1 |> Seq.head).Split '/' let slug = Array.head parts if slug = webLog.Slug then // Static file middleware will not work in subdirectories; check for an actual file first - let fileName = Path.Combine ("wwwroot", (Seq.head urlParts)[1..]) + let fileName = Path.Combine("wwwroot", (Seq.head urlParts)[1..]) if File.Exists fileName then return! streamFile true fileName None None next ctx else - let path = String.Join ('/', Array.skip 1 parts) + let path = String.Join('/', Array.skip 1 parts) match! ctx.Data.Upload.FindByPath path webLog.Id with | Some upload -> match checkModified upload.UpdatedOn ctx with | Some threeOhFour -> return! threeOhFour next ctx - | None -> return! sendFile (upload.UpdatedOn.ToDateTimeUtc ()) path upload.Data next ctx + | None -> return! sendFile (upload.UpdatedOn.ToDateTimeUtc()) path upload.Data next ctx | None -> return! Error.notFound next ctx else return! 
Error.notFound next ctx @@ -87,122 +87,109 @@ open System.Text.RegularExpressions open MyWebLog.ViewModels /// Turn a string into a lowercase URL-safe slug -let makeSlug it = ((Regex """\s+""").Replace ((Regex "[^A-z0-9 -]").Replace (it, ""), "-")).ToLowerInvariant () +let makeSlug it = (Regex """\s+""").Replace((Regex "[^A-z0-9 -]").Replace(it, ""), "-").ToLowerInvariant() // GET /admin/uploads let list : HttpHandler = requireAccess Author >=> fun next ctx -> task { let webLog = ctx.WebLog let! dbUploads = ctx.Data.Upload.FindByWebLog webLog.Id let diskUploads = - let path = Path.Combine (uploadDir, webLog.Slug) + let path = Path.Combine(uploadDir, webLog.Slug) try - Directory.EnumerateFiles (path, "*", SearchOption.AllDirectories) + Directory.EnumerateFiles(path, "*", SearchOption.AllDirectories) |> Seq.map (fun file -> let name = Path.GetFileName file let create = - match File.GetCreationTime (Path.Combine (path, file)) with + match File.GetCreationTime(Path.Combine(path, file)) with | dt when dt > DateTime.UnixEpoch -> Some dt | _ -> None { DisplayUpload.Id = "" Name = name - Path = file.Replace($"{path}{slash}", "").Replace(name, "").Replace (slash, '/') + Path = file.Replace($"{path}{slash}", "").Replace(name, "").Replace(slash, '/') UpdatedOn = create - Source = UploadDestination.toString Disk - }) - |> List.ofSeq + Source = string Disk }) with | :? DirectoryNotFoundException -> [] // This is fine | ex -> warn "Upload" ctx $"Encountered {ex.GetType().Name} listing uploads for {path}:\n{ex.Message}" [] - let allFiles = - dbUploads - |> List.map (DisplayUpload.fromUpload webLog Database) - |> List.append diskUploads - |> List.sortByDescending (fun file -> file.UpdatedOn, file.Path) return! 
- hashForPage "Uploaded Files" - |> withAntiCsrf ctx - |> addToHash "files" allFiles - |> adminView "upload-list" next ctx + dbUploads + |> Seq.ofList + |> Seq.map (DisplayUpload.FromUpload webLog Database) + |> Seq.append diskUploads + |> Seq.sortByDescending (fun file -> file.UpdatedOn, file.Path) + |> Views.WebLog.uploadList + |> adminPage "Uploaded Files" true next ctx } // GET /admin/upload/new let showNew : HttpHandler = requireAccess Author >=> fun next ctx -> - hashForPage "Upload a File" - |> withAntiCsrf ctx - |> addToHash "destination" (UploadDestination.toString ctx.WebLog.Uploads) - |> adminView "upload-new" next ctx - - -/// Redirect to the upload list -let showUploads : HttpHandler = - redirectToGet "admin/uploads" + adminPage "Upload a File" true next ctx Views.WebLog.uploadNew // POST /admin/upload/save let save : HttpHandler = requireAccess Author >=> fun next ctx -> task { if ctx.Request.HasFormContentType && ctx.Request.Form.Files.Count > 0 then let upload = Seq.head ctx.Request.Form.Files let fileName = String.Concat (makeSlug (Path.GetFileNameWithoutExtension upload.FileName), - Path.GetExtension(upload.FileName).ToLowerInvariant ()) + Path.GetExtension(upload.FileName).ToLowerInvariant()) let now = Noda.now () - let localNow = WebLog.localTime ctx.WebLog now + let localNow = ctx.WebLog.LocalTime now let year = localNow.ToString "yyyy" let month = localNow.ToString "MM" - let! form = ctx.BindFormAsync () + let! form = ctx.BindFormAsync() - match UploadDestination.parse form.Destination with + match UploadDestination.Parse form.Destination with | Database -> - use stream = new MemoryStream () + use stream = new MemoryStream() do! upload.CopyToAsync stream let file = - { Id = UploadId.create () + { Id = UploadId.Create() WebLogId = ctx.WebLog.Id Path = Permalink $"{year}/{month}/{fileName}" UpdatedOn = now - Data = stream.ToArray () - } + Data = stream.ToArray() } do! 
ctx.Data.Upload.Add file | Disk -> - let fullPath = Path.Combine (uploadDir, ctx.WebLog.Slug, year, month) + let fullPath = Path.Combine(uploadDir, ctx.WebLog.Slug, year, month) let _ = Directory.CreateDirectory fullPath - use stream = new FileStream (Path.Combine (fullPath, fileName), FileMode.Create) + use stream = new FileStream(Path.Combine(fullPath, fileName), FileMode.Create) do! upload.CopyToAsync stream - do! addMessage ctx { UserMessage.success with Message = $"File uploaded to {form.Destination} successfully" } - return! showUploads next ctx + do! addMessage ctx { UserMessage.Success with Message = $"File uploaded to {form.Destination} successfully" } + return! redirectToGet "admin/uploads" next ctx else return! RequestErrors.BAD_REQUEST "Bad request; no file present" next ctx } -// POST /admin/upload/{id}/delete -let deleteFromDb upId : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { +// DELETE /admin/upload/{id} +let deleteFromDb upId : HttpHandler = fun next ctx -> task { match! ctx.Data.Upload.Delete (UploadId upId) ctx.WebLog.Id with | Ok fileName -> - do! addMessage ctx { UserMessage.success with Message = $"{fileName} deleted successfully" } - return! showUploads next ctx + do! addMessage ctx { UserMessage.Success with Message = $"{fileName} deleted successfully" } + return! list next ctx | Error _ -> return! 
Error.notFound next ctx } /// Remove a directory tree if it is empty -let removeEmptyDirectories (webLog : WebLog) (filePath : string) = +let removeEmptyDirectories (webLog: WebLog) (filePath: string) = let mutable path = Path.GetDirectoryName filePath let mutable finished = false while (not finished) && path > "" do - let fullPath = Path.Combine (uploadDir, webLog.Slug, path) + let fullPath = Path.Combine(uploadDir, webLog.Slug, path) if Directory.EnumerateFileSystemEntries fullPath |> Seq.isEmpty then Directory.Delete fullPath path <- String.Join(slash, path.Split slash |> Array.rev |> Array.skip 1 |> Array.rev) else finished <- true -// POST /admin/upload/delete/{**path} -let deleteFromDisk urlParts : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { +// DELETE /admin/upload/disk/{**path} +let deleteFromDisk urlParts : HttpHandler = fun next ctx -> task { let filePath = urlParts |> Seq.skip 1 |> Seq.head - let path = Path.Combine (uploadDir, ctx.WebLog.Slug, filePath) + let path = Path.Combine(uploadDir, ctx.WebLog.Slug, filePath) if File.Exists path then File.Delete path removeEmptyDirectories ctx.WebLog filePath - do! addMessage ctx { UserMessage.success with Message = $"{filePath} deleted successfully" } - return! showUploads next ctx + do! addMessage ctx { UserMessage.Success with Message = $"{filePath} deleted successfully" } + return! list next ctx else return! 
Error.notFound next ctx } diff --git a/src/MyWebLog/Handlers/User.fs b/src/MyWebLog/Handlers/User.fs index 6a67a61..5f972ac 100644 --- a/src/MyWebLog/Handlers/User.fs +++ b/src/MyWebLog/Handlers/User.fs @@ -5,23 +5,22 @@ open System open Microsoft.AspNetCore.Http open Microsoft.AspNetCore.Identity open MyWebLog -open NodaTime // ~~ LOG ON / LOG OFF ~~ /// Create a password hash a password for a given user let createPasswordHash user password = - PasswordHasher().HashPassword (user, password) + PasswordHasher().HashPassword(user, password) /// Verify whether a password is valid -let verifyPassword user password (ctx : HttpContext) = backgroundTask { +let verifyPassword user password (ctx: HttpContext) = backgroundTask { match user with | Some usr -> - let hasher = PasswordHasher () - match hasher.VerifyHashedPassword (usr, usr.PasswordHash, password) with + let hasher = PasswordHasher() + match hasher.VerifyHashedPassword(usr, usr.PasswordHash, password) with | PasswordVerificationResult.Success -> return Ok () | PasswordVerificationResult.SuccessRehashNeeded -> - do! ctx.Data.WebLogUser.Update { usr with PasswordHash = hasher.HashPassword (usr, password) } + do! 
ctx.Data.WebLogUser.Update { usr with PasswordHash = hasher.HashPassword(usr, password) } return Ok () | _ -> return Error "Log on attempt unsuccessful" | None -> return Error "Log on attempt unsuccessful" @@ -36,10 +35,7 @@ let logOn returnUrl : HttpHandler = fun next ctx -> match returnUrl with | Some _ -> returnUrl | None -> if ctx.Request.Query.ContainsKey "returnUrl" then Some ctx.Request.Query["returnUrl"].[0] else None - hashForPage "Log On" - |> withAntiCsrf ctx - |> addToHash ViewContext.Model { LogOnModel.empty with ReturnTo = returnTo } - |> adminView "log-on" next ctx + adminPage "Log On" true next ctx (Views.User.logOn { LogOnModel.Empty with ReturnTo = returnTo }) open System.Security.Claims @@ -48,90 +44,74 @@ open Microsoft.AspNetCore.Authentication.Cookies // POST /user/log-on let doLogOn : HttpHandler = fun next ctx -> task { - let! model = ctx.BindFormAsync () + let! model = ctx.BindFormAsync() let data = ctx.Data let! tryUser = data.WebLogUser.FindByEmail model.EmailAddress ctx.WebLog.Id match! verifyPassword tryUser model.Password ctx with | Ok _ -> let user = tryUser.Value let claims = seq { - Claim (ClaimTypes.NameIdentifier, WebLogUserId.toString user.Id) - Claim (ClaimTypes.Name, $"{user.FirstName} {user.LastName}") - Claim (ClaimTypes.GivenName, user.PreferredName) - Claim (ClaimTypes.Role, AccessLevel.toString user.AccessLevel) + Claim(ClaimTypes.NameIdentifier, string user.Id) + Claim(ClaimTypes.Name, $"{user.FirstName} {user.LastName}") + Claim(ClaimTypes.GivenName, user.PreferredName) + Claim(ClaimTypes.Role, string user.AccessLevel) } - let identity = ClaimsIdentity (claims, CookieAuthenticationDefaults.AuthenticationScheme) + let identity = ClaimsIdentity(claims, CookieAuthenticationDefaults.AuthenticationScheme) - do! ctx.SignInAsync (identity.AuthenticationType, ClaimsPrincipal identity, - AuthenticationProperties (IssuedUtc = DateTimeOffset.UtcNow)) + do! 
ctx.SignInAsync(identity.AuthenticationType, ClaimsPrincipal identity, + AuthenticationProperties(IssuedUtc = DateTimeOffset.UtcNow)) do! data.WebLogUser.SetLastSeen user.Id user.WebLogId do! addMessage ctx - { UserMessage.success with + { UserMessage.Success with Message = "Log on successful" - Detail = Some $"Welcome to {ctx.WebLog.Name}!" - } + Detail = Some $"Welcome to {ctx.WebLog.Name}!" } return! match model.ReturnTo with - | Some url -> redirectTo false url next ctx + | Some url -> redirectTo false url next ctx // TODO: change to redirectToGet? | None -> redirectToGet "admin/dashboard" next ctx | Error msg -> - do! addMessage ctx { UserMessage.error with Message = msg } + do! addMessage ctx { UserMessage.Error with Message = msg } return! logOn model.ReturnTo next ctx } // GET /user/log-off let logOff : HttpHandler = fun next ctx -> task { do! ctx.SignOutAsync CookieAuthenticationDefaults.AuthenticationScheme - do! addMessage ctx { UserMessage.info with Message = "Log off successful" } + do! addMessage ctx { UserMessage.Info with Message = "Log off successful" } return! redirectToGet "" next ctx } // ~~ ADMINISTRATION ~~ -open System.Collections.Generic open Giraffe.Htmx /// Got no time for URL/form manipulators... let private goAway : HttpHandler = RequestErrors.BAD_REQUEST "really?" // GET /admin/settings/users -let all : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { +let all : HttpHandler = fun next ctx -> task { let! users = ctx.Data.WebLogUser.FindByWebLog ctx.WebLog.Id - return! - hashForPage "User Administration" - |> withAntiCsrf ctx - |> addToHash "users" (users |> List.map (DisplayUser.fromUser ctx.WebLog) |> Array.ofList) - |> adminBareView "user-list-body" next ctx + return! 
adminBarePage "User Administration" true next ctx (Views.User.userList users) } /// Show the edit user page -let private showEdit (model : EditUserModel) : HttpHandler = fun next ctx -> - hashForPage (if model.IsNew then "Add a New User" else "Edit User") - |> withAntiCsrf ctx - |> addToHash ViewContext.Model model - |> addToHash "access_levels" [| - KeyValuePair.Create (AccessLevel.toString Author, "Author") - KeyValuePair.Create (AccessLevel.toString Editor, "Editor") - KeyValuePair.Create (AccessLevel.toString WebLogAdmin, "Web Log Admin") - if ctx.HasAccessLevel Administrator then - KeyValuePair.Create (AccessLevel.toString Administrator, "Administrator") - |] - |> adminBareView "user-edit" next ctx +let private showEdit (model: EditUserModel) : HttpHandler = fun next ctx -> + adminBarePage (if model.IsNew then "Add a New User" else "Edit User") true next ctx (Views.User.edit model) // GET /admin/settings/user/{id}/edit -let edit usrId : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { +let edit usrId : HttpHandler = fun next ctx -> task { let isNew = usrId = "new" let userId = WebLogUserId usrId let tryUser = - if isNew then someTask { WebLogUser.empty with Id = userId } + if isNew then someTask { WebLogUser.Empty with Id = userId } else ctx.Data.WebLogUser.FindById userId ctx.WebLog.Id match! tryUser with - | Some user -> return! showEdit (EditUserModel.fromUser user) next ctx + | Some user -> return! showEdit (EditUserModel.FromUser user) next ctx | None -> return! Error.notFound next ctx } -// POST /admin/settings/user/{id}/delete -let delete userId : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { +// DELETE /admin/settings/user/{id} +let delete userId : HttpHandler = fun next ctx -> task { let data = ctx.Data match! data.WebLogUser.FindById (WebLogUserId userId) ctx.WebLog.Id with | Some user -> @@ -141,43 +121,31 @@ let delete userId : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> match! 
data.WebLogUser.Delete user.Id user.WebLogId with | Ok _ -> do! addMessage ctx - { UserMessage.success with - Message = $"User {WebLogUser.displayName user} deleted successfully" - } + { UserMessage.Success with + Message = $"User {user.DisplayName} deleted successfully" } return! all next ctx | Error msg -> do! addMessage ctx - { UserMessage.error with - Message = $"User {WebLogUser.displayName user} was not deleted" - Detail = Some msg - } + { UserMessage.Error with + Message = $"User {user.DisplayName} was not deleted" + Detail = Some msg } return! all next ctx | None -> return! Error.notFound next ctx } -/// Display the user "my info" page, with information possibly filled in -let private showMyInfo (model : EditMyInfoModel) (user : WebLogUser) : HttpHandler = fun next ctx -> - hashForPage "Edit Your Information" - |> withAntiCsrf ctx - |> addToHash ViewContext.Model model - |> addToHash "access_level" (AccessLevel.toString user.AccessLevel) - |> addToHash "created_on" (WebLog.localTime ctx.WebLog user.CreatedOn) - |> addToHash "last_seen_on" (WebLog.localTime ctx.WebLog - (defaultArg user.LastSeenOn (Instant.FromUnixTimeSeconds 0))) - - |> adminView "my-info" next ctx - - // GET /admin/my-info let myInfo : HttpHandler = requireAccess Author >=> fun next ctx -> task { match! ctx.Data.WebLogUser.FindById ctx.UserId ctx.WebLog.Id with - | Some user -> return! showMyInfo (EditMyInfoModel.fromUser user) user next ctx + | Some user -> + return! + Views.User.myInfo (EditMyInfoModel.FromUser user) user + |> adminPage "Edit Your Information" true next ctx | None -> return! Error.notFound next ctx } // POST /admin/my-info let saveMyInfo : HttpHandler = requireAccess Author >=> fun next ctx -> task { - let! model = ctx.BindFormAsync () + let! model = ctx.BindFormAsync() let data = ctx.Data match! 
data.WebLogUser.FindById ctx.UserId ctx.WebLog.Id with | Some user when model.NewPassword = model.NewPasswordConfirm -> @@ -187,15 +155,16 @@ let saveMyInfo : HttpHandler = requireAccess Author >=> fun next ctx -> task { FirstName = model.FirstName LastName = model.LastName PreferredName = model.PreferredName - PasswordHash = pw - } + PasswordHash = pw } do! data.WebLogUser.Update user let pwMsg = if model.NewPassword = "" then "" else " and updated your password" - do! addMessage ctx { UserMessage.success with Message = $"Saved your information{pwMsg} successfully" } + do! addMessage ctx { UserMessage.Success with Message = $"Saved your information{pwMsg} successfully" } return! redirectToGet "admin/my-info" next ctx | Some user -> - do! addMessage ctx { UserMessage.error with Message = "Passwords did not match; no updates made" } - return! showMyInfo { model with NewPassword = ""; NewPasswordConfirm = "" } user next ctx + do! addMessage ctx { UserMessage.Error with Message = "Passwords did not match; no updates made" } + return! + Views.User.myInfo { model with NewPassword = ""; NewPasswordConfirm = "" } user + |> adminPage "Edit Your Information" true next ctx | None -> return! Error.notFound next ctx } @@ -204,15 +173,15 @@ let saveMyInfo : HttpHandler = requireAccess Author >=> fun next ctx -> task { // POST /admin/settings/user/save let save : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { - let! model = ctx.BindFormAsync () + let! model = ctx.BindFormAsync() let data = ctx.Data let tryUser = if model.IsNew then - { WebLogUser.empty with - Id = WebLogUserId.create () + { WebLogUser.Empty with + Id = WebLogUserId.Create() WebLogId = ctx.WebLog.Id - CreatedOn = Noda.now () - } |> someTask + CreatedOn = Noda.now () } + |> someTask else data.WebLogUser.FindById (WebLogUserId model.Id) ctx.WebLog.Id match! 
tryUser with | Some user when model.Password = model.PasswordConfirm -> @@ -225,12 +194,11 @@ let save : HttpHandler = requireAccess WebLogAdmin >=> fun next ctx -> task { else { updatedUser with PasswordHash = createPasswordHash updatedUser model.Password } do! (if model.IsNew then data.WebLogUser.Add else data.WebLogUser.Update) toUpdate do! addMessage ctx - { UserMessage.success with - Message = $"""{if model.IsNew then "Add" else "Updat"}ed user successfully""" - } + { UserMessage.Success with + Message = $"""{if model.IsNew then "Add" else "Updat"}ed user successfully""" } return! all next ctx | Some _ -> - do! addMessage ctx { UserMessage.error with Message = "The passwords did not match; nothing saved" } + do! addMessage ctx { UserMessage.Error with Message = "The passwords did not match; nothing saved" } return! (withHxRetarget $"#user_{model.Id}" >=> showEdit { model with Password = ""; PasswordConfirm = "" }) next ctx diff --git a/src/MyWebLog/Maintenance.fs b/src/MyWebLog/Maintenance.fs index 544de4f..43caaca 100644 --- a/src/MyWebLog/Maintenance.fs +++ b/src/MyWebLog/Maintenance.fs @@ -7,9 +7,9 @@ open MyWebLog.Data open NodaTime /// Create the web log information -let private doCreateWebLog (args : string[]) (sp : IServiceProvider) = task { +let private doCreateWebLog (args: string[]) (sp: IServiceProvider) = task { - let data = sp.GetRequiredService () + let data = sp.GetRequiredService() let timeZone = let local = TimeZoneInfo.Local.Id @@ -21,30 +21,29 @@ let private doCreateWebLog (args : string[]) (sp : IServiceProvider) = task { | false, _ -> raise <| TimeZoneNotFoundException $"Cannot find IANA timezone for {local}" // Create the web log - let webLogId = WebLogId.create () - let userId = WebLogUserId.create () - let homePageId = PageId.create () + let webLogId = WebLogId.Create() + let userId = WebLogUserId.Create() + let homePageId = PageId.Create() let slug = Handlers.Upload.makeSlug args[2] // If this is the first web log being created, the 
user will be an installation admin; otherwise, they will be an // admin just over their web log - let! webLogs = data.WebLog.All () + let! webLogs = data.WebLog.All() let accessLevel = if List.isEmpty webLogs then Administrator else WebLogAdmin do! data.WebLog.Add - { WebLog.empty with + { WebLog.Empty with Id = webLogId Name = args[2] Slug = slug UrlBase = args[1] - DefaultPage = PageId.toString homePageId - TimeZone = timeZone - } + DefaultPage = string homePageId + TimeZone = timeZone } // Create the admin user let now = Noda.now () let user = - { WebLogUser.empty with + { WebLogUser.Empty with Id = userId WebLogId = webLogId Email = args[3] @@ -52,13 +51,12 @@ let private doCreateWebLog (args : string[]) (sp : IServiceProvider) = task { LastName = "User" PreferredName = "Admin" AccessLevel = accessLevel - CreatedOn = now - } + CreatedOn = now } do! data.WebLogUser.Add { user with PasswordHash = Handlers.User.createPasswordHash user args[4] } // Create the default home page do! data.Page.Add - { Page.empty with + { Page.Empty with Id = homePageId WebLogId = webLogId AuthorId = userId @@ -69,16 +67,14 @@ let private doCreateWebLog (args : string[]) (sp : IServiceProvider) = task { Text = "

    This is your default home page.

    " Revisions = [ { AsOf = now - Text = Html "

    This is your default home page.

    " - } - ] - } + Text = Html "

    This is your default home page.

    " } + ] } printfn $"Successfully initialized database for {args[2]} with URL base {args[1]}" match accessLevel with | Administrator -> printfn $" ({args[3]} is an installation administrator)" | WebLogAdmin -> - printfn $" ({args[3]} is a web log administrator;" +