WIP on uploads (#2)
- Add data types and fields
- Implement in both RethinkDB and SQLite
- Add uploads to backup/restore
- Add empty upload folder to project
- Add indexes to SQLite tables (#15)
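
The Upload record and UploadId used throughout this diff are defined in the domain model, outside the files shown here. As a rough sketch only, with the field names and the single-case ID shape inferred from the EncodedUpload.fromUpload/fromEncoded converters added to the backup code below (the stand-in types are placeholders for the real MyWebLog definitions):

    open System

    // Stand-ins for existing MyWebLog domain types referenced by this sketch (real definitions live elsewhere)
    type WebLogId  = WebLogId of string
    type Permalink = Permalink of string

    /// Hypothetical single-case ID for an uploaded file, following the PageId/PostId pattern used elsewhere
    type UploadId = UploadId of string

    /// Sketch of the uploaded-file record implied by EncodedUpload: raw bytes here,
    /// carried base-64 encoded as a string when written to a backup archive
    type Upload =
        { id        : UploadId
          webLogId  : WebLogId
          path      : Permalink
          updatedOn : DateTime
          data      : byte[]
        }
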
@@ -93,44 +93,14 @@ module CatchAll =
 /// Serve theme assets
 module Asset =
 
-    open System
-    open Microsoft.AspNetCore.Http.Headers
-    open Microsoft.AspNetCore.StaticFiles
-    open Microsoft.Net.Http.Headers
-
-    /// Determine if the asset has been modified since the date/time specified by the If-Modified-Since header
-    let private checkModified asset (ctx : HttpContext) : HttpHandler option =
-        match ctx.Request.Headers.IfModifiedSince with
-        | it when it.Count < 1 -> None
-        | it ->
-            if asset.updatedOn > DateTime.Parse it[0] then
-                None
-            else
-                Some (setStatusCode 304 >=> setBodyFromString "Not Modified")
-
-    /// An instance of ASP.NET Core's file extension to MIME type converter
-    let private mimeMap = FileExtensionContentTypeProvider ()
-
     // GET /theme/{theme}/{**path}
-    let serveAsset (urlParts : string seq) : HttpHandler = fun next ctx -> task {
+    let serve (urlParts : string seq) : HttpHandler = fun next ctx -> task {
         let path = urlParts |> Seq.skip 1 |> Seq.head
         match! ctx.Data.ThemeAsset.findById (ThemeAssetId.ofString path) with
         | Some asset ->
-            match checkModified asset ctx with
+            match Upload.checkModified asset.updatedOn ctx with
             | Some threeOhFour -> return! threeOhFour next ctx
-            | None ->
-                let mimeType =
-                    match mimeMap.TryGetContentType path with
-                    | true, typ -> typ
-                    | false, _ -> "application/octet-stream"
-                let headers = ResponseHeaders ctx.Response.Headers
-                headers.LastModified <- Some (DateTimeOffset asset.updatedOn) |> Option.toNullable
-                headers.ContentType <- MediaTypeHeaderValue mimeType
-                headers.CacheControl <-
-                    let hdr = CacheControlHeaderValue()
-                    hdr.MaxAge <- Some (TimeSpan.FromDays 30) |> Option.toNullable
-                    hdr
-                return! setBody asset.data next ctx
+            | None -> return! Upload.sendFile asset.updatedOn path asset.data next ctx
         | None -> return! Error.notFound next ctx
     }
 
@@ -210,7 +180,8 @@ let router : HttpHandler = choose [
     GET_HEAD >=> routef "/page/%i" Post.pageOfPosts
     GET_HEAD >=> routef "/page/%i/" Post.redirectToPageOfPosts
     GET_HEAD >=> routexp "/tag/(.*)" Post.pageOfTaggedPosts
-    GET_HEAD >=> routexp "/themes/(.*)" Asset.serveAsset
+    GET_HEAD >=> routexp "/themes/(.*)" Asset.serve
+    GET_HEAD >=> routexp "/upload/(.*)" Upload.serve
     subRoute "/user" (choose [
         GET_HEAD >=> choose [
             route "/log-on" >=> User.logOn None

src/MyWebLog/Handlers/Upload.fs (new file, 61 lines)
@@ -0,0 +1,61 @@
+/// Handlers to manipulate uploaded files
+module MyWebLog.Handlers.Upload
+
+open System
+open Giraffe
+open Microsoft.AspNetCore.Http
+open MyWebLog
+
+/// Helper functions for this module
+[<AutoOpen>]
+module private Helpers =
+
+    open Microsoft.AspNetCore.StaticFiles
+
+    /// A MIME type mapper instance to use when serving files from the database
+    let mimeMap = FileExtensionContentTypeProvider ()
+
+
+/// Determine if the file has been modified since the date/time specified by the If-Modified-Since header
+let checkModified since (ctx : HttpContext) : HttpHandler option =
+    match ctx.Request.Headers.IfModifiedSince with
+    | it when it.Count < 1 -> None
+    | it when since > DateTime.Parse it[0] -> None
+    | _ -> Some (setStatusCode 304 >=> setBodyFromString "Not Modified")
+
+
+open Microsoft.AspNetCore.Http.Headers
+open Microsoft.Net.Http.Headers
+
+/// Derive a MIME type based on the extension of the file
+let deriveMimeType path =
+    match mimeMap.TryGetContentType path with true, typ -> typ | false, _ -> "application/octet-stream"
+
+/// Send a file, caching the response for 30 days
+let sendFile updatedOn path data : HttpHandler = fun next ctx -> task {
+    let headers = ResponseHeaders ctx.Response.Headers
+    headers.LastModified <- Some (DateTimeOffset updatedOn) |> Option.toNullable
+    headers.ContentType <- (deriveMimeType >> MediaTypeHeaderValue) path
+    headers.CacheControl <-
+        let hdr = CacheControlHeaderValue()
+        hdr.MaxAge <- Some (TimeSpan.FromDays 30) |> Option.toNullable
+        hdr
+    return! setBody data next ctx
+}
+
+// GET /upload/{web-log-slug}/{**path}
+let serve (urlParts : string seq) : HttpHandler = fun next ctx -> task {
+    let parts = (urlParts |> Seq.skip 1 |> Seq.head).Split '/'
+    let slug = Array.head parts
+    let path = String.Join ('/', parts |> Array.skip 1)
+    let webLog = ctx.WebLog
+    if slug = webLog.slug then
+        match! ctx.Data.Upload.findByPath path webLog.id with
+        | Some upload ->
+            match checkModified upload.updatedOn ctx with
+            | Some threeOhFour -> return! threeOhFour next ctx
+            | None -> return! sendFile upload.updatedOn path upload.data next ctx
+        | None -> return! Error.notFound next ctx
+    else
+        return! Error.notFound next ctx
+}
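
To make the slug/path split in serve concrete, here is an illustrative walk-through with a made-up request URL. The two-element urlParts value mirrors what Giraffe's routexp appears to hand the handler (the full match followed by the captured group, hence the Seq.skip 1); that shape is an assumption for the example, not something shown in this diff.

    open System

    // Illustrative only: all values below are invented for the example
    let urlParts = [ "/upload/my-web-log/2022/06/logo.png"; "my-web-log/2022/06/logo.png" ]
    let parts = (urlParts |> Seq.skip 1 |> Seq.head).Split '/'
    let slug  = Array.head parts                          // "my-web-log" - must equal ctx.WebLog.slug
    let path  = String.Join ('/', parts |> Array.skip 1)  // "2022/06/logo.png" - passed to ctx.Data.Upload.findByPath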

src/MyWebLog/Maintenance.fs
@@ -2,6 +2,7 @@ module MyWebLog.Maintenance
 
 open System
 open System.IO
+open System.Text.RegularExpressions
 open Microsoft.Extensions.DependencyInjection
 open MyWebLog.Data
 
@@ -23,11 +24,13 @@ let private doCreateWebLog (args : string[]) (sp : IServiceProvider) = task {
     let webLogId = WebLogId.create ()
     let userId = WebLogUserId.create ()
     let homePageId = PageId.create ()
+    let slug = ((Regex """\s+""").Replace ((Regex "[^A-z0-9 ]").Replace (args[2], ""), "-")).ToLowerInvariant ()
 
     do! data.WebLog.add
         { WebLog.empty with
             id = webLogId
             name = args[2]
+            slug = slug
             urlBase = args[1]
             defaultPage = PageId.toString homePageId
             timeZone = timeZone
@@ -162,13 +165,48 @@ module Backup =
             }
 
         /// Create a theme asset from an encoded theme asset
-        static member fromAsset (asset : EncodedAsset) : ThemeAsset =
-            { id = asset.id
-              updatedOn = asset.updatedOn
-              data = Convert.FromBase64String asset.data
+        static member fromEncoded (encoded : EncodedAsset) : ThemeAsset =
+            { id = encoded.id
+              updatedOn = encoded.updatedOn
+              data = Convert.FromBase64String encoded.data
             }
 
+    /// An uploaded file, with the data base-64 encoded
+    type EncodedUpload =
+        { /// The ID of the upload
+          id : UploadId
+
+          /// The ID of the web log to which the upload belongs
+          webLogId : WebLogId
+
+          /// The path at which this upload is served
+          path : Permalink
+
+          /// The date/time this upload was last updated (file time)
+          updatedOn : DateTime
+
+          /// The data for the upload, base-64 encoded
+          data : string
+        }
+
+        /// Create an encoded uploaded file from the original uploaded file
+        static member fromUpload (upload : Upload) : EncodedUpload =
+            { id = upload.id
+              webLogId = upload.webLogId
+              path = upload.path
+              updatedOn = upload.updatedOn
+              data = Convert.ToBase64String upload.data
+            }
+
+        /// Create an uploaded file from an encoded uploaded file
+        static member fromEncoded (encoded : EncodedUpload) : Upload =
+            { id = encoded.id
+              webLogId = encoded.webLogId
+              path = encoded.path
+              updatedOn = encoded.updatedOn
+              data = Convert.FromBase64String encoded.data
+            }
 
     /// A unified archive for a web log
     type Archive =
         { /// The web log to which this archive belongs
@@ -194,6 +232,9 @@ module Backup =
 
           /// The posts for this web log (containing only the most recent revision)
          posts : Post list
+
+          /// The uploaded files for this web log
+          uploads : EncodedUpload list
        }
 
     /// Create a JSON serializer (uses RethinkDB data implementation's JSON converters)
@@ -212,6 +253,7 @@ module Backup =
         let tagMapCount = List.length archive.tagMappings
         let pageCount = List.length archive.pages
         let postCount = List.length archive.posts
+        let uploadCount = List.length archive.uploads
 
         // Create a pluralized output based on the count
         let plural count ifOne ifMany =
@@ -225,6 +267,7 @@ module Backup =
         printfn $""" - {tagMapCount} tag mapping{plural tagMapCount "" "s"}"""
         printfn $""" - {pageCount} page{plural pageCount "" "s"}"""
         printfn $""" - {postCount} post{plural postCount "" "s"}"""
+        printfn $""" - {uploadCount} uploaded file{plural uploadCount "" "s"}"""
 
     /// Create a backup archive
     let private createBackup webLog (fileName : string) prettyOutput (data : IData) = task {
@@ -248,6 +291,9 @@ module Backup =
         printfn "- Exporting posts..."
         let! posts = data.Post.findFullByWebLog webLog.id
 
+        printfn "- Exporting uploads..."
+        let! uploads = data.Upload.findByWebLog webLog.id
+
         printfn "- Writing archive..."
         let archive = {
             webLog = webLog
@@ -256,8 +302,9 @@ module Backup =
             assets = assets |> List.map EncodedAsset.fromAsset
             categories = categories
             tagMappings = tagMaps
-            pages = pages |> List.map (fun p -> { p with revisions = List.truncate 1 p.revisions })
-            posts = posts |> List.map (fun p -> { p with revisions = List.truncate 1 p.revisions })
+            pages = pages |> List.map (fun p -> { p with revisions = List.truncate 1 p.revisions })
+            posts = posts |> List.map (fun p -> { p with revisions = List.truncate 1 p.revisions })
+            uploads = uploads |> List.map EncodedUpload.fromUpload
         }
 
         // Write the structure to the backup file
@@ -284,6 +331,7 @@ module Backup =
             let newPageIds = archive.pages |> List.map (fun page -> page.id, PageId.create ()) |> dict
             let newPostIds = archive.posts |> List.map (fun post -> post.id, PostId.create ()) |> dict
             let newUserIds = archive.users |> List.map (fun user -> user.id, WebLogUserId.create ()) |> dict
+            let newUpIds = archive.uploads |> List.map (fun up -> up.id, UploadId.create ()) |> dict
             return
                 { archive with
                     webLog = { archive.webLog with id = newWebLogId; urlBase = Option.get newUrlBase }
@@ -308,6 +356,8 @@ module Backup =
                             authorId = newUserIds[post.authorId]
                             categoryIds = post.categoryIds |> List.map (fun c -> newCatIds[c])
                         })
+                    uploads = archive.uploads
+                              |> List.map (fun u -> { u with id = newUpIds[u.id]; webLogId = newWebLogId })
                 }
         | None ->
             return
@@ -320,7 +370,7 @@ module Backup =
         printfn ""
         printfn "- Importing theme..."
         do! data.Theme.save restore.theme
-        let! _ = restore.assets |> List.map (EncodedAsset.fromAsset >> data.ThemeAsset.save) |> Task.WhenAll
+        let! _ = restore.assets |> List.map (EncodedAsset.fromEncoded >> data.ThemeAsset.save) |> Task.WhenAll
 
         // Restore web log data
 
@@ -342,6 +392,9 @@ module Backup =
 
         // TODO: comments not yet implemented
 
+        printfn "- Restoring uploads..."
+        do! data.Upload.restore (restore.uploads |> List.map EncodedUpload.fromEncoded)
+
         displayStats "Restored for {{NAME}}:" restore.webLog restore
     }
 
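
The backup code above stores each upload's bytes as a base-64 string so the archive stays plain JSON, and restore reverses the encoding before handing the files to data.Upload.restore. A standalone sketch of that round trip (the sample bytes are invented for illustration):

    open System

    // Raw bytes -> base-64 string for the JSON archive (EncodedUpload.fromUpload),
    // then back to bytes on restore (EncodedUpload.fromEncoded)
    let originalBytes = [| 0x89uy; 0x50uy; 0x4Euy; 0x47uy |]   // e.g. the first bytes of a PNG file
    let encoded  = Convert.ToBase64String originalBytes
    let restored = Convert.FromBase64String encoded
    printfn "round trip ok: %b" (originalBytes = restored)     // prints: round trip ok: true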

src/MyWebLog/MyWebLog.fsproj
@@ -18,6 +18,7 @@
     <Compile Include="Handlers\Feed.fs" />
     <Compile Include="Handlers\Post.fs" />
     <Compile Include="Handlers\User.fs" />
+    <Compile Include="Handlers\Upload.fs" />
     <Compile Include="Handlers\Routes.fs" />
     <Compile Include="DotLiquidBespoke.fs" />
     <Compile Include="Maintenance.fs" />
@@ -41,7 +42,7 @@
   </ItemGroup>
 
   <ItemGroup>
     <None Include=".\wwwroot\img\*.png" CopyToOutputDirectory="Always" />
+    <None Include=".\wwwroot\upload\*" CopyToOutputDirectory="Always" />
   </ItemGroup>
-
 </Project>

src/MyWebLog/wwwroot/upload/.gitkeep (new file, 0 lines)