WIP on SQLite doc library

Daniel J. Summers 2023-12-19 09:23:34 -05:00
parent e04c8b58e9
commit d330c97d9f
3 changed files with 108 additions and 120 deletions

View File

@ -4,6 +4,10 @@
     <ProjectReference Include="..\MyWebLog.Domain\MyWebLog.Domain.fsproj" />
   </ItemGroup>
+  <ItemGroup>
+    <ProjectReference Include="..\..\..\BitBadger.Sqlite.Documents\src\BitBadger.Sqlite.FSharp.Documents\BitBadger.Sqlite.FSharp.Documents.fsproj" />
+  </ItemGroup>
   <ItemGroup>
     <PackageReference Include="BitBadger.Npgsql.FSharp.Documents" Version="2.0.0" />
     <PackageReference Include="Microsoft.Data.Sqlite" Version="8.0.0" />

View File

@ -1,5 +1,7 @@
 namespace MyWebLog.Data

+open System.Threading.Tasks
+open BitBadger.Sqlite.FSharp.Documents
 open Microsoft.Data.Sqlite
 open Microsoft.Extensions.Logging
 open MyWebLog
@ -12,108 +14,98 @@ type SQLiteData(conn: SqliteConnection, log: ILogger<SQLiteData>, ser: JsonSeria
     let ensureTables () = backgroundTask {
-        use cmd = conn.CreateCommand()
-        let! tables = backgroundTask {
-            cmd.CommandText <- "SELECT name FROM sqlite_master WHERE type = 'table'"
-            let! rdr = cmd.ExecuteReaderAsync()
-            let mutable tableList = []
-            while! rdr.ReadAsync() do
-                tableList <- Map.getString "name" rdr :: tableList
-            do! rdr.CloseAsync()
-            return tableList
-        }
+        let! tables = Custom.list<string> "SELECT name FROM sqlite_master WHERE type = 'table'" None _.GetString(0)
         let needsTable table =
             not (List.contains table tables)
         let jsonTable table =
-            $"CREATE TABLE {table} (data TEXT NOT NULL);
-              CREATE UNIQUE INDEX idx_{table}_key ON {table} ((data ->> 'Id'))"
-        seq {
+            $"{Definition.createTable table}; {Definition.createKey table}"
+        let tasks =
+            seq {
                 // Theme tables
                 if needsTable Table.Theme then jsonTable Table.Theme
                 if needsTable Table.ThemeAsset then
                     $"CREATE TABLE {Table.ThemeAsset} (
                         theme_id    TEXT NOT NULL,
                         path        TEXT NOT NULL,
                         updated_on  TEXT NOT NULL,
                         data        BLOB NOT NULL,
                         PRIMARY KEY (theme_id, path))"

                 // Web log table
                 if needsTable Table.WebLog then jsonTable Table.WebLog

                 // Category table
                 if needsTable Table.Category then
                     $"{jsonTable Table.Category};
                       CREATE INDEX idx_{Table.Category}_web_log ON {Table.Category} ((data ->> 'WebLogId'))"

                 // Web log user table
                 if needsTable Table.WebLogUser then
                     $"{jsonTable Table.WebLogUser};
                       CREATE INDEX idx_{Table.WebLogUser}_email
                           ON {Table.WebLogUser} ((data ->> 'WebLogId'), (data ->> 'Email'))"

                 // Page tables
                 if needsTable Table.Page then
                     $"{jsonTable Table.Page};
                       CREATE INDEX idx_{Table.Page}_author ON {Table.Page} ((data ->> 'AuthorId'));
                       CREATE INDEX idx_{Table.Page}_permalink
                           ON {Table.Page} ((data ->> 'WebLogId'), (data ->> 'Permalink'))"
                 if needsTable Table.PageRevision then
-                    "CREATE TABLE page_revision (
+                    $"CREATE TABLE {Table.PageRevision} (
                         page_id        TEXT NOT NULL,
                         as_of          TEXT NOT NULL,
                         revision_text  TEXT NOT NULL,
                         PRIMARY KEY (page_id, as_of))"

                 // Post tables
                 if needsTable Table.Post then
                     $"{jsonTable Table.Post};
                       CREATE INDEX idx_{Table.Post}_author ON {Table.Post} ((data ->> 'AuthorId'));
                       CREATE INDEX idx_{Table.Post}_status
                           ON {Table.Post} ((data ->> 'WebLogId'), (data ->> 'Status'), (data ->> 'UpdatedOn'));
                       CREATE INDEX idx_{Table.Post}_permalink
                           ON {Table.Post} ((data ->> 'WebLogId'), (data ->> 'Permalink'))"
                       // TODO: index categories by post?
                 if needsTable Table.PostRevision then
                     $"CREATE TABLE {Table.PostRevision} (
                         post_id        TEXT NOT NULL,
                         as_of          TEXT NOT NULL,
                         revision_text  TEXT NOT NULL,
                         PRIMARY KEY (post_id, as_of))"
                 if needsTable Table.PostComment then
                     $"{jsonTable Table.PostComment};
                       CREATE INDEX idx_{Table.PostComment}_post ON {Table.PostComment} ((data ->> 'PostId'))"

                 // Tag map table
                 if needsTable Table.TagMap then
                     $"{jsonTable Table.TagMap};
                       CREATE INDEX idx_{Table.TagMap}_tag ON {Table.TagMap} ((data ->> 'WebLogId'), (data ->> 'UrlValue'))"

                 // Uploaded file table
                 if needsTable Table.Upload then
                     $"CREATE TABLE {Table.Upload} (
                         id          TEXT PRIMARY KEY,
                         web_log_id  TEXT NOT NULL,
                         path        TEXT NOT NULL,
                         updated_on  TEXT NOT NULL,
                         data        BLOB NOT NULL);
                       CREATE INDEX idx_{Table.Upload}_path ON {Table.Upload} (web_log_id, path)"

                 // Database version table
                 if needsTable Table.DbVersion then
                     $"CREATE TABLE {Table.DbVersion} (id TEXT PRIMARY KEY);
                       INSERT INTO {Table.DbVersion} VALUES ('v2.1')"
             }
-        |> Seq.map (fun sql ->
-            log.LogInformation $"Creating {(sql.Split ' ')[2]} table..."
-            cmd.CommandText <- sql
-            write cmd |> Async.AwaitTask |> Async.RunSynchronously)
-        |> List.ofSeq
-        |> ignore
+            |> Seq.map (fun sql ->
+                log.LogInformation $"""Creating {(sql.Replace("IF NOT EXISTS ", "").Split ' ')[2]} table..."""
+                Custom.nonQuery sql None)
+
+        let! _ = Task.WhenAll tasks
+        ()
     }

     /// Set the database version to the specified version
@ -459,15 +451,6 @@ type SQLiteData(conn: SqliteConnection, log: ILogger<SQLiteData>, ser: JsonSeria
     /// The connection for this instance
     member _.Conn = conn

-    /// Make a SQLite connection ready to execute commends
-    static member setUpConnection (conn: SqliteConnection) = backgroundTask {
-        do! conn.OpenAsync()
-        use cmd = conn.CreateCommand()
-        cmd.CommandText <- "PRAGMA foreign_keys = TRUE"
-        let! _ = cmd.ExecuteNonQueryAsync()
-        ()
-    }
-
     interface IData with

         member _.Category = SQLiteCategoryData (conn, ser, log)
@ -484,10 +467,6 @@ type SQLiteData(conn: SqliteConnection, log: ILogger<SQLiteData>, ser: JsonSeria
         member _.StartUp () = backgroundTask {
             do! ensureTables ()

-            use cmd = conn.CreateCommand()
-            cmd.CommandText <- $"SELECT id FROM {Table.DbVersion}"
-            use! rdr = cmd.ExecuteReaderAsync()
-            let! isFound = rdr.ReadAsync()
-            do! migrate (if isFound then Some (Map.getString "id" rdr) else None)
+            let! version = Custom.single<string> $"SELECT id FROM {Table.DbVersion}" None _.GetString(0)
+            do! migrate version
         }
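Aside: taken together, the rewritten ensureTables and StartUp lean on a handful of document-library calls. A rough, self-contained sketch of that pattern follows, using only the function names that appear in the diff above (Custom.list, Custom.nonQuery, Custom.single, Definition.createTable, Definition.createKey); the "example" table name is made up.

open BitBadger.Sqlite.FSharp.Documents

let demo () = backgroundTask {
    // List existing tables, mapping each row's first column to a string
    let! tables =
        Custom.list<string> "SELECT name FROM sqlite_master WHERE type = 'table'" None _.GetString(0)

    // Create a JSON document table (and its key index) when it is missing
    let table = "example"
    if not (List.contains table tables) then
        do! Custom.nonQuery $"{Definition.createTable table}; {Definition.createKey table}" None

    // Read at most one value back, the same way StartUp reads the database version
    return! Custom.single<string> $"SELECT id FROM {table}" None _.GetString(0)
}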

View File

@ -50,25 +50,30 @@ type RedirectRuleMiddleware(next: RequestDelegate, log: ILogger<RedirectRuleMidd
 open System
-open BitBadger.Npgsql.FSharp.Documents
 open Microsoft.Extensions.DependencyInjection
 open MyWebLog.Data
 open Newtonsoft.Json
 open Npgsql

+// The PostgreSQL document library
+module Postgres = BitBadger.Npgsql.FSharp.Documents
+
+// The SQLite document library
+module Sqlite = BitBadger.Sqlite.FSharp.Documents
+
 /// Logic to obtain a data connection and implementation based on configured values
 module DataImplementation =

     open MyWebLog.Converters
     open RethinkDb.Driver.FSharp
     open RethinkDb.Driver.Net

     /// Create an NpgsqlDataSource from the connection string, configuring appropriately
     let createNpgsqlDataSource (cfg: IConfiguration) =
         let builder = NpgsqlDataSourceBuilder(cfg.GetConnectionString "PostgreSQL")
         let _ = builder.UseNodaTime()
         // let _ = builder.UseLoggerFactory(LoggerFactory.Create(fun it -> it.AddConsole () |> ignore))
-        (builder.Build >> Configuration.useDataSource) ()
+        (builder.Build >> Postgres.Configuration.useDataSource) ()

     /// Get the configured data implementation
     let get (sp: IServiceProvider) : IData =
@ -77,10 +82,10 @@ module DataImplementation =
         let connStr name = config.GetConnectionString name
         let hasConnStr name = (connStr >> isNull >> not) name
         let createSQLite connStr : IData =
+            Sqlite.Configuration.useConnectionString connStr
             let log = sp.GetRequiredService<ILogger<SQLiteData>>()
             let conn = new SqliteConnection(connStr)
             log.LogInformation $"Using SQLite database {conn.DataSource}"
-            await (SQLiteData.setUpConnection conn)
             SQLiteData(conn, log, Json.configure (JsonSerializer.CreateDefault()))

         if hasConnStr "SQLite" then
@ -93,7 +98,7 @@ module DataImplementation =
             RethinkDbData(conn, rethinkCfg, log)
         elif hasConnStr "PostgreSQL" then
             createNpgsqlDataSource config
-            use conn = Configuration.dataSource().CreateConnection()
+            use conn = Postgres.Configuration.dataSource().CreateConnection()
             let log = sp.GetRequiredService<ILogger<PostgresData>>()
             log.LogInformation $"Using PostgreSQL database {conn.Database}"
             PostgresData(log, Json.configure (JsonSerializer.CreateDefault()))
@ -170,13 +175,13 @@ let main args =
                     opts.TableName <- "Session"
                     opts.Connection <- rethink.Conn)
             ()
-        | :? SQLiteData as sql ->
+        | :? SQLiteData ->
             // ADO.NET connections are designed to work as per-request instantiation
             let cfg = sp.GetRequiredService<IConfiguration>()
             let _ =
                 builder.Services.AddScoped<SqliteConnection>(fun sp ->
-                    let conn = new SqliteConnection(sql.Conn.ConnectionString)
-                    SQLiteData.setUpConnection conn |> Async.AwaitTask |> Async.RunSynchronously
+                    let conn = Sqlite.Configuration.dbConn ()
+                    conn.OpenAsync() |> Async.AwaitTask |> Async.RunSynchronously
                     conn)
             let _ = builder.Services.AddScoped<IData, SQLiteData>()
             // Use SQLite for caching as well
@ -185,7 +190,7 @@ let main args =
             ()
         | :? PostgresData as postgres ->
             // ADO.NET Data Sources are designed to work as singletons
-            let _ = builder.Services.AddSingleton<NpgsqlDataSource>(Configuration.dataSource ())
+            let _ = builder.Services.AddSingleton<NpgsqlDataSource>(Postgres.Configuration.dataSource ())
             let _ = builder.Services.AddSingleton<IData> postgres
             let _ =
                 builder.Services.AddSingleton<IDistributedCache>(fun _ ->