WIP on SQLite doc library
parent e04c8b58e9
commit d330c97d9f
@@ -4,6 +4,10 @@
     <ProjectReference Include="..\MyWebLog.Domain\MyWebLog.Domain.fsproj" />
   </ItemGroup>
+
+  <ItemGroup>
+    <ProjectReference Include="..\..\..\BitBadger.Sqlite.Documents\src\BitBadger.Sqlite.FSharp.Documents\BitBadger.Sqlite.FSharp.Documents.fsproj" />
+  </ItemGroup>
 
   <ItemGroup>
     <PackageReference Include="BitBadger.Npgsql.FSharp.Documents" Version="2.0.0" />
     <PackageReference Include="Microsoft.Data.Sqlite" Version="8.0.0" />
@@ -1,5 +1,7 @@
 namespace MyWebLog.Data
 
+open System.Threading.Tasks
+open BitBadger.Sqlite.FSharp.Documents
 open Microsoft.Data.Sqlite
 open Microsoft.Extensions.Logging
 open MyWebLog
@@ -12,108 +14,98 @@ type SQLiteData(conn: SqliteConnection, log: ILogger<SQLiteData>, ser: JsonSeria
 
     let ensureTables () = backgroundTask {
-        use cmd = conn.CreateCommand()
-        let! tables = backgroundTask {
-            cmd.CommandText <- "SELECT name FROM sqlite_master WHERE type = 'table'"
-            let! rdr = cmd.ExecuteReaderAsync()
-            let mutable tableList = []
-            while! rdr.ReadAsync() do
-                tableList <- Map.getString "name" rdr :: tableList
-            do! rdr.CloseAsync()
-            return tableList
-        }
+        let! tables = Custom.list<string> "SELECT name FROM sqlite_master WHERE type = 'table'" None _.GetString(0)
 
         let needsTable table =
             not (List.contains table tables)
 
         let jsonTable table =
-            $"CREATE TABLE {table} (data TEXT NOT NULL);
-              CREATE UNIQUE INDEX idx_{table}_key ON {table} ((data ->> 'Id'))"
+            $"{Definition.createTable table}; {Definition.createKey table}"
 
+        let tasks =
             seq {
                 // Theme tables
                 if needsTable Table.Theme then jsonTable Table.Theme
                 if needsTable Table.ThemeAsset then
                     $"CREATE TABLE {Table.ThemeAsset} (
                         theme_id TEXT NOT NULL,
                         path TEXT NOT NULL,
                         updated_on TEXT NOT NULL,
                         data BLOB NOT NULL,
                         PRIMARY KEY (theme_id, path))"
 
                 // Web log table
                 if needsTable Table.WebLog then jsonTable Table.WebLog
 
                 // Category table
                 if needsTable Table.Category then
                     $"{jsonTable Table.Category};
                       CREATE INDEX idx_{Table.Category}_web_log ON {Table.Category} ((data ->> 'WebLogId'))"
 
                 // Web log user table
                 if needsTable Table.WebLogUser then
                     $"{jsonTable Table.WebLogUser};
                       CREATE INDEX idx_{Table.WebLogUser}_email
                           ON {Table.WebLogUser} ((data ->> 'WebLogId'), (data ->> 'Email'))"
 
                 // Page tables
                 if needsTable Table.Page then
                     $"{jsonTable Table.Page};
                       CREATE INDEX idx_{Table.Page}_author ON {Table.Page} ((data ->> 'AuthorId'));
                       CREATE INDEX idx_{Table.Page}_permalink
                           ON {Table.Page} ((data ->> 'WebLogId'), (data ->> 'Permalink'))"
                 if needsTable Table.PageRevision then
-                    "CREATE TABLE page_revision (
+                    $"CREATE TABLE {Table.PageRevision} (
                         page_id TEXT NOT NULL,
                         as_of TEXT NOT NULL,
                         revision_text TEXT NOT NULL,
                         PRIMARY KEY (page_id, as_of))"
 
                 // Post tables
                 if needsTable Table.Post then
                     $"{jsonTable Table.Post};
                       CREATE INDEX idx_{Table.Post}_author ON {Table.Post} ((data ->> 'AuthorId'));
                       CREATE INDEX idx_{Table.Post}_status
                           ON {Table.Post} ((data ->> 'WebLogId'), (data ->> 'Status'), (data ->> 'UpdatedOn'));
                       CREATE INDEX idx_{Table.Post}_permalink
                           ON {Table.Post} ((data ->> 'WebLogId'), (data ->> 'Permalink'))"
                 // TODO: index categories by post?
                 if needsTable Table.PostRevision then
                     $"CREATE TABLE {Table.PostRevision} (
                         post_id TEXT NOT NULL,
                         as_of TEXT NOT NULL,
                         revision_text TEXT NOT NULL,
                         PRIMARY KEY (post_id, as_of))"
                 if needsTable Table.PostComment then
                     $"{jsonTable Table.PostComment};
                       CREATE INDEX idx_{Table.PostComment}_post ON {Table.PostComment} ((data ->> 'PostId'))"
 
                 // Tag map table
                 if needsTable Table.TagMap then
                     $"{jsonTable Table.TagMap};
                       CREATE INDEX idx_{Table.TagMap}_tag ON {Table.TagMap} ((data ->> 'WebLogId'), (data ->> 'UrlValue'))"
 
                 // Uploaded file table
                 if needsTable Table.Upload then
                     $"CREATE TABLE {Table.Upload} (
                         id TEXT PRIMARY KEY,
                         web_log_id TEXT NOT NULL,
                         path TEXT NOT NULL,
                         updated_on TEXT NOT NULL,
                         data BLOB NOT NULL);
                       CREATE INDEX idx_{Table.Upload}_path ON {Table.Upload} (web_log_id, path)"
 
                 // Database version table
                 if needsTable Table.DbVersion then
                     $"CREATE TABLE {Table.DbVersion} (id TEXT PRIMARY KEY);
                       INSERT INTO {Table.DbVersion} VALUES ('v2.1')"
             }
             |> Seq.map (fun sql ->
-                log.LogInformation $"Creating {(sql.Split ' ')[2]} table..."
-                cmd.CommandText <- sql
-                write cmd |> Async.AwaitTask |> Async.RunSynchronously)
-            |> List.ofSeq
-            |> ignore
+                log.LogInformation $"""Creating {(sql.Replace("IF NOT EXISTS ", "").Split ' ')[2]} table..."""
+                Custom.nonQuery sql None)
+        let! _ = Task.WhenAll tasks
+        ()
     }
 
     /// Set the database version to the specified version
@@ -459,15 +451,6 @@ type SQLiteData(conn: SqliteConnection, log: ILogger<SQLiteData>, ser: JsonSeria
     /// The connection for this instance
     member _.Conn = conn
 
-    /// Make a SQLite connection ready to execute commends
-    static member setUpConnection (conn: SqliteConnection) = backgroundTask {
-        do! conn.OpenAsync()
-        use cmd = conn.CreateCommand()
-        cmd.CommandText <- "PRAGMA foreign_keys = TRUE"
-        let! _ = cmd.ExecuteNonQueryAsync()
-        ()
-    }
-
     interface IData with
 
         member _.Category = SQLiteCategoryData (conn, ser, log)
@@ -484,10 +467,6 @@ type SQLiteData(conn: SqliteConnection, log: ILogger<SQLiteData>, ser: JsonSeria
 
         member _.StartUp () = backgroundTask {
             do! ensureTables ()
-            use cmd = conn.CreateCommand()
-            cmd.CommandText <- $"SELECT id FROM {Table.DbVersion}"
-            use! rdr = cmd.ExecuteReaderAsync()
-            let! isFound = rdr.ReadAsync()
-            do! migrate (if isFound then Some (Map.getString "id" rdr) else None)
+            let! version = Custom.single<string> $"SELECT id FROM {Table.DbVersion}" None _.GetString(0)
+            do! migrate version
         }
@@ -50,12 +50,17 @@ type RedirectRuleMiddleware(next: RequestDelegate, log: ILogger<RedirectRuleMidd
 
 
 open System
-open BitBadger.Npgsql.FSharp.Documents
 open Microsoft.Extensions.DependencyInjection
 open MyWebLog.Data
 open Newtonsoft.Json
 open Npgsql
 
+// The PostgreSQL document library
+module Postgres = BitBadger.Npgsql.FSharp.Documents
+
+// The SQLite document library
+module Sqlite = BitBadger.Sqlite.FSharp.Documents
+
 /// Logic to obtain a data connection and implementation based on configured values
 module DataImplementation =
 
@@ -68,7 +73,7 @@ module DataImplementation =
         let builder = NpgsqlDataSourceBuilder(cfg.GetConnectionString "PostgreSQL")
         let _ = builder.UseNodaTime()
         // let _ = builder.UseLoggerFactory(LoggerFactory.Create(fun it -> it.AddConsole () |> ignore))
-        (builder.Build >> Configuration.useDataSource) ()
+        (builder.Build >> Postgres.Configuration.useDataSource) ()
 
     /// Get the configured data implementation
     let get (sp: IServiceProvider) : IData =
@@ -77,10 +82,10 @@ module DataImplementation =
         let connStr name = config.GetConnectionString name
         let hasConnStr name = (connStr >> isNull >> not) name
         let createSQLite connStr : IData =
+            Sqlite.Configuration.useConnectionString connStr
             let log = sp.GetRequiredService<ILogger<SQLiteData>>()
             let conn = new SqliteConnection(connStr)
             log.LogInformation $"Using SQLite database {conn.DataSource}"
-            await (SQLiteData.setUpConnection conn)
             SQLiteData(conn, log, Json.configure (JsonSerializer.CreateDefault()))
 
         if hasConnStr "SQLite" then
@@ -93,7 +98,7 @@ module DataImplementation =
             RethinkDbData(conn, rethinkCfg, log)
         elif hasConnStr "PostgreSQL" then
             createNpgsqlDataSource config
-            use conn = Configuration.dataSource().CreateConnection()
+            use conn = Postgres.Configuration.dataSource().CreateConnection()
             let log = sp.GetRequiredService<ILogger<PostgresData>>()
             log.LogInformation $"Using PostgreSQL database {conn.Database}"
             PostgresData(log, Json.configure (JsonSerializer.CreateDefault()))
@@ -170,13 +175,13 @@ let main args =
             opts.TableName <- "Session"
             opts.Connection <- rethink.Conn)
         ()
-    | :? SQLiteData as sql ->
+    | :? SQLiteData ->
         // ADO.NET connections are designed to work as per-request instantiation
         let cfg = sp.GetRequiredService<IConfiguration>()
         let _ =
             builder.Services.AddScoped<SqliteConnection>(fun sp ->
-                let conn = new SqliteConnection(sql.Conn.ConnectionString)
-                SQLiteData.setUpConnection conn |> Async.AwaitTask |> Async.RunSynchronously
+                let conn = Sqlite.Configuration.dbConn ()
+                conn.OpenAsync() |> Async.AwaitTask |> Async.RunSynchronously
                 conn)
         let _ = builder.Services.AddScoped<IData, SQLiteData>()
         // Use SQLite for caching as well
@@ -185,7 +190,7 @@ let main args =
         ()
     | :? PostgresData as postgres ->
         // ADO.NET Data Sources are designed to work as singletons
-        let _ = builder.Services.AddSingleton<NpgsqlDataSource>(Configuration.dataSource ())
+        let _ = builder.Services.AddSingleton<NpgsqlDataSource>(Postgres.Configuration.dataSource ())
        let _ = builder.Services.AddSingleton<IData> postgres
        let _ =
            builder.Services.AddSingleton<IDistributedCache>(fun _ ->
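
For orientation, a minimal sketch of how the BitBadger.Sqlite.FSharp.Documents calls used in this commit fit together, assuming the signatures exactly as they appear in the diff above (Configuration.useConnectionString, Definition.createTable/createKey, Custom.nonQuery, Custom.list); the connection string and table name below are placeholders, not values from the commit.

open BitBadger.Sqlite.FSharp.Documents

// Sketch only: wires together the library calls seen above; "Data Source=./sketch.db" and "sketch" are placeholders
let sketch () = backgroundTask {
    // Register the connection string with the library (as createSQLite now does)
    Configuration.useConnectionString "Data Source=./sketch.db"
    // Create a document table and its key index (what jsonTable now delegates to)
    let tbl = "sketch"
    do! Custom.nonQuery $"{Definition.createTable tbl}; {Definition.createKey tbl}" None
    // Run a custom query with a row-mapping function (as ensureTables now does)
    return! Custom.list<string> "SELECT name FROM sqlite_master WHERE type = 'table'" None _.GetString(0)
}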