Ported the project from sqlite to postgres

This commit is contained in:
Imbus 2023-11-14 08:40:45 +01:00
parent d397b5c1ed
commit 29c1fc8f82
23 changed files with 566 additions and 409 deletions

View file

@ -25,6 +25,9 @@ RUN cargo build --target x86_64-unknown-linux-musl --release
RUN rm src/*.rs
ADD server /build-container
# Make sure sqlx reads from .sqlx directory
ENV SQLX_OFFLINE true
RUN cargo build --target x86_64-unknown-linux-musl --release
# Final stage, copy the server binary and the frontend build

View file

@ -22,6 +22,9 @@ RUN cargo build --target x86_64-unknown-linux-musl
RUN rm src/*.rs
ADD server /build-container
# Make sure sqlx reads from .sqlx directory
ENV SQLX_OFFLINE true
# Note that '--release' is missing here, so we build in debug mode
RUN cargo build --target x86_64-unknown-linux-musl

View file

@ -1,76 +1,60 @@
runtime := "podman"
# Builds a debug container and runs it
dev: start-debug
@echo "Cd into client and run 'npm run dev' to start the client in dev mode."
[private]
npm-install directory:
cd {{directory}} && npm install
# Builds the client with npm (result in client/dist)
[private]
npm-build directory: (npm-install directory)
cd {{directory}} && npm run build
@echo "Built client at {{directory}}/dist"
# Builds a debug container
[private]
build-container-server-debug:
{{runtime}} build -t fb-server-debug -f container/ContainerfileDebug .
podman build -t fb-server-debug -f container/ContainerfileDebug .
# Builds a debug container and runs it
[private]
start-debug: build-container-server-debug remove-podman-containers
{{runtime}} run -d -e DATABASE_URL=sqlite:debug.db -p 8080:8080 --name frostbyte-debug fb-server-debug
start-debug: start-postgres-dev clean-podman init-sqlx build-container-server-debug
podman network create fb_network --ignore
podman run -d --network fb_network -e DATABASE_URL=postgres://postgres:password@postgres:5432/frostbyte -p 8080:8080 --name frostbyte-debug fb-server-debug
@echo "Debug server started."
# Builds a release container
[private]
build-container-release:
{{runtime}} build -t fb-server -f container/Containerfile .
podman build -t fb-server -f container/Containerfile .
# Builds a release container and runs it
start-release: build-container-release remove-podman-containers
{{runtime}} network create fb_network --ignore
{{runtime}} run -d --network fb_network -e DATABASE_URL=sqlite:release.db -p 8080:8080 --name frostbyte fb-server
start-release: start-postgres-dev clean-podman init-sqlx build-container-release
podman network create fb_network --ignore
podman run -d --network fb_network -e DATABASE_URL=postgres://postgres:password@postgres:5432/frostbyte -p 8080:8080 --name frostbyte fb-server
# Initializes the database, runs migrations and then prepares sqlx
init-sqlx:
echo "DATABASE_URL=sqlite:debug.db" > server/.env
cd server && sqlx database create
cd server && sqlx migrate run
echo "DATABASE_URL=postgres://postgres:password@localhost:5432/frostbyte" > server/.env
cd server && sqlx database create --connect-timeout 40 # Postgres takes a while to start up
cd server && sqlx migrate run --source migrations_pg
cd server && cargo sqlx prepare
# Removes and stops any containers related to the project
# Starts a postgres container for development
[private]
remove-podman-containers:
{{runtime}} network rm -f fb_network
{{runtime}} container rm -f frostbyte
{{runtime}} container rm -f frostbyte-debug
start-postgres-dev:
podman rm -f postgres
podman run --network fb_network --name postgres -e POSTGRES_PASSWORD=password -d -p 5432:5432 docker.io/postgres:16.1-alpine
# Deletes everything podman related (even unrelated to the project)
# Forcefully stops and removes the frostbyte container
[private]
prune-podman:
{{runtime}} stop -a
{{runtime}} rm -af
{{runtime}} image rm -af
{{runtime}} system prune -af
{{runtime}} system reset --force
clean-podman:
podman container rm -f frostbyte
podman container rm -f frostbyte-debug
# Forcefully removes the frostbyte images
[private]
clean-images:
podman image rm -f fb-server
podman image rm -f fb-server-debug
# Cleans up everything related to the project
clean:
{{runtime}} container rm -f frostbyte
{{runtime}} container rm -f frostbyte-debug
{{runtime}} image rm -f fb-server
{{runtime}} image rm -f fb-server-debug
clean: clean-podman clean-images
rm -rf client/dist
rm -rf client/node_modules
rm -rf client-solid/dist
rm -rf client-solid/node_modules
rm -rf server/public
rm -rf server/target
@echo "Cleaned up! Make sure to run 'just nuke' to nuke everything podman related."
# Nukes everything. No mercy. Leave no trace.
nuke: clean prune-podman
@echo "Nuked everything! You're starting from scratch now."
@echo "Cleaned up! Make sure to clean up podman volumes and networks."

View file

@ -1,20 +1,22 @@
{
"db_name": "SQLite",
"query": "SELECT username FROM users WHERE username = ?",
"db_name": "PostgreSQL",
"query": "SELECT username FROM users WHERE username = $1",
"describe": {
"columns": [
{
"name": "username",
"ordinal": 0,
"name": "username",
"type_info": "Text"
}
],
"parameters": {
"Right": 1
"Left": [
"Text"
]
},
"nullable": [
false
]
},
"hash": "fdbb1cd2873a5c866faaf3a60185d1f98e14198655aa536c2227ef8d2c6b88e1"
"hash": "16e84d577155f3c47fcb736bbad4dcaf05b21c79d47fe008e209191157f5697e"
}

View file

@ -1,46 +1,46 @@
{
"db_name": "SQLite",
"db_name": "PostgreSQL",
"query": "SELECT * FROM posts WHERE id = (SELECT MAX(id) FROM posts)",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Int64"
"name": "id",
"type_info": "Int8"
},
{
"name": "user_id",
"ordinal": 1,
"type_info": "Int64"
"name": "user_id",
"type_info": "Int8"
},
{
"name": "content",
"ordinal": 2,
"name": "content",
"type_info": "Text"
},
{
"name": "upvotes",
"ordinal": 3,
"type_info": "Int64"
"name": "upvotes",
"type_info": "Int4"
},
{
"name": "downvotes",
"ordinal": 4,
"type_info": "Int64"
"name": "downvotes",
"type_info": "Int4"
},
{
"name": "created_at",
"ordinal": 5,
"type_info": "Datetime"
"name": "created_at",
"type_info": "Timestamp"
},
{
"name": "updated_at",
"ordinal": 6,
"type_info": "Datetime"
"name": "updated_at",
"type_info": "Timestamp"
}
],
"parameters": {
"Right": 0
"Left": []
},
"nullable": [
false,

View file

@ -1,20 +0,0 @@
{
"db_name": "SQLite",
"query": "SELECT id FROM users WHERE username = ?",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Int64"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "2e61cd30a6cd3e0937dd096b4f94493e8bcb8c10687d0f8c0592fe38ed956fa6"
}

View file

@ -1,36 +1,38 @@
{
"db_name": "SQLite",
"query": "SELECT * FROM users WHERE username = ?",
"db_name": "PostgreSQL",
"query": "SELECT * FROM users WHERE username = $1",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Int64"
"name": "id",
"type_info": "Int8"
},
{
"name": "username",
"ordinal": 1,
"name": "username",
"type_info": "Text"
},
{
"name": "password",
"ordinal": 2,
"name": "password",
"type_info": "Text"
},
{
"name": "created_at",
"ordinal": 3,
"type_info": "Datetime"
"name": "created_at",
"type_info": "Timestamp"
},
{
"name": "updated_at",
"ordinal": 4,
"type_info": "Datetime"
"name": "updated_at",
"type_info": "Timestamp"
}
],
"parameters": {
"Right": 1
"Left": [
"Text"
]
},
"nullable": [
false,
@ -40,5 +42,5 @@
false
]
},
"hash": "98f4c0bfff04e07f5d0a46d48a31d24655826eebdf09c7f9f45d770df02035d3"
"hash": "606364c79e0990deb07dfbe6c32b3d302d083ec5333f3a5ce04113c38a041100"
}

View file

@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "INSERT INTO users (username, password) VALUES (?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "78ecd3bc5a999dc227a86556f0ce41065974104b735d96b3c5785480023bb60a"
}

View file

@ -1,10 +1,10 @@
{
"db_name": "SQLite",
"db_name": "PostgreSQL",
"query": "INSERT INTO posts (user_id, content) VALUES (1, 'Hello world! The demo username is user and the password is pass.')",
"describe": {
"columns": [],
"parameters": {
"Right": 0
"Left": []
},
"nullable": []
},

View file

@ -0,0 +1,15 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO posts (user_id, content) VALUES ($1, $2)",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Text"
]
},
"nullable": []
},
"hash": "a2835289cba16d38401e5324876508b8397ef7fbb9eb521ac3c5e57206eeecf7"
}

View file

@ -1,46 +1,46 @@
{
"db_name": "SQLite",
"db_name": "PostgreSQL",
"query": "SELECT * FROM posts WHERE id = 1",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Int64"
"name": "id",
"type_info": "Int8"
},
{
"name": "user_id",
"ordinal": 1,
"type_info": "Int64"
"name": "user_id",
"type_info": "Int8"
},
{
"name": "content",
"ordinal": 2,
"name": "content",
"type_info": "Text"
},
{
"name": "upvotes",
"ordinal": 3,
"type_info": "Int64"
"name": "upvotes",
"type_info": "Int4"
},
{
"name": "downvotes",
"ordinal": 4,
"type_info": "Int64"
"name": "downvotes",
"type_info": "Int4"
},
{
"name": "created_at",
"ordinal": 5,
"type_info": "Datetime"
"name": "created_at",
"type_info": "Timestamp"
},
{
"name": "updated_at",
"ordinal": 6,
"type_info": "Datetime"
"name": "updated_at",
"type_info": "Timestamp"
}
],
"parameters": {
"Right": 0
"Left": []
},
"nullable": [
false,

View file

@ -1,46 +1,48 @@
{
"db_name": "SQLite",
"query": "SELECT * FROM posts WHERE id = ?",
"db_name": "PostgreSQL",
"query": "SELECT * FROM posts WHERE id = $1",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Int64"
"name": "id",
"type_info": "Int8"
},
{
"name": "user_id",
"ordinal": 1,
"type_info": "Int64"
"name": "user_id",
"type_info": "Int8"
},
{
"name": "content",
"ordinal": 2,
"name": "content",
"type_info": "Text"
},
{
"name": "upvotes",
"ordinal": 3,
"type_info": "Int64"
"name": "upvotes",
"type_info": "Int4"
},
{
"name": "downvotes",
"ordinal": 4,
"type_info": "Int64"
"name": "downvotes",
"type_info": "Int4"
},
{
"name": "created_at",
"ordinal": 5,
"type_info": "Datetime"
"name": "created_at",
"type_info": "Timestamp"
},
{
"name": "updated_at",
"ordinal": 6,
"type_info": "Datetime"
"name": "updated_at",
"type_info": "Timestamp"
}
],
"parameters": {
"Right": 1
"Left": [
"Int8"
]
},
"nullable": [
false,
@ -52,5 +54,5 @@
false
]
},
"hash": "da280dfbdfe992918eb4f25ca61c08fc01474c3753a63e05b02051f5c066abc2"
"hash": "b6019471ff1989ef2f0658b0b34e683fdc706751e2bb69043544c9a4d08b5ba0"
}

View file

@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "INSERT INTO posts (user_id, content) VALUES (?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "c2804e9c7bbd92dcddc34dd6f9d95a2598bb69984d6023e38eeea54454887b90"
}

View file

@ -0,0 +1,15 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO users (username, password) VALUES ($1, $2)",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text",
"Text"
]
},
"nullable": []
},
"hash": "c936f44864dafe4660a736babd5f93050b7d35c66c0fe0c86f7b2dcdb7a1e3eb"
}

View file

@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT id FROM users WHERE username = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false
]
},
"hash": "dd99e48b1572e25db38f03da95984fda1072913b29bb6b3753a0d351583dfff6"
}

View file

@ -1,46 +1,49 @@
{
"db_name": "SQLite",
"query": "SELECT * FROM posts ORDER BY created_at DESC LIMIT ? OFFSET ?",
"db_name": "PostgreSQL",
"query": "SELECT * FROM posts ORDER BY created_at DESC LIMIT $1 OFFSET $2",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Int64"
"name": "id",
"type_info": "Int8"
},
{
"name": "user_id",
"ordinal": 1,
"type_info": "Int64"
"name": "user_id",
"type_info": "Int8"
},
{
"name": "content",
"ordinal": 2,
"name": "content",
"type_info": "Text"
},
{
"name": "upvotes",
"ordinal": 3,
"type_info": "Int64"
"name": "upvotes",
"type_info": "Int4"
},
{
"name": "downvotes",
"ordinal": 4,
"type_info": "Int64"
"name": "downvotes",
"type_info": "Int4"
},
{
"name": "created_at",
"ordinal": 5,
"type_info": "Datetime"
"name": "created_at",
"type_info": "Timestamp"
},
{
"name": "updated_at",
"ordinal": 6,
"type_info": "Datetime"
"name": "updated_at",
"type_info": "Timestamp"
}
],
"parameters": {
"Right": 2
"Left": [
"Int8",
"Int8"
]
},
"nullable": [
false,
@ -52,5 +55,5 @@
false
]
},
"hash": "8f5e9d8d0c7c33d31f02a9cbd9886ed945d788caeb704b2ee5f743f7bf5fac88"
"hash": "f68cd95363d7da716b14f430118176ed4da34e450fc07b812f6bf77073cc2128"
}

492
server/Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -19,7 +19,7 @@ log = "0.4.20"
serde = { version = "1.0.188", features = ["derive"] }
serde_json = "1.0.107"
sled = { version = "0.34.7" }
sqlx = { version = "0.7.2", features = ["sqlite", "runtime-tokio", "chrono", "uuid"] }
sqlx = { version = "0.7.2", features = ["runtime-tokio", "chrono", "uuid", "postgres", "tls-rustls"] }
uuid = { version = "1.4.1", features = ["serde", "v4"] }
[profile.dev.package.sqlx-macros]

View file

@ -0,0 +1,38 @@
-- Migration: users table with automatically maintained timestamp columns.
CREATE TABLE IF NOT EXISTS users (
id BIGSERIAL PRIMARY KEY,
username TEXT NOT NULL UNIQUE,
password TEXT NOT NULL,
-- Defaults cover plain inserts; the BEFORE INSERT trigger below overrides
-- them anyway so the server clock (NOW()) is always authoritative.
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
-- Create a function to set created_at and updated_at on INSERT
CREATE OR REPLACE FUNCTION set_timestamps_on_insert() RETURNS TRIGGER AS $$
BEGIN
NEW.created_at = NOW();
NEW.updated_at = NOW();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Create a trigger to call the function BEFORE INSERT (it must be BEFORE,
-- not after: only a BEFORE trigger can still modify the NEW row).
CREATE TRIGGER set_timestamps_on_insert
BEFORE INSERT ON users
FOR EACH ROW
EXECUTE FUNCTION set_timestamps_on_insert();
-- Create a function to set updated_at on UPDATE
CREATE OR REPLACE FUNCTION set_updated_at() RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = NOW();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Create a trigger to call the function BEFORE UPDATE (BEFORE, so that the
-- refreshed updated_at value is written as part of the same UPDATE).
CREATE TRIGGER set_updated_at
BEFORE UPDATE ON users
FOR EACH ROW
EXECUTE FUNCTION set_updated_at();
-- Speeds up lookups by username (login and existence checks).
CREATE INDEX users_username_index ON users (username);

View file

@ -0,0 +1,43 @@
-- Migration: posts table with automatically maintained timestamp columns.
CREATE TABLE IF NOT EXISTS posts (
id BIGSERIAL PRIMARY KEY,
user_id BIGINT NOT NULL,
content TEXT NOT NULL,
upvotes INTEGER NOT NULL DEFAULT 0,
downvotes INTEGER NOT NULL DEFAULT 0,
-- Defaults cover plain inserts; the BEFORE INSERT trigger below overrides
-- them anyway so the server clock (NOW()) is always authoritative.
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id)
);
-- Create a function to set created_at and updated_at on INSERT.
-- NOTE(review): this redefines (CREATE OR REPLACE) the same function created
-- by the users migration — harmless, but confirm sharing one definition is intended.
CREATE OR REPLACE FUNCTION set_timestamps_on_insert() RETURNS TRIGGER AS $$
BEGIN
NEW.created_at = NOW();
NEW.updated_at = NOW();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Create a trigger to call the function BEFORE INSERT (it must be BEFORE,
-- not after: only a BEFORE trigger can still modify the NEW row).
CREATE TRIGGER set_timestamps_on_insert
BEFORE INSERT ON posts
FOR EACH ROW
EXECUTE FUNCTION set_timestamps_on_insert();
-- Create a function to set updated_at on UPDATE
CREATE OR REPLACE FUNCTION set_updated_at() RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = NOW();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Create a trigger to call the function BEFORE UPDATE (BEFORE, so that the
-- refreshed updated_at value is written as part of the same UPDATE).
CREATE TRIGGER set_updated_at
BEFORE UPDATE ON posts
FOR EACH ROW
EXECUTE FUNCTION set_updated_at();
-- Index for fetching a user's posts.
CREATE INDEX posts_user_id_index ON posts (user_id);
-- NOTE(review): the primary key already creates a unique index on id, so this
-- index looks redundant — confirm before keeping it.
CREATE INDEX posts_id_index ON posts (id);
-- Supports the "latest posts" query: ORDER BY created_at DESC LIMIT/OFFSET.
CREATE INDEX idx_created_at_desc ON posts (created_at DESC);

View file

@ -4,13 +4,13 @@ use argon2::{
Argon2, PasswordHasher, PasswordVerifier,
};
use log::{info, warn};
use sqlx::SqlitePool;
use sqlx::PgPool;
// Gets the latest posts from the database, ordered by created_at
pub async fn db_get_latest_posts(pool: &SqlitePool, limit: i64, offset: i64) -> Vec<Post> {
pub async fn db_get_latest_posts(pool: &PgPool, limit: i64, offset: i64) -> Vec<Post> {
sqlx::query_as!(
Post,
"SELECT * FROM posts ORDER BY created_at DESC LIMIT ? OFFSET ?",
"SELECT * FROM posts ORDER BY created_at DESC LIMIT $1 OFFSET $2",
limit,
offset
)
@ -20,19 +20,19 @@ pub async fn db_get_latest_posts(pool: &SqlitePool, limit: i64, offset: i64) ->
}
// Gets the post with id from the database
pub async fn db_get_post(id: i64, pool: &SqlitePool) -> Option<Post> {
sqlx::query_as!(Post, "SELECT * FROM posts WHERE id = ?", id)
pub async fn db_get_post(id: i64, pool: &PgPool) -> Option<Post> {
sqlx::query_as!(Post, "SELECT * FROM posts WHERE id = $1", id)
.fetch_one(pool)
.await
.ok()
}
// Inserts a new post to the database
pub async fn db_new_post(userid: i64, content: &str, pool: &SqlitePool) -> Option<Post> {
pub async fn db_new_post(userid: i64, content: &str, pool: &PgPool) -> Option<Post> {
info!("User with id {} submitted a post", userid);
let insert_query = sqlx::query!(
"INSERT INTO posts (user_id, content) VALUES (?, ?)",
"INSERT INTO posts (user_id, content) VALUES ($1, $2)",
userid,
content
)
@ -57,8 +57,8 @@ pub async fn db_new_post(userid: i64, content: &str, pool: &SqlitePool) -> Optio
Some(post)
}
pub async fn db_user_exists(username: String, pool: &SqlitePool) -> bool {
let exists = sqlx::query!("SELECT username FROM users WHERE username = ?", username)
pub async fn db_user_exists(username: String, pool: &PgPool) -> bool {
let exists = sqlx::query!("SELECT username FROM users WHERE username = $1", username)
.fetch_one(pool)
.await
.ok()
@ -67,9 +67,9 @@ pub async fn db_user_exists(username: String, pool: &SqlitePool) -> bool {
exists.is_some()
}
pub async fn db_user_login(username: String, password: String, pool: &SqlitePool) -> Option<User> {
pub async fn db_user_login(username: String, password: String, pool: &PgPool) -> Option<User> {
let username = username.clone();
let user = sqlx::query_as!(User, "SELECT * FROM users WHERE username = ?", username)
let user = sqlx::query_as!(User, "SELECT * FROM users WHERE username = $1", username)
.fetch_one(pool)
.await
.ok()?;
@ -95,7 +95,7 @@ pub async fn db_user_login(username: String, password: String, pool: &SqlitePool
}
}
pub async fn db_new_user(username: String, password: String, pool: &SqlitePool) -> Option<User> {
pub async fn db_new_user(username: String, password: String, pool: &PgPool) -> Option<User> {
// First check if the user already exists
match db_user_exists(username.clone(), pool).await {
true => {
@ -113,7 +113,7 @@ pub async fn db_new_user(username: String, password: String, pool: &SqlitePool)
// Insert our new user into the database
let insert_query = sqlx::query!(
"INSERT INTO users (username, password) VALUES (?, ?)",
"INSERT INTO users (username, password) VALUES ($1, $2)",
username,
phc_hash
)
@ -123,7 +123,7 @@ pub async fn db_new_user(username: String, password: String, pool: &SqlitePool)
match insert_query {
Ok(_) => {
info!("User: {} registered", username);
let user = sqlx::query_as!(User, "SELECT * FROM users WHERE username = ?", username)
let user = sqlx::query_as!(User, "SELECT * FROM users WHERE username = $1", username)
.fetch_one(pool)
.await
.ok()?;

View file

@ -79,7 +79,7 @@ pub async fn new_post(new_post: Json<NewPost>, state: Data<ServerState>) -> Resu
let username = claims.sub.clone();
// This one is avoidable if we just store the user id in the token
let userid = sqlx::query!("SELECT id FROM users WHERE username = ?", username)
let userid = sqlx::query!("SELECT id FROM users WHERE username = $1", username)
.fetch_one(&state.pool)
.await
.unwrap()

View file

@ -4,10 +4,8 @@ use std::sync::Mutex;
use log::error;
use log::info;
use sqlx::migrate::MigrateDatabase;
use sqlx::Pool;
use sqlx::Sqlite;
use sqlx::{self, sqlite};
use sqlx::postgres::PgPoolOptions;
use sqlx::PgPool;
#[derive(Clone)]
pub struct CaptchaState {
@ -24,7 +22,7 @@ impl CaptchaState {
#[derive(Clone)]
pub struct ServerState {
pub pool: Pool<Sqlite>,
pub pool: PgPool,
}
impl ServerState {
@ -38,17 +36,13 @@ impl ServerState {
info!("Using db_url: {}", &db_url);
if !sqlx::Sqlite::database_exists(&db_url).await.unwrap() {
sqlx::Sqlite::create_database(&db_url).await.unwrap();
}
let pool = sqlite::SqlitePoolOptions::new()
let pool = PgPoolOptions::new()
.max_connections(5)
.connect(&db_url)
.await
.unwrap();
sqlx::migrate!("./migrations").run(&pool).await.unwrap();
sqlx::migrate!("./migrations_pg").run(&pool).await.unwrap();
match crate::db::db_new_user("imbus".to_string(), "kartellen1234".to_string(), &pool).await
{
@ -63,13 +57,10 @@ impl ServerState {
}
}
#[cfg(debug_assertions)]
use sqlx::SqlitePool;
// Inserts a bunch of dummy data into the database
// Mostly useful for debugging new posts, as we need to satisfy foreign key constraints.
#[cfg(debug_assertions)]
async fn debug_setup(pool: &SqlitePool) -> Result<(), sqlx::Error> {
async fn debug_setup(pool: &PgPool) -> Result<(), sqlx::Error> {
use sqlx::query;
use crate::db::db_new_user;