Merge branch 'master' of git.silversoft.se:Imbus/FrostByte

commit 9bfe92f0d6
Author: Imbus
Date:   2023-12-14 22:50:42 +01:00

24 changed files with 498 additions and 219 deletions


@@ -3,10 +3,11 @@
   <head>
     <meta charset="UTF-8" />
+    <meta name="darkreader-lock">
     <link rel="shortcut icon"
       href="data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' class='MuiSvgIcon-root MuiSvgIcon-fontSizeLarge css-1756clo' focusable='false' aria-hidden='true' viewBox='0 0 24 24' data-testid='AcUnitIcon'%3E%3Cpath d='M22 11h-4.17l3.24-3.24-1.41-1.42L15 11h-2V9l4.66-4.66-1.42-1.41L13 6.17V2h-2v4.17L7.76 2.93 6.34 4.34 11 9v2H9L4.34 6.34 2.93 7.76 6.17 11H2v2h4.17l-3.24 3.24 1.41 1.42L9 13h2v2l-4.66 4.66 1.42 1.41L11 17.83V22h2v-4.17l3.24 3.24 1.42-1.41L13 15v-2h2l4.66 4.66 1.41-1.42L17.83 13H22z'%3E%3C/path%3E%3C/svg%3E" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <title>FrostByteSolid</title>
+    <title>FrostByte</title>
   </head>
   <body>


@@ -28,7 +28,7 @@ export function PostSegment(props: { post: Post }): JSXElement {
   return (
     <div class="card compact w-full flex-grow border-b-2 border-b-base-300 bg-base-200 text-base-content transition-all hover:bg-base-300">
       <div class="card-body">
-        <p class="break-words text-base-content">{props.post?.content}</p>
+        <p class="break-words text-base-content md:px-6 md:pt-2">{props.post?.content}</p>
         <div class="card-actions justify-end">
           <button
             onClick={(): void => nav("/post/" + props.post?.id)}


@@ -46,8 +46,7 @@ export function Footer(): JSXElement {
       </nav>
       <aside>
         <p>
-          Copyright © {new Date().getFullYear()} - All right reserved by Swarm
-          Industries Ltd
+          <b>{new Date().getFullYear()} FrostByte</b>
         </p>
       </aside>
     </footer>


@@ -1,9 +1,9 @@
 pg_pass := "password"
 pg_user := "postgres"
-pg_container := "postgres"
+pg_container := "postgres-frostbyte" # This is the name of the postgres container
 pg_port := "5432"
 network := "fb_network"
-db_name := "frostbyte"
+db_name := "frostbyte" # This is the name of the database
 conn_string := "postgres://" + pg_user + ":" + pg_pass + "@" + pg_container + ":" + pg_port / db_name
 conn_local := "postgres://" + pg_user + ":" + pg_pass + "@" + "localhost" + ":" + pg_port / db_name

@@ -24,6 +24,7 @@ build-container-server-debug:
 start-debug: start-postgres-dev clean-podman init-sqlx build-container-server-debug
     podman network create {{network}} --ignore
     podman run -d --network {{network}} -e {{env_string}} -p 8080:8080 --name frostbyte-debug fb-server-debug
+    podman ps | grep frostbyte-debug
     @echo "Debug server started."

 # Builds a release container

@@ -36,11 +37,17 @@ start-release: start-postgres-dev clean-podman init-sqlx build-container-release
     podman run -d --network {{network}} -e {{env_string}} -p 8080:8080 --name frostbyte fb-server

 # Initializes the database, runs migrations and then prepares sqlx
-init-sqlx:
+[private]
+init-sqlx: install-sqlx
     echo {{env_local}} > server/.env
     cd server && sqlx database create --connect-timeout 40 # Postgres takes a while to start up
-    cd server && sqlx migrate run --source migrations_pg
-    cd server && cargo sqlx prepare
+    cd server && sqlx migrate run
+    cd server && cargo sqlx prepare # If this fails, try running just clean
+
+# Shorthand for installing sqlx
+[private]
+install-sqlx:
+    cargo install sqlx-cli

 # Starts a postgres container for development
 [private]

@@ -58,18 +65,33 @@ clean-podman:
     podman container rm -f frostbyte
     podman container rm -f frostbyte-debug

+# Removes the database container
+[private]
+clean-db:
+    podman container rm -f {{pg_container}}
+
+# Removes the network
+[private]
+clean-network:
+    podman network rm -f {{network}}
+
 # Forcefully removes the frostbyte images
 [private]
 clean-images:
     podman image rm -f fb-server
     podman image rm -f fb-server-debug
+    podman image rm -f postgres

 # Cleans up everything related to the project
-clean: clean-podman clean-images
-    rm -rf client/dist
-    rm -rf client/node_modules
+clean: clean-podman clean-db clean-images clean-network && state
     rm -rf client-solid/dist
     rm -rf client-solid/node_modules
     rm -rf server/public
     rm -rf server/target
     @echo "Cleaned up! Make sure to clean up podman volumes and networks."
+
+state:
+    podman ps -a
+    podman images ls -a
+    podman network ls
+    du -sch client* server
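The init-sqlx recipe writes {{env_local}} into server/.env so that sqlx-cli and the server read the same connection string. A minimal sketch of the consuming side, assuming env_local expands to a DATABASE_URL=... assignment (its definition sits outside this hunk) and using dotenvy with sqlx's PgPoolOptions; the actual server wiring may differ:

use sqlx::postgres::PgPoolOptions;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Load server/.env if present; DATABASE_URL is the same variable sqlx-cli uses.
    dotenvy::dotenv().ok();
    let url = std::env::var("DATABASE_URL")?;
    let pool = PgPoolOptions::new()
        .max_connections(5)
        .connect(&url)
        .await?; // Fails fast if the postgres-frostbyte container is not up yet.
    // Mirrors `cd server && sqlx migrate run` from the recipe above.
    sqlx::migrate!("./migrations").run(&pool).await?;
    Ok(())
}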


@@ -0,0 +1,72 @@
{
"db_name": "PostgreSQL",
"query": "SELECT * FROM comments WHERE parent_post_id = $1 ORDER BY created_at DESC LIMIT $2 OFFSET $3",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "parent_post_id",
"type_info": "Int8"
},
{
"ordinal": 2,
"name": "parent_comment_id",
"type_info": "Int8"
},
{
"ordinal": 3,
"name": "author_user_id",
"type_info": "Int8"
},
{
"ordinal": 4,
"name": "content",
"type_info": "Text"
},
{
"ordinal": 5,
"name": "upvotes",
"type_info": "Int4"
},
{
"ordinal": 6,
"name": "downvotes",
"type_info": "Int4"
},
{
"ordinal": 7,
"name": "created_at",
"type_info": "Timestamp"
},
{
"ordinal": 8,
"name": "updated_at",
"type_info": "Timestamp"
}
],
"parameters": {
"Left": [
"Int8",
"Int8",
"Int8"
]
},
"nullable": [
false,
false,
true,
false,
false,
false,
false,
false,
false
]
},
"hash": "345472dbe81319923bf40fc39a1f8609a54f8ba99bc55f208fb01cda5dd219f7"
}


@@ -0,0 +1,17 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO comments (parent_post_id, parent_comment_id, author_user_id, content) VALUES ($1, $2, $3, $4)",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Int8",
"Int8",
"Text"
]
},
"nullable": []
},
"hash": "fe72509852c87463cea9775d9606e89a9851b372b39d68a10c16961acd968eef"
}

server/Cargo.lock (generated)

@@ -19,6 +19,21 @@ dependencies = [
  "tracing",
 ]

+[[package]]
+name = "actix-cors"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b340e9cfa5b08690aae90fb61beb44e9b06f44fe3d0f93781aaa58cfba86245e"
+dependencies = [
+ "actix-utils",
+ "actix-web",
+ "derive_more",
+ "futures-util",
+ "log",
+ "once_cell",
+ "smallvec",
+]
+
 [[package]]
 name = "actix-files"
 version = "0.6.2"

@@ -1840,6 +1855,7 @@ dependencies = [
 name = "server"
 version = "0.1.0"
 dependencies = [
+ "actix-cors",
  "actix-files",
  "actix-web",
  "argon2",


@@ -6,6 +6,7 @@ edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
+actix-cors = "0.6.4"
 actix-files = "0.6.2"
 actix-web = "4.4.0"
 argon2 = { version = "0.5.2", features = ["zeroize"] }


@@ -1,31 +1,38 @@
-CREATE TABLE
-  IF NOT EXISTS users (
-    id INTEGER PRIMARY KEY NOT NULL,
+CREATE TABLE IF NOT EXISTS users (
+    id BIGSERIAL PRIMARY KEY,
     username TEXT NOT NULL UNIQUE,
     password TEXT NOT NULL,
     created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
     updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
 );

--- Create a trigger to set created_at and updated_at on INSERT
-CREATE TRIGGER IF NOT EXISTS set_created_at AFTER INSERT ON users BEGIN
-UPDATE users
-SET
-    created_at = CURRENT_TIMESTAMP
-WHERE
-    id = NEW.id;
+-- Create a function to set created_at and updated_at on INSERT
+CREATE OR REPLACE FUNCTION set_timestamps_on_insert() RETURNS TRIGGER AS $$
+BEGIN
+    NEW.created_at = NOW();
+    NEW.updated_at = NOW();
+    RETURN NEW;
 END;
+$$ LANGUAGE plpgsql;

--- Create a trigger to set updated_at on UPDATE
-CREATE TRIGGER IF NOT EXISTS set_updated_at AFTER
-UPDATE ON users BEGIN
-UPDATE users
-SET
-    updated_at = CURRENT_TIMESTAMP
-WHERE
-    id = NEW.id;
+-- Create a trigger to call the function after INSERT
+CREATE TRIGGER set_timestamps_on_insert
+BEFORE INSERT ON users
+FOR EACH ROW
+EXECUTE FUNCTION set_timestamps_on_insert();
+
+-- Create a function to set updated_at on UPDATE
+CREATE OR REPLACE FUNCTION set_updated_at() RETURNS TRIGGER AS $$
+BEGIN
+    NEW.updated_at = NOW();
+    RETURN NEW;
 END;
+$$ LANGUAGE plpgsql;
+
+-- Create a trigger to call the function after UPDATE
+CREATE TRIGGER set_updated_at
+BEFORE UPDATE ON users
+FOR EACH ROW
+EXECUTE FUNCTION set_updated_at();

 CREATE INDEX users_username_index ON users (username);


@@ -1,7 +1,6 @@
-CREATE TABLE
-  IF NOT EXISTS posts (
-    id INTEGER PRIMARY KEY NOT NULL,
-    user_id INTEGER NOT NULL,
+CREATE TABLE IF NOT EXISTS posts (
+    id BIGSERIAL PRIMARY KEY,
+    user_id BIGINT NOT NULL,
     content TEXT NOT NULL,
     upvotes INTEGER NOT NULL DEFAULT 0,
     downvotes INTEGER NOT NULL DEFAULT 0,

@@ -10,29 +9,35 @@ CREATE TABLE
     FOREIGN KEY (user_id) REFERENCES users (id)
 );

--- Create a trigger to set created_at and updated_at on INSERT
-CREATE TRIGGER IF NOT EXISTS set_created_at AFTER INSERT ON posts BEGIN
-UPDATE posts
-SET
-    created_at = CURRENT_TIMESTAMP
-WHERE
-    id = NEW.id;
+-- Create a function to set created_at and updated_at on INSERT
+CREATE OR REPLACE FUNCTION set_timestamps_on_insert() RETURNS TRIGGER AS $$
+BEGIN
+    NEW.created_at = NOW();
+    NEW.updated_at = NOW();
+    RETURN NEW;
 END;
+$$ LANGUAGE plpgsql;

--- Create a trigger to set updated_at on UPDATE
-CREATE TRIGGER IF NOT EXISTS set_updated_at AFTER
-UPDATE ON posts BEGIN
-UPDATE posts
-SET
-    updated_at = CURRENT_TIMESTAMP
-WHERE
-    id = NEW.id;
+-- Create a trigger to call the function after INSERT
+CREATE TRIGGER set_timestamps_on_insert
+BEFORE INSERT ON posts
+FOR EACH ROW
+EXECUTE FUNCTION set_timestamps_on_insert();
+
+-- Create a function to set updated_at on UPDATE
+CREATE OR REPLACE FUNCTION set_updated_at() RETURNS TRIGGER AS $$
+BEGIN
+    NEW.updated_at = NOW();
+    RETURN NEW;
 END;
+$$ LANGUAGE plpgsql;
+
+-- Create a trigger to call the function after UPDATE
+CREATE TRIGGER set_updated_at
+BEFORE UPDATE ON posts
+FOR EACH ROW
+EXECUTE FUNCTION set_updated_at();

-create INDEX IF NOT EXISTS posts_user_id_index ON posts (user_id);
-
-create INDEX IF NOT EXISTS posts_id_index ON posts (id);
+CREATE INDEX posts_user_id_index ON posts (user_id);
+CREATE INDEX posts_id_index ON posts (id);
 CREATE INDEX idx_created_at_desc ON posts (created_at DESC);


@@ -0,0 +1,47 @@
CREATE TABLE IF NOT EXISTS comments (
id BIGSERIAL PRIMARY KEY,
parent_post_id BIGINT NOT NULL,
parent_comment_id BIGINT,
author_user_id BIGINT NOT NULL,
content TEXT NOT NULL,
upvotes INTEGER NOT NULL DEFAULT 0,
downvotes INTEGER NOT NULL DEFAULT 0,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (parent_post_id) REFERENCES posts (id),
FOREIGN KEY (parent_comment_id) REFERENCES comments (id),
FOREIGN KEY (author_user_id) REFERENCES users (id)
);
-- Create a function to set created_at and updated_at on INSERT
CREATE OR REPLACE FUNCTION comments_set_timestamps_on_insert() RETURNS TRIGGER AS $$
BEGIN
NEW.created_at = NOW();
NEW.updated_at = NOW();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Create a trigger to call the function after INSERT
CREATE TRIGGER comments_set_timestamps_on_insert
BEFORE INSERT ON comments
FOR EACH ROW
EXECUTE FUNCTION comments_set_timestamps_on_insert();
-- Create a function to set updated_at on UPDATE
CREATE OR REPLACE FUNCTION comments_set_updated_at() RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = NOW();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Create a trigger to call the function after UPDATE
CREATE TRIGGER comments_set_updated_at
BEFORE UPDATE ON comments
FOR EACH ROW
EXECUTE FUNCTION comments_set_updated_at();
CREATE INDEX comments_parent_post_id_index ON comments (parent_post_id);
CREATE INDEX comments_parent_comment_id_index ON comments (parent_comment_id);
CREATE INDEX comments_user_id_index ON comments (author_user_id);
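With these triggers in place, created_at and updated_at on comments are maintained by Postgres rather than by the application. A hedged test sketch that checks the UPDATE trigger actually refreshes updated_at; it assumes a connected pool plus an existing post id 1 and user id 1, and the helper name is illustrative:

use sqlx::{PgPool, Row};

async fn comments_timestamps_are_set(pool: &PgPool) -> Result<(), sqlx::Error> {
    // Insert a comment; the BEFORE INSERT trigger fills both timestamps.
    let inserted = sqlx::query(
        "INSERT INTO comments (parent_post_id, author_user_id, content)
         VALUES ($1, $2, $3) RETURNING id, updated_at",
    )
    .bind(1i64)
    .bind(1i64)
    .bind("hello")
    .fetch_one(pool)
    .await?;
    let id: i64 = inserted.get("id");
    let before: chrono::NaiveDateTime = inserted.get("updated_at");

    // Touch the row; the BEFORE UPDATE trigger should bump updated_at.
    sqlx::query("UPDATE comments SET content = $1 WHERE id = $2")
        .bind("edited")
        .bind(id)
        .execute(pool)
        .await?;

    let after: chrono::NaiveDateTime = sqlx::query("SELECT updated_at FROM comments WHERE id = $1")
        .bind(id)
        .fetch_one(pool)
        .await?
        .get("updated_at");
    assert!(after >= before);
    Ok(())
}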


@@ -1,38 +0,0 @@
CREATE TABLE IF NOT EXISTS users (
id BIGSERIAL PRIMARY KEY,
username TEXT NOT NULL UNIQUE,
password TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
-- Create a function to set created_at and updated_at on INSERT
CREATE OR REPLACE FUNCTION set_timestamps_on_insert() RETURNS TRIGGER AS $$
BEGIN
NEW.created_at = NOW();
NEW.updated_at = NOW();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Create a trigger to call the function after INSERT
CREATE TRIGGER set_timestamps_on_insert
BEFORE INSERT ON users
FOR EACH ROW
EXECUTE FUNCTION set_timestamps_on_insert();
-- Create a function to set updated_at on UPDATE
CREATE OR REPLACE FUNCTION set_updated_at() RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = NOW();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Create a trigger to call the function after UPDATE
CREATE TRIGGER set_updated_at
BEFORE UPDATE ON users
FOR EACH ROW
EXECUTE FUNCTION set_updated_at();
CREATE INDEX users_username_index ON users (username);


@@ -1,43 +0,0 @@
CREATE TABLE IF NOT EXISTS posts (
id BIGSERIAL PRIMARY KEY,
user_id BIGINT NOT NULL,
content TEXT NOT NULL,
upvotes INTEGER NOT NULL DEFAULT 0,
downvotes INTEGER NOT NULL DEFAULT 0,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id)
);
-- Create a function to set created_at and updated_at on INSERT
CREATE OR REPLACE FUNCTION set_timestamps_on_insert() RETURNS TRIGGER AS $$
BEGIN
NEW.created_at = NOW();
NEW.updated_at = NOW();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Create a trigger to call the function after INSERT
CREATE TRIGGER set_timestamps_on_insert
BEFORE INSERT ON posts
FOR EACH ROW
EXECUTE FUNCTION set_timestamps_on_insert();
-- Create a function to set updated_at on UPDATE
CREATE OR REPLACE FUNCTION set_updated_at() RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = NOW();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Create a trigger to call the function after UPDATE
CREATE TRIGGER set_updated_at
BEFORE UPDATE ON posts
FOR EACH ROW
EXECUTE FUNCTION set_updated_at();
CREATE INDEX posts_user_id_index ON posts (user_id);
CREATE INDEX posts_id_index ON posts (id);
CREATE INDEX idx_created_at_desc ON posts (created_at DESC);


@@ -1,4 +1,4 @@
-use crate::routes::{Post, User};
+use crate::types::{Comment, Post, User};
 use argon2::{
     password_hash::{rand_core::OsRng, SaltString},
     Argon2, PasswordHasher, PasswordVerifier,

@@ -6,6 +6,50 @@ use argon2::{
 use log::{info, warn};
 use sqlx::PgPool;

+pub async fn db_new_comment(
+    pool: &PgPool,
+    parent_post_id: i64,
+    parent_comment_id: Option<i64>,
+    user_id: i64,
+    content: &str,
+) -> bool {
+    let insert_query = sqlx::query!(
+        "INSERT INTO comments (parent_post_id, parent_comment_id, author_user_id, content) VALUES ($1, $2, $3, $4)",
+        parent_post_id,
+        parent_comment_id.unwrap_or(-1), // This is a bit of a hack
+        user_id,
+        content
+    )
+    .execute(pool)
+    .await;
+
+    if insert_query.is_err() {
+        let s = insert_query.err().unwrap();
+        warn!("Error inserting comment into database: {}", s);
+        return false;
+    }
+
+    true
+}
+
+pub async fn db_get_comments(
+    pool: &PgPool,
+    parent_post_id: i64,
+    limit: i64,
+    offset: i64,
+) -> Vec<Comment> {
+    sqlx::query_as!(
+        Comment,
+        "SELECT * FROM comments WHERE parent_post_id = $1 ORDER BY created_at DESC LIMIT $2 OFFSET $3",
+        parent_post_id,
+        limit,
+        offset
+    )
+    .fetch_all(pool)
+    .await
+    .unwrap()
+}
+
 /// Gets the latest posts from the database, ordered by created_at
 pub async fn db_get_latest_posts(pool: &PgPool, limit: i64, offset: i64) -> Vec<Post> {
     sqlx::query_as!(


@@ -1,3 +1,4 @@
+use actix_cors::Cors;
 use actix_files::Files;
 use actix_web::middleware;
 use actix_web::web::Data;

@@ -8,9 +9,10 @@ mod db;
 mod jwt;
 mod routes;
 mod state;
+mod types;
 mod util;

-use routes::{get_posts, login, new_post, post_by_id, register};
+use routes::{get_comments, get_posts, login, new_comment, new_post, post_by_id, register};
 use state::CaptchaState;
 use state::ServerState;
 use util::hex_string;

@@ -33,7 +35,13 @@ async fn main() -> std::io::Result<()> {
     info!("Spinning up server on http://localhost:8080");

     HttpServer::new(move || {
+        let cors = Cors::default()
+            .allowed_origin("https://shitpost.se")
+            .allowed_methods(vec!["GET", "POST"])
+            .max_age(3600);
+
         App::new()
+            .wrap(cors)
             .wrap(middleware::Compress::default())
             .wrap(middleware::Logger::default())
             .wrap(middleware::NormalizePath::trim())

@@ -41,6 +49,8 @@ async fn main() -> std::io::Result<()> {
             scope("/api")
                 .service(get_posts)
                 .service(new_post)
+                .service(new_comment)
+                .service(get_comments)
                 .service(post_by_id)
                 .service(login)
                 .service(register)
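The CORS layer above only admits https://shitpost.se, so a browser client served from a local dev server would be rejected. A sketch of a slightly more permissive variant built with the same actix-cors builder; the localhost origin, the port, and the debug_assertions gate are assumptions for development and not part of this commit:

use actix_cors::Cors;

fn cors_config() -> Cors {
    let cors = Cors::default()
        .allowed_origin("https://shitpost.se")
        .allowed_methods(vec!["GET", "POST"])
        .max_age(3600);
    if cfg!(debug_assertions) {
        // Hypothetical dev origin; adjust to wherever the Solid client is served from.
        cors.allowed_origin("http://localhost:5173")
    } else {
        cors
    }
}

Dropping this into the factory closure as App::new().wrap(cors_config()) would keep the release behaviour identical while unblocking local development.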


@@ -0,0 +1,69 @@
use crate::db::{db_get_comments, db_new_comment};
use crate::jwt::validate_token;
use crate::types::{CommentQueryParams, NewComment};
use crate::ServerState;
use actix_web::get;
use actix_web::web::{Data, Query};
use actix_web::{post, web::Json, HttpResponse, Responder, Result};
use log::info;
#[get("/comments")]
pub async fn get_comments(
comment_filter: Query<CommentQueryParams>,
state: Data<ServerState>,
) -> Result<impl Responder> {
let post_id = comment_filter.post_id;
let limit = comment_filter.limit.unwrap_or(10);
let offset = comment_filter.offset.unwrap_or(0);
info!(
"Getting comments for post {} with limit {} and offset {}",
post_id, limit, offset
);
let comments = db_get_comments(&state.pool, post_id, limit, offset).await;
Ok(HttpResponse::Ok().json(comments))
}
#[post("/comments")]
pub async fn new_comment(
data: Json<NewComment>,
state: Data<ServerState>,
) -> Result<impl Responder> {
let user_claims = validate_token(&data.user_token);
// Bail if the token is invalid
if let Err(e) = user_claims {
info!("Error validating token: {}", e);
return Ok(HttpResponse::BadRequest().json("Error"));
}
let claims = user_claims.unwrap();
info!("User {:?} created a new comment", &claims.sub);
let content = data.content.clone();
let username = claims.sub.clone();
// This one is avoidable if we just store the user id in the token
let userid = sqlx::query!("SELECT id FROM users WHERE username = $1", username)
.fetch_one(&state.pool)
.await
.unwrap()
.id;
let success = db_new_comment(
&state.pool,
data.parent_post_id,
data.parent_comment_id,
userid,
&content,
)
.await;
match success {
true => Ok(HttpResponse::Ok().json("Success")),
false => Ok(HttpResponse::BadRequest().json("Error")),
}
}
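For reference, the request shapes these two handlers accept, reconstructed from CommentQueryParams and NewComment (defined in server/src/types/comment.rs); the concrete values below are made up:

use serde_json::json;

fn main() {
    // GET /api/comments?post_id=42&limit=10&offset=0
    // limit and offset are optional and default to 10 and 0 in the handler.
    let _query = "post_id=42&limit=10&offset=0";

    // POST /api/comments with a JSON body matching NewComment.
    let body = json!({
        "parent_post_id": 42,
        "parent_comment_id": null,            // Option<i64>: null for a top-level comment
        "user_token": "<jwt from /api/login>",
        "content": "First!"
    });
    println!("{body}");
}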


@@ -1,5 +1,7 @@
+mod comment;
 mod post;
 mod users;

+pub use comment::*;
 pub use post::*;
 pub use users::*;


@@ -1,57 +1,17 @@
 use crate::db::{db_get_latest_posts, db_get_post, db_new_post};
 use crate::jwt::validate_token;
+use crate::types::{NewPost, PostQueryParams};
 use crate::ServerState;
 use actix_web::web::{Data, Path, Query};
 use actix_web::{get, post, web::Json, HttpResponse, Responder, Result};
 use log::info;
-use serde::{Deserialize, Serialize};
-use sqlx::FromRow;
-
-// The post as it is received from the client
-// The token is used to identify the user
-#[derive(Debug, Serialize, Deserialize)]
-pub struct NewPost {
-    pub content: String,
-    pub token: String,
-}
-
-// The post as it is stored in the database, with all the related metadata
-#[derive(Debug, Serialize, Deserialize, Clone, FromRow)]
-pub struct Post {
-    pub id: i64,
-    pub user_id: i64,
-    pub content: String,
-    pub upvotes: i64,
-    pub downvotes: i64,
-    pub created_at: chrono::NaiveDateTime,
-    pub updated_at: chrono::NaiveDateTime,
-}
-
-/// The user as it is stored in the database, with all the related metadata
-#[derive(Debug, Serialize, Deserialize, Clone, FromRow)]
-pub struct User {
-    pub id: i64,
-    pub username: String,
-    pub password: String,
-    pub created_at: chrono::NaiveDateTime,
-    pub updated_at: chrono::NaiveDateTime,
-}
-
-// These look like /posts?limit=10&offset=20 in the URL
-// Note that these are optional
-/// Query parameters for the /posts endpoint
-#[derive(Debug, Serialize, Deserialize)]
-pub struct QueryParams {
-    limit: Option<i64>,
-    offset: Option<i64>,
-}
-
 /// Gets all posts from the database, query parameters are optional
 /// If limit is not specified, it defaults to a sane value
 #[get("/posts")]
 pub async fn get_posts(
-    query: Query<QueryParams>,
+    query: Query<PostQueryParams>,
     state: Data<ServerState>,
 ) -> Result<impl Responder> {
     if let (Some(lim), Some(ofs)) = (query.limit, query.offset) {


@@ -1,32 +1,13 @@
 use crate::db::{db_new_user, db_user_login};
 use crate::jwt::token_factory;
 use crate::state::CaptchaState;
+use crate::types::{AuthResponse, LoginData, RegisterData};
 use crate::ServerState;
 use actix_web::web::Data;
 use actix_web::{post, web::Json, HttpResponse, Responder, Result};
 use biosvg::BiosvgBuilder;
 use log::*;
-use serde::{Deserialize, Serialize};
-
-#[derive(Debug, Serialize, Deserialize)]
-pub struct LoginData {
-    username: String,
-    password: String,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-pub struct AuthResponse {
-    username: String,
-    token: String,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-pub struct RegisterData {
-    username: String,
-    password: String,
-    captcha: String,
-}

 #[post("/register")]
 pub async fn register(

@@ -77,12 +58,6 @@ pub async fn login(data: Json<LoginData>, state: Data<ServerState>) -> Result<im
     }
 }

-#[derive(Debug, Serialize, Deserialize)]
-pub struct CaptchaResponse {
-    captcha_svg: String,
-    captcha_id: i32,
-}
-
 /// Request a captcha from the captcha service
 #[allow(unreachable_code, unused_variables)]
 #[post("/captcha")]


@@ -43,7 +43,7 @@ impl ServerState {
         .await
         .unwrap();

-        sqlx::migrate!("./migrations_pg").run(&pool).await.unwrap();
+        sqlx::migrate!("./migrations").run(&pool).await.unwrap();

         match crate::db::db_new_user("imbus".to_string(), "kartellen1234".to_string(), &pool).await
         {


@@ -0,0 +1,33 @@
use serde::{Deserialize, Serialize};
/// The comment as it is received from the client
#[derive(Debug, Serialize, Deserialize)]
pub struct NewComment {
pub parent_post_id: i64,
pub parent_comment_id: Option<i64>,
pub user_token: String,
pub content: String,
}
/// The comment as it is stored in the database, with all the related metadata
/// This is also the comment as it is sent to the client
#[derive(Debug, Serialize, Deserialize, Clone, sqlx::FromRow)]
pub struct Comment {
pub id: i64,
pub parent_post_id: i64,
pub parent_comment_id: Option<i64>,
pub author_user_id: i64,
pub upvotes: i64,
pub downvotes: i64,
pub content: String,
pub created_at: chrono::NaiveDateTime,
pub updated_at: chrono::NaiveDateTime,
}
/// Query parameters for the /comments endpoint
#[derive(Debug, Serialize, Deserialize)]
pub struct CommentQueryParams {
pub post_id: i64,
pub limit: Option<i64>,
pub offset: Option<i64>,
}

server/src/types/mod.rs (new file)

@@ -0,0 +1,7 @@
mod comment;
mod post;
mod user;
pub use comment::*;
pub use post::*;
pub use user::*;

server/src/types/post.rs (new file)

@@ -0,0 +1,31 @@
use serde::{Deserialize, Serialize};
use sqlx::FromRow;
// The post as it is received from the client
// The token is used to identify the user
#[derive(Debug, Serialize, Deserialize)]
pub struct NewPost {
pub content: String,
pub token: String,
}
// The post as it is stored in the database, with all the related metadata
#[derive(Debug, Serialize, Deserialize, Clone, FromRow)]
pub struct Post {
pub id: i64,
pub user_id: i64,
pub content: String,
pub upvotes: i64,
pub downvotes: i64,
pub created_at: chrono::NaiveDateTime,
pub updated_at: chrono::NaiveDateTime,
}
// These look like /posts?limit=10&offset=20 in the URL
// Note that these are optional
/// Query parameters for the /posts endpoint
#[derive(Debug, Serialize, Deserialize)]
pub struct PostQueryParams {
pub limit: Option<i64>,
pub offset: Option<i64>,
}
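The query-string comment above maps one-to-one onto PostQueryParams. A small sketch of that mapping using serde_urlencoded (the same format actix-web's Query extractor parses); the struct is repeated here only to keep the example self-contained:

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct PostQueryParams {
    limit: Option<i64>,
    offset: Option<i64>,
}

fn main() {
    // "limit=10&offset=20" fills both fields...
    let q: PostQueryParams = serde_urlencoded::from_str("limit=10&offset=20").unwrap();
    assert_eq!(q.limit, Some(10));
    assert_eq!(q.offset, Some(20));

    // ...while an empty query string leaves them as None, hence the Option types.
    let empty: PostQueryParams = serde_urlencoded::from_str("").unwrap();
    assert!(empty.limit.is_none() && empty.offset.is_none());
}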

server/src/types/user.rs (new file)

@@ -0,0 +1,42 @@
use serde::{Deserialize, Serialize};
use sqlx::FromRow;
/// The user as it is stored in the database, with all the related metadata
#[derive(Debug, Serialize, Deserialize, Clone, FromRow)]
pub struct User {
pub id: i64,
pub username: String,
pub password: String,
pub created_at: chrono::NaiveDateTime,
pub updated_at: chrono::NaiveDateTime,
}
/// The data received from the login form
#[derive(Debug, Serialize, Deserialize)]
pub struct LoginData {
pub username: String,
pub password: String,
}
/// The data received from the registration form
#[derive(Debug, Serialize, Deserialize)]
pub struct RegisterData {
pub username: String,
pub password: String,
pub captcha: String,
}
/// The response sent to the client after a successful login or registration
#[derive(Debug, Serialize, Deserialize)]
pub struct AuthResponse {
pub username: String,
pub token: String,
}
/// Data sent to the client to render the captcha
/// The captcha_id is used to identify the captcha in the database
#[derive(Debug, Serialize, Deserialize)]
pub struct CaptchaResponse {
pub captcha_svg: String,
pub captcha_id: i32,
}