Compare commits

...

62 commits
base ... master

Author SHA1 Message Date
Imbus
d69a9dca2d Zip target and build instructions 2024-05-05 13:59:43 +02:00
Imbus
eb3b594aaa ERD target proper dependencies in make 2024-05-05 13:32:20 +02:00
Imbus
50f86a1682 Zip target final 2024-05-05 13:30:43 +02:00
Imbus
de0d667d79 ... 2024-05-05 13:29:06 +02:00
Imbus
625b5874b1 Zip target 2024-05-05 13:28:08 +02:00
Imbus
d0f2bd944d Final touches to sql 2024-05-05 13:25:49 +02:00
Imbus
b4b12b31a2 New table for deliveries 2024-05-05 12:51:01 +02:00
Imbus
1969d2f98f Some more cleaning in tables 2024-05-05 11:52:55 +02:00
Imbus
72d0380023 Cleaning 2024-05-05 11:47:26 +02:00
Imbus
09765fdb24 Some more tables and using views in query 2024-05-05 11:39:28 +02:00
Imbus
6554cb2b6f All tests passing 2024-05-05 10:52:13 +02:00
Imbus
4aeb738df3 Passing all but one test 2024-05-05 10:44:37 +02:00
Imbus
19b6b05b69 Dropping entire database every migration 2024-05-05 10:44:28 +02:00
Imbus
7d21a572af Helper tostring in recipe for easier debugging 2024-05-05 10:44:07 +02:00
Imbus
8c8584b6bf DB script fix. INT is not the same as INTEGER, apparently 2024-05-05 09:37:33 +02:00
Imbus
9843a078ea More tests passing 2024-05-05 07:25:19 +02:00
Imbus
71687c8bf7 Uncomment orders 2024-05-05 07:25:10 +02:00
Imbus
35607ce341 getPallets seemingly working 2024-05-05 06:47:56 +02:00
Imbus
0b8a771aa5 Migrate now deletes db... 2024-05-04 14:18:00 +02:00
Imbus
54939ca99b Redundant deliveries table removed 2024-05-04 14:17:48 +02:00
Imbus
fcde7e6f1e Proper clean target 2024-05-04 13:58:02 +02:00
Imbus
c8752c388c Typos 2024-05-04 13:57:55 +02:00
Imbus
c186c3b515 Demo steps 2024-05-04 13:52:41 +02:00
Imbus
884662e409 Renaming in database.java 2024-05-04 13:45:38 +02:00
Imbus
d52e1507d7 Table names in getCookies 2024-05-04 13:40:31 +02:00
Imbus
25f5dd151d Name changes 2024-05-04 13:40:12 +02:00
Imbus
926c8cb06f Typo 2024-05-04 13:38:10 +02:00
Imbus
2a834d8a8a Correct form of table name 2024-05-04 13:37:25 +02:00
Imbus
844ace05b0 Cookie table... 2024-05-04 13:36:27 +02:00
Imbus
cd11effb42 Typo 2024-05-04 13:34:22 +02:00
Imbus
dd1781fa71 make db consistent with readme 2024-05-04 13:33:24 +02:00
Imbus
bd4257e790 markdown syntax errors 2024-05-04 13:32:53 +02:00
Imbus
d9dc63ae25 Readme changes again 2024-05-04 13:29:12 +02:00
Imbus
5513fd445e Readme changes 2024-05-04 13:28:40 +02:00
Imbus
0445d0f795 Order table 2024-05-04 13:21:44 +02:00
Imbus
29b68d05e0 Readme, initial 2024-05-04 12:54:55 +02:00
Imbus
8dc532c6cc Makefile target for generating ERD diagram with eralchemy 2024-05-04 10:52:12 +02:00
Imbus
684ed3f130 Ifnore 2024-05-04 10:51:45 +02:00
Imbus
cbed41abb3 Makefile & ignore 2024-05-03 08:51:16 +02:00
Imbus
b9c8cf5b05 Release in makefile 2024-05-03 08:34:42 +02:00
Imbus
43668dc8d3 Passing more tests 2024-05-03 08:34:33 +02:00
Imbus
9425d585bf Updated sql 2024-05-03 08:34:18 +02:00
Imbus
e0310d0410 Ignore 2024-05-03 08:33:17 +02:00
Imbus
3530b4d140 Ignore 2024-05-03 08:18:53 +02:00
Imbus
43958cffd4 Some default recipes 2024-05-03 08:17:00 +02:00
Imbus
c27f07d06d Makefile target for manual migration to help with debugging 2024-05-03 08:16:54 +02:00
Imbus
0b9c183a2d Fixing migration method 2024-05-03 08:16:38 +02:00
Imbus
0416a1d3da Correcting sql, work on tables 2024-05-03 08:16:21 +02:00
Imbus
83e82c93aa Update script location in main 2024-05-03 05:36:07 +02:00
Imbus
87ad067bbf Rename migration scripts 2024-05-03 05:35:13 +02:00
Imbus
570366c8e7 Reworking tables 2024-05-03 05:30:52 +02:00
Imbus
5988b459b4 Recipe and ingredient helper classes 2024-05-03 05:30:29 +02:00
Imbus
a1ac8a366b Select helper, unsafe but functional 2024-05-03 05:29:54 +02:00
Imbus
87ba5bb679 Snake case table names 2024-05-03 01:59:07 +02:00
Imbus
e6560653d9 Testing dependencies 2024-05-03 01:18:43 +02:00
Imbus
c591c6431b Version bumps 2024-04-22 14:07:25 +02:00
Imbus
18b2e14e78 Sample data 2024-04-22 14:07:00 +02:00
Imbus
09e733add3 Cleaner migration 2024-04-22 13:59:38 +02:00
Imbus
ebbd68d6f2 Correcting path to migration in main 2024-04-22 13:30:39 +02:00
Imbus
7f67fe30c1 Removed submodule 2024-04-22 13:27:51 +02:00
Imbus
e79fab26ef Added table migrations to project instead of submodule 2024-04-22 13:27:42 +02:00
Imbus
75fd5259bf Shadow jar 2024-04-21 17:43:50 +02:00
13 changed files with 755 additions and 54 deletions

10
.gitignore vendored
View file

@@ -23,4 +23,12 @@ gradle-app.setting
__MACOSX
.DS_Store
.vscode
krusty.sqlite3
krusty.sqlite3
*.sqlite3
*.db
*.tar.gz
*.zip
*.minisig
*.jpg
*.pdf

3
.gitmodules vendored
View file

@@ -1,3 +0,0 @@
[submodule "app/CrustyCookies"]
path = app/CrustyCookies
url = https://git.silversoft.se/Imbus/CrustyCookies.git

146
README.md Normal file
View file

@@ -0,0 +1,146 @@
# Krusty Cookies
> Krusty Kookies is a bakery which specializes in cookies, and they need a database to keep track of their production and deliveries.
## Building and testing
This project uses SQLite 3 as its database. Migrations happen automatically on launch.
Migrations drop all tables and recreate them, so all data is lost on restart.
By default, JDBC connects SQLite to an in-memory database, so all data is lost on restart anyway.
```bash
./gradlew build
./gradlew test
```
The Gradle environment is bumped to a recent version and is configured with Kotlin and JUnit 5.
The JUnit 4 test syntax was ported to JUnit 5.
**Most** of the pre-configured dependencies in the handout contained CVEs of varying severity, so they were updated to newer versions.
No tests were changed; some helper classes were implemented.
## Base tables
**This description is no longer consistent with the current state of the project.**
Unsurprisingly, we will need a cookie table.
```sql
CREATE TABLE cookies (
cookie_id INT PRIMARY KEY,
cookie_name VARCHAR(50) NOT NULL UNIQUE
);
```
Last I checked, a commercial bakery needs customers:
```sql
CREATE TABLE customers (
customer_id INT PRIMARY KEY,
customer_name VARCHAR(50) NOT NULL,
customer_address VARCHAR(50) NOT NULL
);
```
We could also have a recipe table that relates ingredients to cookies. But instead, we just keep track of inventory (raw materials) and let the business logic handle orders/production by subtracting a certain set of ingredients from the inventory, and adding the corresponding pallets to the "freezer".
```sql
CREATE TABLE raw_materials (
ingredient_id INT PRIMARY KEY,
ingredient_name VARCHAR(50) NOT NULL UNIQUE,
ingredient_quantity INT NOT NULL,
unit VARCHAR(50) NOT NULL CHECK (unit IN ('g', 'ml'))
);
```
> When a pallet is produced, the raw materials storage must be updated, and the company must be able to check the amount in store of each ingredient, and to see when, and **how much of**, an ingredient was last delivered into storage.
Because of the 'how much of' part, we cannot simply record a last_delivery field in the raw_materials table.
We will use a separate table to keep track of increments in inventory (raw material deliveries).
```sql
CREATE TABLE raw_materials_deliveries (
delivery_id INT PRIMARY KEY,
ingredient_id INT NOT NULL,
delivery_date DATE NOT NULL,
delivery_quantity INT NOT NULL,
unit VARCHAR(50) NOT NULL CHECK (unit IN ('g', 'ml')),
FOREIGN KEY (ingredient_id) REFERENCES raw_materials(ingredient_id)
);
```
When receiving new inventory, we need to initiate a transaction that updates the inventory table and adds a new row to the raw_materials_deliveries table.
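A minimal sketch of such a transaction, assuming the actual schema in `Migrations/create-schema.sql` (where `delivery_id` is an `INTEGER PRIMARY KEY` and is assigned automatically); the ingredient id, date and quantity are placeholders:
```sql
BEGIN TRANSACTION;

-- Record the delivery itself: what, when and how much.
INSERT INTO raw_materials_deliveries (ingredient_id, delivery_date, delivery_quantity, unit)
VALUES (1, '2024-05-05', 10000, 'g');

-- Bump the amount in stock by the same quantity.
UPDATE raw_materials
SET ingredient_quantity = ingredient_quantity + 10000
WHERE ingredient_id = 1;

COMMIT;
```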
### Pallets and orders
> The cookies are baked in large quantities, and then quickly frozen and packaged in bags with 15 cookies in each bag. The bags are put into boxes, with 10 bags per box. Finally, the boxes are stacked on pallets, where each pallet contains 36 boxes.
15 x 10 x 36 = 5400 cookies per pallet. So, to produce a pallet: calculate the total material cost and subtract it from the inventory.
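The application does this bookkeeping in Java against `DefaultRecipes`, but the same arithmetic can be sketched in SQL against the `recipe_contents` table from the actual schema (mirroring the factor of 54 used in `Database.createPallet`: recipe amounts appear to be per 100 cookies, so a 5400-cookie pallet needs 54 times the listed amounts). The cookie name is just an example:
```sql
-- Ingredients needed for one pallet (54 batches) of 'Tango', next to current stock.
SELECT rm.ingredient_name,
       rc.quantity * 54       AS needed_per_pallet,
       rm.ingredient_quantity AS in_stock
FROM recipe_contents rc
JOIN cookies c        ON c.cookie_id = rc.cookie_id
JOIN raw_materials rm ON rm.ingredient_id = rc.ingredient_id
WHERE c.cookie_name = 'Tango';
```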
> The company only delivers to wholesale customers, and a typical order looks like “send 10 pallets of Tango cookies, and 6 pallets of Berliners to Kalaskakor AB”; pallets are the unit of all orders (i.e., you can't break up a pallet in an order).
This is reiterating the fact that 'pallet' is the atomic unit of orders. Furthermore:
> A pallet is considered to be produced when the pallet label is read at the entrance to the deep-freeze storage. The pallet number, product name, and date and time of production are registered in the database. The pallet number is unique.
Conceptually, what happens before the cookies arrive in the freezer is not interesting to us. We only care about the final pallets. The number of cookies per pallet is also of interest, to keep track of the inventory (recipes and subsequent raw_materials subtractions are handled by the business logic).
Either we have pallets in the freezer, or we make more pallets. If we do not have enough inventory to make a pallet, we can't make a pallet, and the order is rejected.
```sql
CREATE TABLE pallets (
pallet_id INT PRIMARY KEY,
cookie_id INT NOT NULL,
order_id INT NOT NULL,
status VARCHAR(50) NOT NULL CHECK (status IN ('freezer', 'delivered', 'blocked')),
production_date DATE NOT NULL,
FOREIGN KEY (cookie_id) REFERENCES cookies(cookie_id),
FOREIGN KEY (order_id) REFERENCES orders(order_id)
);
```
> When the truck is fully loaded, the driver receives a loading bill containing customer names, addresses, and the number of pallets of each product that is to be delivered to each customer. A transport may contain deliveries intended for different customers.
This suggests that we may need to relate pallets to customers via a truck entity; however, this truck entity is never referenced, so we can omit it entirely.
> On delivery, pallets are transported from the deep-freeze storeroom via a loading ramp to the freezer trucks; each truck loads 60 pallets. The entry to the loading ramp contains a bar code reader which reads the pallet label. Pallets must be loaded in production date order.
Again, the truck as an entity is irrelevant; we only need a table to keep track of which pallet was delivered on which order (and thereby to which customer), along with a date. The actual schema models this with a `deliveries` table:
```sql
CREATE TABLE deliveries (
    delivery_date DATE DEFAULT CURRENT_DATE,
    order_id INT NOT NULL,
    pallet_id INT NOT NULL,
    PRIMARY KEY (order_id, pallet_id),
    FOREIGN KEY (order_id) REFERENCES orders(order_id),
    FOREIGN KEY (pallet_id) REFERENCES pallets(pallet_id)
);
```
> Orders must be registered in the database, and, for production planning purposes, the company must be able to see all orders which are to be delivered during a specific time period.
Note that the delivery date is tracked per individual pallet, not on the order itself.
```sql
CREATE TABLE orders (
order_id INT PRIMARY KEY,
customer_id INT NOT NULL,
cookie_id INT NOT NULL,
order_date DATE NOT NULL DEFAULT CURRENT_DATE CHECK (order_date >= CURRENT_DATE),
FOREIGN KEY (customer_id) REFERENCES customers(customer_id),
FOREIGN KEY (cookie_id) REFERENCES cookies(cookie_id)
);
```
> The company continuously takes random samples among the products, and the samples are analyzed in their laboratory. If a sample doesn't meet their quality standards, all pallets containing that product which have been produced during a specific time interval are blocked. A blocked pallet may not be delivered to customers.
So, our pallet table needs a status column. This conveniently fits as an enum of 'freezer', 'delivered' and 'blocked'.
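Against the actual schema, blocking could look something like this (the cookie name and interval are placeholders):
```sql
-- Block all 'Berliner' pallets produced in the given interval.
UPDATE pallets
SET status = 'blocked'
WHERE cookie_id = (SELECT cookie_id FROM cookies WHERE cookie_name = 'Berliner')
  AND production_date BETWEEN '2024-05-01' AND '2024-05-05';
```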
> All pallets must be traceable, for instance, the company needs to be able to see all information about a pallet with a given number (the contents of the pallet, the location of the pallet, if the pallet is delivered and in that case to whom, etc.). They must also be able to see which pallets contain a certain product and which pallets have been produced during a certain time interval.
This should all be possible with a simple join query.
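For instance, against the actual schema (the pallet number is a placeholder):
```sql
-- Everything we know about pallet 42: product, status, production date,
-- and, if it has been delivered, when and to which customer.
SELECT p.pallet_id, c.cookie_name, p.status, p.production_date,
       d.delivery_date, cu.customer_name, cu.customer_address
FROM pallets p
JOIN cookies c         ON c.cookie_id = p.cookie_id
LEFT JOIN deliveries d ON d.pallet_id = p.pallet_id
LEFT JOIN orders o     ON o.order_id = d.order_id
LEFT JOIN customers cu ON cu.customer_id = o.customer_id
WHERE p.pallet_id = 42;
```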
> Blocked products are of special interest. The company needs to find out **which products are blocked**, and also which pallets contain a certain blocked product.
As well as:
> Finally, they must be able to check **which pallets have been delivered** to a given customer, and the date and time of delivery.
And:
> Orders must be registered in the database, and, for production planning purposes, the company must be able to see all orders which are to be **delivered during a specific time period**.
These are all trivial queries.
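Sketched against the actual schema (names and dates are placeholders):
```sql
-- Which products are currently blocked.
SELECT DISTINCT c.cookie_name
FROM pallets p
JOIN cookies c ON c.cookie_id = p.cookie_id
WHERE p.status = 'blocked';

-- Which pallets have been delivered to a given customer, and when.
SELECT p.pallet_id, c.cookie_name, d.delivery_date
FROM deliveries d
JOIN orders o     ON o.order_id = d.order_id
JOIN customers cu ON cu.customer_id = o.customer_id
JOIN pallets p    ON p.pallet_id = d.pallet_id
JOIN cookies c    ON c.cookie_id = p.cookie_id
WHERE cu.customer_name = 'Kalaskakor AB';

-- Orders to be delivered during a specific time period.
SELECT o.order_id, cu.customer_name, o.expected_delivery_date
FROM orders o
JOIN customers cu ON cu.customer_id = o.customer_id
WHERE o.expected_delivery_date BETWEEN '2024-05-01' AND '2024-05-31';
```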

@@ -1 +0,0 @@
Subproject commit be904a7ab3925f7bbf3b54126e25480476ae49e8

View file

@@ -0,0 +1,124 @@
PRAGMA foreign_keys = OFF;
-- Drop everything...
DROP TABLE IF EXISTS pallets;
DROP TABLE IF EXISTS raw_materials_deliveries;
DROP TABLE IF EXISTS raw_materials;
DROP TABLE IF EXISTS orders;
DROP TABLE IF EXISTS customers;
DROP TABLE IF EXISTS cookies;
--------------------------------------------
-- Orders, deliveries and customers
--------------------------------------------
-- Our known customers, may need more fields
CREATE TABLE IF NOT EXISTS customers (
customer_id INTEGER PRIMARY KEY,
customer_name VARCHAR(50) NOT NULL,
customer_address VARCHAR(50) NOT NULL
);
-- Orders from customers.
CREATE TABLE IF NOT EXISTS orders (
order_id INTEGER PRIMARY KEY,
customer_id INT NOT NULL,
order_date DATE NOT NULL DEFAULT CURRENT_DATE CHECK (order_date >= CURRENT_DATE),
expected_delivery_date DATE NOT NULL,
FOREIGN KEY (customer_id) REFERENCES customers(customer_id)
);
CREATE TABLE IF NOT EXISTS order_spec (
nbr_pallets INTEGER NOT NULL,
order_id INTEGER NOT NULL,
cookie_id INTEGER NOT NULL,
FOREIGN KEY (order_id) REFERENCES orders(order_id),
FOREIGN KEY (cookie_id) REFERENCES cookies(cookie_id),
PRIMARY KEY (order_id, cookie_id)
);
--------------------------------------------
-- Cookies, raw_materials and recipes
--------------------------------------------
-- Notes: the unit type could be defined in terms
-- of volume or weight instead. Here we choose
-- to use fixed SI units ('g', 'ml') in the relevant tables.
-- Holds the different types of cookies we can make.
CREATE TABLE IF NOT EXISTS cookies (
cookie_id INTEGER PRIMARY KEY,
cookie_name VARCHAR(50) NOT NULL UNIQUE
);
-- What types of raw_materials do we handle.
-- raw_materials quantity tells us amount in stock
CREATE TABLE IF NOT EXISTS raw_materials (
ingredient_id INTEGER PRIMARY KEY,
ingredient_name VARCHAR(50) NOT NULL UNIQUE,
ingredient_quantity INT NOT NULL,
unit VARCHAR(50) NOT NULL CHECK (unit IN ('g', 'ml'))
);
-- What raw_materials are in what cookies?
-- Glues together the cookies and raw_materials, a 'recipe'.
CREATE TABLE IF NOT EXISTS recipe_contents (
cookie_id INT NOT NULL,
ingredient_id INT NOT NULL,
quantity INT NOT NULL,
unit VARCHAR(50) NOT NULL CHECK (unit IN ('g', 'ml')),
PRIMARY KEY (cookie_id, ingredient_id),
FOREIGN KEY (cookie_id) REFERENCES cookies(cookie_id),
FOREIGN KEY (ingredient_id) REFERENCES raw_materials(ingredient_id)
);
-- When did we get the raw_materials?
CREATE TABLE IF NOT EXISTS raw_materials_deliveries (
delivery_id INTEGER PRIMARY KEY,
ingredient_id INT NOT NULL,
delivery_date DATE NOT NULL,
delivery_quantity INT NOT NULL,
unit VARCHAR(50) NOT NULL CHECK (unit IN ('g', 'ml')),
FOREIGN KEY (ingredient_id) REFERENCES raw_materials(ingredient_id)
);
--------------------------------------------
-- Pallet related tables
--------------------------------------------
-- Pallets are used to store cookies for delivery
-- Order related columns are unused for now.
CREATE TABLE IF NOT EXISTS pallets (
pallet_id INTEGER PRIMARY KEY,
cookie_id INT NOT NULL,
status VARCHAR(50) NOT NULL CHECK (status IN ('freezer', 'delivered', 'blocked')),
production_date DATE NOT NULL DEFAULT CURRENT_DATE,
FOREIGN KEY (cookie_id) REFERENCES cookies(cookie_id)
);
-- Connects pallets to orders
CREATE TABLE IF NOT EXISTS deliveries (
delivery_date DATE DEFAULT CURRENT_DATE,
order_id INT NOT NULL,
pallet_id INT NOT NULL,
FOREIGN KEY (order_id) REFERENCES orders(order_id),
FOREIGN KEY (pallet_id) REFERENCES pallets(pallet_id),
PRIMARY KEY (order_id, pallet_id)
);
--------------------------------------------
-- Views
--------------------------------------------
-- Pallet
CREATE VIEW IF NOT EXISTS pallets_view AS
SELECT
pallets.pallet_id,
cookie_name,
status,
production_date,
delivery_date
FROM pallets
LEFT JOIN cookies ON pallets.cookie_id = cookies.cookie_id
LEFT JOIN deliveries ON pallets.pallet_id = deliveries.pallet_id;
PRAGMA foreign_keys = ON;

View file

@@ -0,0 +1,45 @@
-- Inserts here
INSERT
OR IGNORE INTO customers (customer_id, customer_name, customer_address)
VALUES
(1, 'Bjudkakor AB', 'Ystad'),
(2, 'Finkakor AB', 'Helsingborg'),
(3, 'Gästkakor AB', 'Hässleholm'),
(4, 'Kaffebröd AB', 'Landskrona'),
(5, 'Kalaskakor AB', 'Trelleborg'),
(6, 'Partykakor AB', 'Kristianstad'),
(7, 'Skånekakor AB', 'Perstorp'),
(8, 'Småbröd AB', 'Malmö');
INSERT
OR IGNORE INTO cookies (cookie_name)
VALUES
('Nut ring'),
('Nut cookie'),
('Amneris'),
('Tango'),
('Almond delight'),
('Berliner');
INSERT
OR IGNORE INTO raw_materials(ingredient_name, ingredient_quantity, unit)
VALUES
('Bread crumbs', 500000, 'g'),
('Butter', 500000, 'g'),
('Chocolate', 500000, 'g'),
('Chopped almonds', 500000, 'g'),
('Cinnamon', 500000, 'g'),
('Egg whites', 500000, 'ml'),
('Eggs', 500000, 'g'),
('Fine-ground nuts', 500000, 'g'),
('Flour', 500000, 'g'),
('Ground, roasted nuts', 500000, 'g'),
('Icing sugar', 500000, 'g'),
('Marzipan', 500000, 'g'),
('Potato starch', 500000, 'g'),
('Roasted, chopped nuts', 500000, 'g'),
('Sodium bicarbonate', 500000, 'g'),
('Sugar', 500000, 'g'),
('Vanilla', 500000, 'g'),
('Vanilla sugar', 500000, 'g'),
('Wheat flour', 500000, 'g');

View file

@@ -8,6 +8,7 @@
plugins {
// Apply the application plugin to add support for building a CLI application in Java.
application
id("com.github.johnrengelman.shadow") version "8.1.1"
}
repositories {
@@ -17,18 +18,17 @@ repositories {
dependencies {
testImplementation("org.junit.jupiter:junit-jupiter:5.10.2")
testImplementation("org.skyscreamer:jsonassert:1.5.0")
testImplementation("com.mashape.unirest:unirest-java:1.4.9")
testImplementation("org.skyscreamer:jsonassert:1.5.0") // For JSON assertions in tests.
testImplementation("com.mashape.unirest:unirest-java:1.4.9") // For HTTP requests in tests.
testRuntimeOnly("org.junit.platform:junit-platform-launcher:1.10.2")
implementation("com.google.guava:guava:32.1.1-jre")
// implementation("com.google.guava:guava:33.1.0-jre") // Currently not used.
implementation("com.sparkjava:spark-core:2.9.4")
implementation("com.fasterxml.jackson.core:jackson-core:2.17.0")
implementation("com.fasterxml.jackson.core:jackson-databind:2.17.0")
implementation("org.slf4j:slf4j-simple:1.7.30")
implementation("mysql:mysql-connector-java:8.0.19")
implementation("org.xerial:sqlite-jdbc:3.30.1")
implementation("org.slf4j:slf4j-simple:2.0.13")
implementation("org.xerial:sqlite-jdbc:3.45.3.0")
}
// Apply a specific Java toolchain to ease working on different environments.
@@ -61,4 +61,4 @@ tasks.named<Jar>("jar") {
"Implementation-Version" to version
)
}
}
}

View file

@@ -6,50 +6,278 @@ import spark.Response;
// Likely dependencies for db operations
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.SimpleDateFormat;
// Likely dependencies for general operations
import java.io.File;
import java.util.StringJoiner;
import java.io.IOException;
import java.io.FileNotFoundException;
import java.util.Scanner;
import java.sql.ResultSet;
import java.util.Date;
import java.util.Optional;
import java.util.StringJoiner;
import java.util.stream.Collectors;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.stream.Stream;
public class Database {
// Here, we use an in-memory database. This string could be changed to
// "jdbc:sqlite:<filename>.sqlite3" to use a file-based database instead.
// Note that ":memory:" is an **SQLite specific** magic string that tells the
// underlying SQLite engine to store the database in memory.
private static final String jdbcString = "jdbc:sqlite::memory:";
private static final String jdbcString = "jdbc:sqlite::memory:";
// private static final String jdbcString = "jdbc:sqlite:krusty.db";
// Holds a single connection to the database. Note that this is
// not a pool, so it is neither thread-safe nor efficient.
private Connection conn = null;
public String getCustomers(Request req, Response res) {
return "{}";
String result = selectQuery("customers", "customers", "customer_name", "customer_address");
result = result.replaceAll("customer_name", "name");
result = result.replaceAll("customer_address", "address");
return result;
}
public String getRawMaterials(Request req, Response res) {
return "{}";
String result = selectQuery("raw_materials", "raw-materials", "ingredient_name", "ingredient_quantity", "unit");
result = result.replaceAll("ingredient_name", "name");
result = result.replaceAll("ingredient_quantity", "amount");
return result;
}
public String getCookies(Request req, Response res) {
return "{\"cookies\":[]}";
String result = selectQuery("cookies", "cookies", "cookie_name");
result = result.replaceAll("cookie_name", "name");
return result;
}
public String getRecipes(Request req, Response res) {
// Essentially serialize DefaultRecipes to json
return "{}";
}
public String getPallets(Request req, Response res) {
// These queries look like:
// http://localhost:8888/api/v1/pallets?cookie=Nut+cookie&from=2024-05-23&to=2024-05-30&blocked=yes
// They may contain any combination of the parameters, or none at all.
Optional<Recipe> r = Optional.empty(); // Holds the recipe if we have a cookie
Optional<Boolean> blocked = Optional.empty(); // Holds the blocked status
Optional<Date> from = Optional.empty(); // Holds the from date
Optional<Date> to = Optional.empty(); // Holds the to date
// Parameter validation block
{
// First we need the cookie parameter
String cookie = req.queryParams("cookie");
// And the blocked parameter
String blocked_str = req.queryParams("blocked");
// Then we need the date parameters
String from_str = req.queryParams("from");
String to_str = req.queryParams("to");
// Fancy functional one-liner to get the recipe if the cookie is present
if (cookie != null) {
r = Optional.ofNullable(DefaultRecipes.recipes.stream()
.filter(recipe -> recipe.name.equals(cookie))
.findFirst().orElse(null));
}
if (blocked_str != null) {
blocked = switch (blocked_str) {
case "yes" -> Optional.of(true);
case "no" -> Optional.of(false);
default -> Optional.empty();
};
}
if (from_str != null) {
try {
from = Optional.of(new SimpleDateFormat("yyyy-MM-dd").parse(from_str));
} catch (Exception e) {
from = Optional.empty();
}
}
if (to_str != null) {
try {
to = Optional.of(new SimpleDateFormat("yyyy-MM-dd").parse(to_str));
} catch (Exception e) {
to = Optional.empty();
}
}
// If the interval is negative, reset the dates
if (from.isPresent() && to.isPresent() && from.get().after(to.get())) {
from = Optional.empty();
to = Optional.empty();
}
}
// This type of code is unreadable, error-prone and hard to maintain.
// The fact that I'm responsible for this code makes my soul hurt.
// This part almost made me write a simple query factory to handle this.
//
// SqlBuilder exists to 'take the pain out of generating SQL queries',
// but it's not in the standard library.
//
// Helmets, seatbelts and safety goggles on; we need to execute a query.
try {
Statement stmt = conn.createStatement();
StringBuilder query = new StringBuilder(
"SELECT cookie_name, status FROM pallets_view");
// r is validated here
if (r.isPresent()) {
query.append(" WHERE cookie_name = '" + r.get().name + "'");
}
if (from.isPresent()) {
String query_from = new SimpleDateFormat("yyyy-MM-dd").format(from.get());
// Super hacky, low quality code
String clause = query.toString().contains("WHERE") ? " AND " : " WHERE ";
query.append(clause + "production_date >= '" + query_from + "'");
}
if (to.isPresent()) {
String query_to = new SimpleDateFormat("yyyy-MM-dd").format(to.get());
// Super hacky, low quality code
String clause = query.toString().contains("WHERE") ? " AND " : " WHERE ";
query.append(clause + "production_date <= '" + query_to + "'");
}
if (blocked.isPresent()) {
// This again
String clause = query.toString().contains("WHERE") ? " AND " : " WHERE ";
query.append(clause);
// TODO: WARNING This logic is flawed. WARNING
// Remember, status can be 'freezer', 'delivered' or 'blocked'
query.append("status = " + (blocked.get() ? "'blocked'" : "'freezer'"));
}
System.out.println(query.toString());
ResultSet result = stmt.executeQuery(query.toString());
// Rename the columns
String jsonResult = Jsonizer.toJson(result, "pallets");
// Some carmack level code, as usual
jsonResult = jsonResult.replaceAll("cookie_name", "cookie");
jsonResult = jsonResult.replaceAll("freezer", "no");
jsonResult = jsonResult.replaceAll("delivered", "no");
jsonResult = jsonResult.replaceAll("blocked", "yes");
jsonResult = jsonResult.replaceAll("status", "blocked");
return jsonResult;
} catch (SQLException e) {
System.out.printf("Error executing query: \n%s", e);
}
// Status 500, to give the client a
// chance to figure out that something went wrong
res.status(500);
return "{\"pallets\":[]}";
}
public String reset(Request req, Response res) {
try {
this.migrateScript("Migrations/create-schema.sql");
this.migrateScript("Migrations/initial-data.sql");
} catch (Exception e) {
System.out.printf("Error resetting database: \n%s", e);
res.status(500);
return "{}";
}
return "{}";
}
public String createPallet(Request req, Response res) {
// This one only has one query param and looks like:
// http://localhost:8888/api/v1/pallets?cookie=Amneris
Optional<Recipe> r = Optional.empty();
String cookie = req.queryParams("cookie");
if (cookie != null) {
r = Optional.ofNullable(DefaultRecipes.recipes.stream()
.filter(recipe -> recipe.name.equals(cookie))
.findFirst().orElse(null));
}
if (r.isEmpty()) {
res.status(404);
return "{}";
}
try (PreparedStatement getRawMaterials = conn
.prepareStatement("SELECT * FROM raw_materials WHERE ingredient_name = ?");
PreparedStatement decrementRawMaterials = conn.prepareStatement(
"UPDATE raw_materials SET ingredient_quantity = ingredient_quantity - ? WHERE ingredient_name = ?");
PreparedStatement insertPallet = conn.prepareStatement(
"INSERT INTO pallets (cookie_id, production_date, status) VALUES (?, ?, ?)");
PreparedStatement getCookieId = conn
.prepareStatement("SELECT cookie_id FROM cookies WHERE cookie_name = ?")) {
// Start transaction
conn.setAutoCommit(false);
for (Ingredient i : r.get().ingredients) {
getRawMaterials.setString(1, i.name);
ResultSet result = getRawMaterials.executeQuery();
if (!result.next()) {
conn.rollback();
res.status(500);
return "{}";
}
int amount_per_pallet = i.amount * 54; // 54 batches per pallet (recipe amounts appear to be per 100 cookies; 54 * 100 = 5400)
// Check if we have enough raw materials
if (result.getInt("ingredient_quantity") < amount_per_pallet) {
conn.rollback();
res.status(500);
return "{}";
}
decrementRawMaterials.setInt(1, amount_per_pallet);
decrementRawMaterials.setString(2, i.name);
decrementRawMaterials.executeUpdate();
}
// Fish out the cookie id
getCookieId.setString(1, cookie);
ResultSet cookie_rs = getCookieId.executeQuery();
if (!cookie_rs.next()) {
conn.rollback();
res.status(500);
return "{}";
}
int cookie_id = cookie_rs.getInt("cookie_id");
insertPallet.setInt(1, cookie_id);
insertPallet.setString(2, new SimpleDateFormat("yyyy-MM-dd").format(new Date()));
insertPallet.setString(3, "freezer");
System.out.println(insertPallet.toString());
insertPallet.executeUpdate();
conn.commit();
} catch (SQLException e) {
System.out.printf("Error starting transaction: \n%s", e);
}
res.status(201);
return "{}";
}
@@ -64,40 +292,66 @@ public class Database {
}
}
// The script location is relative to the gradle
// build script ("build.gradle.kts", in this case).
/** Reads an sql script into the database */
public void migrateScript(String filename) {
// Open the file
StringBuilder script = new StringBuilder();
try {
File myObj = new File(filename);
Scanner myReader = new Scanner(myObj);
/**
* Selects columns from a table and returns the result as a JSON string.
* Does _absolutely no_ query sanitization, so be careful with user input.
*/
private String selectQuery(String table, String jsonName, String... columns) {
String jsonResult = "{}"; // Valid json to return if fail
while (myReader.hasNextLine()) {
String data = myReader.nextLine();
script.append(data);
try {
Statement stmt = this.conn.createStatement();
StringBuilder query = new StringBuilder("SELECT ");
StringJoiner args = new StringJoiner(", ");
for (String column : columns) {
args.add(column);
}
myReader.close();
} catch (FileNotFoundException e) {
System.out.println("Migration script not found...");
e.printStackTrace();
} finally {
System.out.println("Migration script read successfully...");
query.append(args.toString());
query.append(" FROM " + table + ";");
/* Sanitization is for cowards */
ResultSet result = stmt.executeQuery(query.toString());
jsonResult = Jsonizer.toJson(result, jsonName);
} catch (SQLException e) {
System.out.printf("Error executing query: \n%s", e);
}
// Execute the script
try {
conn.setAutoCommit(false);
Statement stmt = conn.createStatement();
stmt.execute(script.toString());
conn.commit();
conn.setAutoCommit(true);
return jsonResult;
}
// The script location is relative to the gradle
// build script ("build.gradle.kts", in this case).
// Assumes every statement ends with a semicolon. (notably broken for triggers)
/** Reads an sql script into the database */
public void migrateScript(String filename) throws IOException, SQLException {
try (Stream<String> lines = Files.lines(Paths.get(filename))) {
// Combine into one big string, with all comments and empty lines removed.
String[] statements = lines.filter(line -> !line.startsWith("--") && !line.isBlank())
.map(line -> line.replaceAll("--.*", "").replaceAll("\\s+", " ").trim())
.collect(Collectors.joining("\n")).split(";");
for (String query : statements) {
try (Statement statement = conn.createStatement()) {
statement.execute(query);
statement.close();
} catch (SQLException e) {
System.err.println("Error executing script: " + e.getMessage());
throw e;
}
}
System.out.println(String.format("Executed script %s", filename));
} catch (IOException e) {
System.err.println("Error reading script file: " + e.getMessage());
throw e;
} catch (SQLException e) {
System.out.println("Error executing migration script...");
e.printStackTrace();
} finally {
System.out.println("Migration script executed successfully...");
String prepend = String.format("Error executing script: %s", filename);
System.err.println(prepend + e.getMessage());
throw e;
}
}
}
}

View file

@@ -0,0 +1,57 @@
package krusty;
import java.util.Arrays;
import java.util.List;
public class DefaultRecipes {
public static List<Recipe> recipes = Arrays.asList(
new Recipe("Nut ring",
new Ingredient[] {
new Ingredient("Flour", 450, "g"),
new Ingredient("Butter", 450, "g"),
new Ingredient("Icing sugar", 190, "g"),
new Ingredient("Roasted, chopped nuts", 225, "g")
}),
new Recipe("Nut cookie",
new Ingredient[] {
new Ingredient("Fine-ground nuts", 750, "g"),
new Ingredient("Ground, roasted nuts", 625, "g"),
new Ingredient("Bread crumbs", 125, "g"),
new Ingredient("Sugar", 375, "g"),
new Ingredient("Egg Whites", 350, "ml"),
new Ingredient("Chocolate", 50, "g")
}),
new Recipe("Amneris",
new Ingredient[] {
new Ingredient("Marzipan", 750, "g"),
new Ingredient("Butter", 250, "g"),
new Ingredient("Eggs", 250, "g"),
new Ingredient("Potato starch", 25, "g"),
new Ingredient("Wheat flour", 25, "g")
}),
new Recipe("Tango",
new Ingredient[] {
new Ingredient("Butter", 200, "g"),
new Ingredient("Sugar", 250, "g"),
new Ingredient("Flour", 300, "g"),
new Ingredient("Sodium bicarbonate", 4, "g"),
new Ingredient("Vanilla", 2, "g")
}),
new Recipe("Almond delight",
new Ingredient[] {
new Ingredient("Butter", 400, "g"),
new Ingredient("Sugar", 270, "g"),
new Ingredient("Chopped almonds", 279, "g"),
new Ingredient("Flour", 400, "g"),
new Ingredient("Cinnamon", 10, "g")
}),
new Recipe("Berliner",
new Ingredient[] {
new Ingredient("Flour", 350, "g"),
new Ingredient("Butter", 250, "g"),
new Ingredient("Icing sugar", 100, "g"),
new Ingredient("Eggs", 50, "g"),
new Ingredient("Vanilla sugar", 5, "g"),
new Ingredient("Chocolate", 50, "g")
}));
}

View file

@@ -0,0 +1,16 @@
package krusty;
public class Ingredient {
public String name, unit;
public int amount;
public Ingredient(String name, int amount, String unit) {
this.name = name;
this.amount = amount;
this.unit = unit;
}
public String toString() {
return String.format("%s: %d %s", name, amount, unit);
}
}

View file

@@ -0,0 +1,21 @@
package krusty;
public class Recipe {
public String name;
public Ingredient ingredients[];
public Recipe(String name, Ingredient[] ingredients) {
this.name = name;
this.ingredients = ingredients;
}
public String toString() {
StringBuilder sb = new StringBuilder(name + ": ");
for (Ingredient i : ingredients) {
sb.append(i.toString());
sb.append(" ");
}
return sb.toString();
}
}

View file

@@ -19,7 +19,12 @@ public class ServerMain {
db.connect();
// Here, we can migrate an arbitrary number of SQL scripts.
db.migrateScript("CrustyCookies/Migrations/0010_users.sql");
try {
db.migrateScript("Migrations/create-schema.sql");
db.migrateScript("Migrations/initial-data.sql");
} catch (Exception e) {
throw new IOError(e);
}
port(PORT);

View file

@@ -1,10 +1,39 @@
GITHASH := $(shell git rev-parse --short HEAD)
build:
./gradlew shadowJar
run:
./gradlew run
clean:
./gradlew clean
rm -f *.tar.gz *.tar.gz.minisig *.zip *.jpg
rm -f app/krusty.db
test:
./gradlew test
.PHONY: run clean test
dbdump:
sqlite3 app/krusty.db .dump
migrate:
rm -f app/krusty.db
sqlite3 app/krusty.db < app/Migrations/create-schema.sql
sqlite3 app/krusty.db < app/Migrations/initial-data.sql
release:
git ls-files -z | xargs -0 tar -czf krusty-imbus_$(GITHASH).tar.gz
minisign -Sm krusty-imbus_$(GITHASH).tar.gz
scp krusty-imbus_$(GITHASH).tar.gz server:/public/krusty/krusty-imbus_$(GITHASH).tar.gz
scp krusty-imbus_$(GITHASH).tar.gz.minisig server:/public/krusty/krusty-imbus_$(GITHASH).tar.gz.minisig
zip:
git archive --format=zip --prefix Rest11/ --output=Rest11.zip HEAD
7za a -tzip CourseProject11.zip ./app/Migrations/*.sql
# Generate ERD. Requires eralchemy2 (pip install eralchemy2)
erd: migrate
eralchemy2 -i sqlite:///app/krusty.db -o erd.jpg
.PHONY: run clean test build dbdump migrate release erd