Merge pull request 'backend rewritten in rust' (#11) from new-backend into main
Reviewed-on: #11
Commit: edcc2e7304
.cargo/config.toml (new file, 2 lines)
@@ -0,0 +1,2 @@
[alias]
xtask = "run --package xtask --"
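With this alias, `cargo xtask <command>` expands to `cargo run --package xtask -- <command>`, so the helper commands defined in crates/xtask later in this diff (for example `cargo xtask backend` or `cargo xtask entity generate`) can be run from anywhere in the workspace.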
@@ -3,6 +3,10 @@ DB_URL= # JDBC URL
DB_USER=
DB_PASSWORD=

# JWT secret key
JWT_SECRET_KEY=
JWT_EXPIRATION_TIME=

# Minio S3 section
MINIO_ACCESS_KEY=
MINIO_SECRET_KEY=
.gitignore (vendored, 3 lines)
@@ -38,3 +38,6 @@ out/

### node-modules ###
**/node_modules/

### target ###
/target
@ -1,13 +0,0 @@
|
|||
<component name="ProjectRunConfigurationManager">
|
||||
<configuration default="false" name="start frontend (dev)" type="js.build_tools.npm">
|
||||
<package-json value="$PROJECT_DIR$/web/package.json" />
|
||||
<command value="run" />
|
||||
<scripts>
|
||||
<script value="dev" />
|
||||
</scripts>
|
||||
<arguments value="--host" />
|
||||
<node-interpreter value="project" />
|
||||
<envs />
|
||||
<method v="2" />
|
||||
</configuration>
|
||||
</component>
|
|
@ -1,11 +0,0 @@
|
|||
<component name="ProjectRunConfigurationManager">
|
||||
<configuration default="false" name="start server" type="Application" factoryName="Application">
|
||||
<option name="ALTERNATIVE_JRE_PATH" value="17" />
|
||||
<option name="ALTERNATIVE_JRE_PATH_ENABLED" value="true" />
|
||||
<option name="MAIN_CLASS_NAME" value="com.mixel.docusphere.DocuSphereApplication" />
|
||||
<module name="DocuSphere.server.main" />
|
||||
<method v="2">
|
||||
<option name="Make" enabled="true" />
|
||||
</method>
|
||||
</configuration>
|
||||
</component>
|
Cargo.lock (generated, new file, 3580 lines) — diff suppressed because it is too large
Cargo.toml (new file, 8 lines)
@@ -0,0 +1,8 @@
[workspace]
resolver = "2"
members = [
    "crates/backend",
    "crates/migration",
    "crates/entity",
    "crates/xtask",
]
crates/backend/Cargo.toml (new file, 18 lines)
@@ -0,0 +1,18 @@
[package]
name = "backend"
version = "0.1.0"
edition = "2021"

[dependencies]
sea-orm = { version = "1.0.1", features = [
    "sqlx-postgres",
    "runtime-tokio-rustls",
    "macros",
] }
tokio = { version = "1", features = ["full"] }
dotenvy = "*"
entity = { path = "../entity" }
actix-web = "4"
actix-cors = "0.7"
serde = { version = "*", features = ["derive"] }
argon2 = { version = "*" }
crates/backend/src/controller.rs (new file, 1 line)
@@ -0,0 +1 @@
pub mod user;
crates/backend/src/controller/user.rs (new file, 132 lines)
|
@ -0,0 +1,132 @@
|
|||
use actix_web::{error::ErrorInternalServerError, web, HttpResponse, Responder};
|
||||
use argon2::{
|
||||
password_hash::{rand_core::OsRng, PasswordHasher, SaltString},
|
||||
Argon2,
|
||||
};
|
||||
use entity::users;
|
||||
use sea_orm::{
|
||||
entity::prelude::DateTime, prelude::Uuid, ActiveModelTrait, ActiveValue, EntityTrait,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::AppState;
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct UserWithoutPassword {
|
||||
id: Uuid,
|
||||
username: String,
|
||||
name: String,
|
||||
email: String,
|
||||
created_at: DateTime,
|
||||
updated_at: DateTime,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct CreateUserDto {
|
||||
username: String,
|
||||
name: String,
|
||||
email: String,
|
||||
password: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct UpdateUserDto {
|
||||
username: String,
|
||||
name: String,
|
||||
email: String,
|
||||
}
|
||||
|
||||
impl From<users::Model> for UserWithoutPassword {
|
||||
fn from(value: users::Model) -> Self {
|
||||
Self {
|
||||
id: value.user_id,
|
||||
username: value.username,
|
||||
name: value.name,
|
||||
email: value.email,
|
||||
created_at: value.created_at,
|
||||
updated_at: value.updated_at,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_users(state: web::Data<AppState>) -> actix_web::Result<impl Responder> {
|
||||
let db = &state.db;
|
||||
|
||||
let result = users::Entity::find()
|
||||
.all(db)
|
||||
.await
|
||||
.map_err(ErrorInternalServerError)?;
|
||||
|
||||
Ok(web::Json(result))
|
||||
}
|
||||
|
||||
pub async fn create_user(
|
||||
state: web::Data<AppState>,
|
||||
user: web::Json<CreateUserDto>,
|
||||
) -> actix_web::Result<impl Responder> {
|
||||
let db = &state.db;
|
||||
let user = user.into_inner();
|
||||
|
||||
let salt = SaltString::generate(&mut OsRng);
|
||||
let argon2 = Argon2::default();
|
||||
|
||||
let password_hash = argon2
|
||||
.hash_password(user.password.as_bytes(), &salt)
|
||||
.map_err(ErrorInternalServerError)?;
|
||||
|
||||
let user = users::ActiveModel {
|
||||
user_id: ActiveValue::NotSet,
|
||||
username: ActiveValue::Set(user.username),
|
||||
name: ActiveValue::Set(user.name),
|
||||
email: ActiveValue::Set(user.email),
|
||||
password_hash: ActiveValue::Set(password_hash.to_string()),
|
||||
created_at: ActiveValue::NotSet,
|
||||
updated_at: ActiveValue::NotSet,
|
||||
};
|
||||
|
||||
let user = user.insert(db).await.map_err(ErrorInternalServerError)?;
|
||||
|
||||
Ok(web::Json(UserWithoutPassword::from(user)))
|
||||
}
|
||||
|
||||
pub async fn delete_user(
|
||||
state: web::Data<AppState>,
|
||||
path: web::Path<Uuid>,
|
||||
) -> actix_web::Result<impl Responder> {
|
||||
let id = path.into_inner();
|
||||
|
||||
let db = &state.db;
|
||||
|
||||
entity::users::Entity::delete_by_id(id)
|
||||
.exec(db)
|
||||
.await
|
||||
.map_err(ErrorInternalServerError)?;
|
||||
|
||||
Ok(HttpResponse::Ok().finish())
|
||||
}
|
||||
|
||||
pub async fn update_user(
|
||||
state: web::Data<AppState>,
|
||||
path: web::Path<Uuid>,
|
||||
user: web::Json<UpdateUserDto>,
|
||||
) -> actix_web::Result<impl Responder> {
|
||||
let id = path.into_inner();
|
||||
|
||||
let db = &state.db;
|
||||
|
||||
let user = user.into_inner();
|
||||
|
||||
let user = users::ActiveModel {
|
||||
user_id: ActiveValue::Unchanged(id),
|
||||
username: ActiveValue::Set(user.username),
|
||||
name: ActiveValue::Set(user.name),
|
||||
email: ActiveValue::Set(user.email),
|
||||
password_hash: ActiveValue::NotSet,
|
||||
created_at: ActiveValue::NotSet,
|
||||
updated_at: ActiveValue::NotSet,
|
||||
};
|
||||
|
||||
let res = user.update(db).await.map_err(ErrorInternalServerError)?;
|
||||
|
||||
Ok(web::Json(res))
|
||||
}
|
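The `users::Model` entity derives `Serialize` and carries `password_hash`, so returning it directly (as `get_users` does) serializes the hash, while `create_user` maps into `UserWithoutPassword`. A minimal sketch, assuming the same imports and `AppState` as in the file above, of reusing that mapping in `get_users`:

```rust
pub async fn get_users(state: web::Data<AppState>) -> actix_web::Result<impl Responder> {
    let db = &state.db;

    // Fetch all users, then convert each Model into the password-free DTO
    // before serializing the result into the JSON response.
    let result: Vec<UserWithoutPassword> = users::Entity::find()
        .all(db)
        .await
        .map_err(ErrorInternalServerError)?
        .into_iter()
        .map(UserWithoutPassword::from)
        .collect();

    Ok(web::Json(result))
}
```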
crates/backend/src/main.rs (new file, 44 lines)
@@ -0,0 +1,44 @@
use std::env;

use actix_web::{web, App, HttpServer};
use sea_orm::{Database, DatabaseConnection};

mod controller;
mod routes;

#[derive(Clone)]
struct AppState {
    db: DatabaseConnection,
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    #[cfg(debug_assertions)]
    println!("Running debug build -> enabling permissive CORS");

    dotenvy::dotenv().ok();

    let db_url = env::var("DATABASE_URL").expect("Env DATABASE_URL must be set");

    let conn = Database::connect(&db_url)
        .await
        .expect("Connecting to Database failed");

    let state = AppState { db: conn };

    println!("Listening for connections...");
    HttpServer::new(move || {
        let cors = if cfg!(debug_assertions) {
            actix_cors::Cors::permissive()
        } else {
            actix_cors::Cors::default()
        };
        App::new()
            .wrap(cors)
            .app_data(web::Data::new(state.clone()))
            .configure(routes::config)
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
crates/backend/src/old.rs (new file)
@@ -0,0 +1,18 @@
use entity::zehner;
use sea_orm::{ActiveModelTrait, ActiveValue, DatabaseConnection, EntityTrait};

// Early experiment kept for reference: insert a sample row into the `zehner`
// table and print everything stored there. Takes an already established
// connection so it can be awaited from an async context.
async fn old(conn: &DatabaseConnection) {
    let model = zehner::ActiveModel {
        id: ActiveValue::not_set(),
        title: ActiveValue::set(String::from("Mika ist der doof")),
        text: ActiveValue::set(Some("Johannes ist auch dabei".to_owned())),
    };

    model.insert(conn).await.unwrap();

    let result = zehner::Entity::find().all(conn).await.unwrap();

    println!("{:#?}", result);
}
crates/backend/src/routes.rs (new file, 20 lines)
@@ -0,0 +1,20 @@
use crate::controller::user;
use actix_web::web::{self};

pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("/api").service(
            web::scope("/users")
                .service(
                    web::resource("")
                        .get(user::get_users)
                        .post(user::create_user),
                )
                .service(
                    web::resource("/{user_id}")
                        .delete(user::delete_user)
                        .put(user::update_user),
                ),
        ),
    );
}
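Mounted through `routes::config` in `main.rs`, this configuration exposes `GET`/`POST /api/users` and `PUT`/`DELETE /api/users/{user_id}`.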
crates/entity/Cargo.toml (new file, 8 lines)
@@ -0,0 +1,8 @@
[package]
name = "entity"
version = "0.1.0"
edition = "2021"

[dependencies]
sea-orm = "1.0.1"
serde = { version = "*", features = ["derive"] }
crates/entity/src/documents.rs (new file, 34 lines)
|
@ -0,0 +1,34 @@
|
|||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.0.1
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "documents")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key, auto_increment = false)]
|
||||
pub document_id: Uuid,
|
||||
pub name: String,
|
||||
pub description: String,
|
||||
pub user_id: Uuid,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::users::Entity",
|
||||
from = "Column::UserId",
|
||||
to = "super::users::Column::UserId",
|
||||
on_update = "Cascade",
|
||||
on_delete = "Cascade"
|
||||
)]
|
||||
Users,
|
||||
}
|
||||
|
||||
impl Related<super::users::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Users.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
crates/entity/src/lib.rs (new file, 8 lines)
@@ -0,0 +1,8 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.0.1

pub mod prelude;

pub mod documents;
pub mod post;
pub mod users;
pub mod zehner;
crates/entity/src/post.rs (new file, 18 lines)
|
@ -0,0 +1,18 @@
|
|||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.0.1
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "post")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: i32,
|
||||
pub title: String,
|
||||
pub text: String,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
crates/entity/src/prelude.rs (new file, 6 lines)
@@ -0,0 +1,6 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.0.1

pub use super::documents::Entity as Documents;
pub use super::post::Entity as Post;
pub use super::users::Entity as Users;
pub use super::zehner::Entity as Zehner;
crates/entity/src/users.rs (new file, 31 lines)
|
@ -0,0 +1,31 @@
|
|||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.0.1
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "users")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key, auto_increment = false)]
|
||||
pub user_id: Uuid,
|
||||
pub username: String,
|
||||
pub name: String,
|
||||
pub email: String,
|
||||
pub password_hash: String,
|
||||
pub created_at: DateTime,
|
||||
pub updated_at: DateTime,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(has_many = "super::documents::Entity")]
|
||||
Documents,
|
||||
}
|
||||
|
||||
impl Related<super::documents::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Documents.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
crates/entity/src/zehner.rs (new file, 18 lines)
|
@ -0,0 +1,18 @@
|
|||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.0.1
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "zehner")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key, auto_increment = false)]
|
||||
pub id: Uuid,
|
||||
pub title: String,
|
||||
pub text: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
crates/migration/Cargo.toml (new file, 22 lines)
@@ -0,0 +1,22 @@
[package]
name = "migration"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
name = "migration"
path = "src/lib.rs"

[dependencies]
async-std = { version = "1", features = ["attributes", "tokio1"] }

[dependencies.sea-orm-migration]
version = "1.0.0"
features = [
    # Enable at least one `ASYNC_RUNTIME` and `DATABASE_DRIVER` feature if you want to run migration via CLI.
    # View the list of supported features at https://www.sea-ql.org/SeaORM/docs/install-and-config/database-and-async-runtime.
    # e.g.
    "runtime-tokio-rustls", # `ASYNC_RUNTIME` feature
    "sqlx-postgres",        # `DATABASE_DRIVER` feature
]
crates/migration/README.md (new file, 41 lines)
|
@ -0,0 +1,41 @@
|
|||
# Running Migrator CLI
|
||||
|
||||
- Generate a new migration file
|
||||
```sh
|
||||
cargo run -- generate MIGRATION_NAME
|
||||
```
|
||||
- Apply all pending migrations
|
||||
```sh
|
||||
cargo run
|
||||
```
|
||||
```sh
|
||||
cargo run -- up
|
||||
```
|
||||
- Apply first 10 pending migrations
|
||||
```sh
|
||||
cargo run -- up -n 10
|
||||
```
|
||||
- Rollback last applied migrations
|
||||
```sh
|
||||
cargo run -- down
|
||||
```
|
||||
- Rollback last 10 applied migrations
|
||||
```sh
|
||||
cargo run -- down -n 10
|
||||
```
|
||||
- Drop all tables from the database, then reapply all migrations
|
||||
```sh
|
||||
cargo run -- fresh
|
||||
```
|
||||
- Rollback all applied migrations, then reapply all migrations
|
||||
```sh
|
||||
cargo run -- refresh
|
||||
```
|
||||
- Rollback all applied migrations
|
||||
```sh
|
||||
cargo run -- reset
|
||||
```
|
||||
- Check the status of all migrations
|
||||
```sh
|
||||
cargo run -- status
|
||||
```
|
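All of these commands assume a database connection string is available; with the default sea-orm-migration CLI setup it is read from the `DATABASE_URL` environment variable (the same variable the backend's `main.rs` expects), for example `DATABASE_URL=postgres://user:password@localhost/db cargo run -- status` with placeholder credentials.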
crates/migration/src/lib.rs (new file, 18 lines)
@@ -0,0 +1,18 @@
pub use sea_orm_migration::prelude::*;

mod m20220101_000001_create_table;
mod m20241003_175716_table_users;
mod m20241003_175719_table_documents;

pub struct Migrator;

#[async_trait::async_trait]
impl MigratorTrait for Migrator {
    fn migrations() -> Vec<Box<dyn MigrationTrait>> {
        vec![
            Box::new(m20220101_000001_create_table::Migration),
            Box::new(m20241003_175716_table_users::Migration),
            Box::new(m20241003_175719_table_documents::Migration),
        ]
    }
}
crates/migration/src/m20220101_000001_create_table.rs (new file, 70 lines)
|
@ -0,0 +1,70 @@
|
|||
use sea_orm_migration::{prelude::*, schema::*};
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
// Replace the sample below with your own migration scripts
|
||||
|
||||
manager
|
||||
.create_table(
|
||||
Table::create()
|
||||
.table(Post::Table)
|
||||
.if_not_exists()
|
||||
.col(pk_auto(Post::Id))
|
||||
.col(string(Post::Title))
|
||||
.col(string(Post::Text))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_table(
|
||||
Table::create()
|
||||
.table(Zehner::Table)
|
||||
.if_not_exists()
|
||||
.col(
|
||||
uuid(Zehner::Id)
|
||||
.extra("DEFAULT gen_random_uuid()")
|
||||
.primary_key(),
|
||||
)
|
||||
.col(string(Zehner::Title).default("Todo"))
|
||||
.col(string_null(Zehner::Text))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
// Replace the sample below with your own migration scripts
|
||||
|
||||
manager
|
||||
.drop_table(Table::drop().table(Post::Table).to_owned())
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.drop_table(Table::drop().table(Zehner::Table).to_owned())
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(DeriveIden)]
|
||||
enum Post {
|
||||
Table,
|
||||
Id,
|
||||
Title,
|
||||
Text,
|
||||
}
|
||||
|
||||
#[derive(DeriveIden)]
|
||||
enum Zehner {
|
||||
Table,
|
||||
Id,
|
||||
Title,
|
||||
Text,
|
||||
}
|
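Note that `.extra("DEFAULT gen_random_uuid()")`, used here and in the two migrations below, relies on PostgreSQL's built-in `gen_random_uuid()`, which is available without extensions from PostgreSQL 13 onwards (older versions need the `pgcrypto` extension).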
crates/migration/src/m20241003_175716_table_users.rs (new file, 48 lines)
|
@ -0,0 +1,48 @@
|
|||
use sea_orm_migration::{prelude::*, schema::*};
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
// Replace the sample below with your own migration scripts
|
||||
|
||||
manager
|
||||
.create_table(timestamps(
|
||||
Table::create()
|
||||
.table(Users::Table)
|
||||
.if_not_exists()
|
||||
.col(
|
||||
uuid(Users::UserId)
|
||||
.extra("DEFAULT gen_random_uuid()")
|
||||
.primary_key(),
|
||||
)
|
||||
.col(string(Users::Username))
|
||||
.col(string(Users::Name))
|
||||
.col(string(Users::Email))
|
||||
.col(string(Users::PasswordHash))
|
||||
.to_owned(),
|
||||
))
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
// Replace the sample below with your own migration scripts
|
||||
|
||||
manager
|
||||
.drop_table(Table::drop().table(Users::Table).to_owned())
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(DeriveIden)]
|
||||
enum Users {
|
||||
Table,
|
||||
UserId,
|
||||
Username,
|
||||
Name,
|
||||
Email,
|
||||
PasswordHash,
|
||||
}
|
crates/migration/src/m20241003_175719_table_documents.rs (new file, 60 lines)
|
@ -0,0 +1,60 @@
|
|||
use sea_orm_migration::{prelude::*, schema::*};
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
// Replace the sample below with your own migration scripts
|
||||
|
||||
manager
|
||||
.create_table(
|
||||
Table::create()
|
||||
.table(Documents::Table)
|
||||
.if_not_exists()
|
||||
.col(
|
||||
uuid(Documents::DocumentId)
|
||||
.extra("DEFAULT gen_random_uuid()")
|
||||
.primary_key(),
|
||||
)
|
||||
.col(string(Documents::Name))
|
||||
.col(string(Documents::Description))
|
||||
.col(uuid(Documents::UserId))
|
||||
.foreign_key(
|
||||
ForeignKey::create()
|
||||
.name("fk-users-user_id")
|
||||
.from(Documents::Table, Documents::UserId)
|
||||
.to(Users::Table, Users::UserId)
|
||||
.on_update(ForeignKeyAction::Cascade)
|
||||
.on_delete(ForeignKeyAction::Cascade),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
// Replace the sample below with your own migration scripts
|
||||
|
||||
manager
|
||||
.drop_table(Table::drop().table(Documents::Table).to_owned())
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(DeriveIden)]
|
||||
enum Documents {
|
||||
Table,
|
||||
DocumentId,
|
||||
Name,
|
||||
Description,
|
||||
UserId,
|
||||
}
|
||||
|
||||
#[derive(DeriveIden)]
|
||||
enum Users {
|
||||
Table,
|
||||
UserId,
|
||||
}
|
crates/migration/src/main.rs (new file, 6 lines)
@@ -0,0 +1,6 @@
use sea_orm_migration::prelude::*;

#[async_std::main]
async fn main() {
    cli::run_cli(migration::Migrator).await;
}
crates/xtask/Cargo.toml (new file, 9 lines)
@@ -0,0 +1,9 @@
[package]
name = "xtask"
version = "0.1.0"
edition = "2021"


[dependencies]
clap = { version = "*" }
ctrlc = { version = "*" }
crates/xtask/src/main.rs (new file, 83 lines)
|
@ -0,0 +1,83 @@
|
|||
use std::{
|
||||
env::{current_dir, var_os},
|
||||
path::PathBuf,
|
||||
process,
|
||||
};
|
||||
|
||||
use clap::Command;
|
||||
|
||||
fn main() {
|
||||
let workspace_dir = var_os("CARGO_WORKSPACE_DIR")
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|| current_dir().unwrap());
|
||||
|
||||
let matches = cli().get_matches();
|
||||
|
||||
match matches.subcommand() {
|
||||
Some(("backend", _)) => {
|
||||
process::Command::new("cargo")
|
||||
.arg("run")
|
||||
.arg("-p")
|
||||
.arg("backend")
|
||||
.current_dir(&workspace_dir)
|
||||
.stdout(process::Stdio::inherit())
|
||||
.stderr(process::Stdio::inherit())
|
||||
.status()
|
||||
.expect("running backend");
|
||||
}
|
||||
Some(("entity", submatches)) => match submatches.subcommand() {
|
||||
Some(("generate", _)) => {
|
||||
process::Command::new("sea-orm-cli")
|
||||
.arg("generate")
|
||||
.arg("entity")
|
||||
.arg("-o")
|
||||
.arg("crates/entity/src/")
|
||||
.arg("--lib")
|
||||
.arg("--with-serde")
|
||||
.arg("both")
|
||||
.current_dir(&workspace_dir)
|
||||
.stdout(process::Stdio::inherit())
|
||||
.stderr(process::Stdio::inherit())
|
||||
.status()
|
||||
.expect("running entity generate");
|
||||
}
|
||||
Some(("clean", _)) => {
|
||||
let dir = workspace_dir.join("crates/entity/src");
|
||||
let files = dir.read_dir().expect("Failed to read entity directory");
|
||||
for file in files {
|
||||
let file = file.expect("failed to get file path");
|
||||
if file.file_name() == "lib.rs" {
|
||||
continue;
|
||||
}
|
||||
let file_path = file.path();
|
||||
match std::fs::remove_file(&file_path) {
|
||||
Ok(_) => println!("Removed file {}", file_path.display()),
|
||||
Err(_) => println!("Failed to remove file {}", file_path.display()),
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
panic!(
|
||||
"Unknown command: entity {:?}",
|
||||
submatches.subcommand().map(|c| c.0)
|
||||
)
|
||||
}
|
||||
},
|
||||
_ => {
|
||||
panic!("Unknown command: {:?}", matches.subcommand().map(|c| c.0))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn cli() -> Command {
|
||||
Command::new("xtask")
|
||||
.about("docusphere useful commands")
|
||||
.subcommand_required(true)
|
||||
.subcommand(Command::new("backend"))
|
||||
.subcommand(
|
||||
Command::new("entity")
|
||||
.subcommand_required(true)
|
||||
.subcommand(Command::new("generate"))
|
||||
.subcommand(Command::new("clean")),
|
||||
)
|
||||
}
|
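`cargo xtask entity generate` shells out to `sea-orm-cli`, so that binary has to be installed separately (typically via `cargo install sea-orm-cli`) and needs a reachable database to introspect; by default it also reads the connection string from `DATABASE_URL`.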
gradle/wrapper/gradle-wrapper.jar (vendored, deleted) — binary file not shown
gradle/wrapper/gradle-wrapper.properties (vendored, deleted, 7 lines)
@@ -1,7 +0,0 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.8-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
gradlew (vendored, deleted, 249 lines)
|
@ -1,249 +0,0 @@
|
|||
#!/bin/sh
|
||||
|
||||
#
|
||||
# Copyright © 2015-2021 the original authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
##############################################################################
|
||||
#
|
||||
# Gradle start up script for POSIX generated by Gradle.
|
||||
#
|
||||
# Important for running:
|
||||
#
|
||||
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
|
||||
# noncompliant, but you have some other compliant shell such as ksh or
|
||||
# bash, then to run this script, type that shell name before the whole
|
||||
# command line, like:
|
||||
#
|
||||
# ksh Gradle
|
||||
#
|
||||
# Busybox and similar reduced shells will NOT work, because this script
|
||||
# requires all of these POSIX shell features:
|
||||
# * functions;
|
||||
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
|
||||
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
|
||||
# * compound commands having a testable exit status, especially «case»;
|
||||
# * various built-in commands including «command», «set», and «ulimit».
|
||||
#
|
||||
# Important for patching:
|
||||
#
|
||||
# (2) This script targets any POSIX shell, so it avoids extensions provided
|
||||
# by Bash, Ksh, etc; in particular arrays are avoided.
|
||||
#
|
||||
# The "traditional" practice of packing multiple parameters into a
|
||||
# space-separated string is a well documented source of bugs and security
|
||||
# problems, so this is (mostly) avoided, by progressively accumulating
|
||||
# options in "$@", and eventually passing that to Java.
|
||||
#
|
||||
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
|
||||
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
|
||||
# see the in-line comments for details.
|
||||
#
|
||||
# There are tweaks for specific operating systems such as AIX, CygWin,
|
||||
# Darwin, MinGW, and NonStop.
|
||||
#
|
||||
# (3) This script is generated from the Groovy template
|
||||
# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
|
||||
# within the Gradle project.
|
||||
#
|
||||
# You can find Gradle at https://github.com/gradle/gradle/.
|
||||
#
|
||||
##############################################################################
|
||||
|
||||
# Attempt to set APP_HOME
|
||||
|
||||
# Resolve links: $0 may be a link
|
||||
app_path=$0
|
||||
|
||||
# Need this for daisy-chained symlinks.
|
||||
while
|
||||
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
|
||||
[ -h "$app_path" ]
|
||||
do
|
||||
ls=$( ls -ld "$app_path" )
|
||||
link=${ls#*' -> '}
|
||||
case $link in #(
|
||||
/*) app_path=$link ;; #(
|
||||
*) app_path=$APP_HOME$link ;;
|
||||
esac
|
||||
done
|
||||
|
||||
# This is normally unused
|
||||
# shellcheck disable=SC2034
|
||||
APP_BASE_NAME=${0##*/}
|
||||
# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
|
||||
APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit
|
||||
|
||||
# Use the maximum available, or set MAX_FD != -1 to use that value.
|
||||
MAX_FD=maximum
|
||||
|
||||
warn () {
|
||||
echo "$*"
|
||||
} >&2
|
||||
|
||||
die () {
|
||||
echo
|
||||
echo "$*"
|
||||
echo
|
||||
exit 1
|
||||
} >&2
|
||||
|
||||
# OS specific support (must be 'true' or 'false').
|
||||
cygwin=false
|
||||
msys=false
|
||||
darwin=false
|
||||
nonstop=false
|
||||
case "$( uname )" in #(
|
||||
CYGWIN* ) cygwin=true ;; #(
|
||||
Darwin* ) darwin=true ;; #(
|
||||
MSYS* | MINGW* ) msys=true ;; #(
|
||||
NONSTOP* ) nonstop=true ;;
|
||||
esac
|
||||
|
||||
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
|
||||
|
||||
|
||||
# Determine the Java command to use to start the JVM.
|
||||
if [ -n "$JAVA_HOME" ] ; then
|
||||
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
|
||||
# IBM's JDK on AIX uses strange locations for the executables
|
||||
JAVACMD=$JAVA_HOME/jre/sh/java
|
||||
else
|
||||
JAVACMD=$JAVA_HOME/bin/java
|
||||
fi
|
||||
if [ ! -x "$JAVACMD" ] ; then
|
||||
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
location of your Java installation."
|
||||
fi
|
||||
else
|
||||
JAVACMD=java
|
||||
if ! command -v java >/dev/null 2>&1
|
||||
then
|
||||
die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
location of your Java installation."
|
||||
fi
|
||||
fi
|
||||
|
||||
# Increase the maximum file descriptors if we can.
|
||||
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
|
||||
case $MAX_FD in #(
|
||||
max*)
|
||||
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
|
||||
# shellcheck disable=SC2039,SC3045
|
||||
MAX_FD=$( ulimit -H -n ) ||
|
||||
warn "Could not query maximum file descriptor limit"
|
||||
esac
|
||||
case $MAX_FD in #(
|
||||
'' | soft) :;; #(
|
||||
*)
|
||||
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
|
||||
# shellcheck disable=SC2039,SC3045
|
||||
ulimit -n "$MAX_FD" ||
|
||||
warn "Could not set maximum file descriptor limit to $MAX_FD"
|
||||
esac
|
||||
fi
|
||||
|
||||
# Collect all arguments for the java command, stacking in reverse order:
|
||||
# * args from the command line
|
||||
# * the main class name
|
||||
# * -classpath
|
||||
# * -D...appname settings
|
||||
# * --module-path (only if needed)
|
||||
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
|
||||
|
||||
# For Cygwin or MSYS, switch paths to Windows format before running java
|
||||
if "$cygwin" || "$msys" ; then
|
||||
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
|
||||
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
|
||||
|
||||
JAVACMD=$( cygpath --unix "$JAVACMD" )
|
||||
|
||||
# Now convert the arguments - kludge to limit ourselves to /bin/sh
|
||||
for arg do
|
||||
if
|
||||
case $arg in #(
|
||||
-*) false ;; # don't mess with options #(
|
||||
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
|
||||
[ -e "$t" ] ;; #(
|
||||
*) false ;;
|
||||
esac
|
||||
then
|
||||
arg=$( cygpath --path --ignore --mixed "$arg" )
|
||||
fi
|
||||
# Roll the args list around exactly as many times as the number of
|
||||
# args, so each arg winds up back in the position where it started, but
|
||||
# possibly modified.
|
||||
#
|
||||
# NB: a `for` loop captures its iteration list before it begins, so
|
||||
# changing the positional parameters here affects neither the number of
|
||||
# iterations, nor the values presented in `arg`.
|
||||
shift # remove old arg
|
||||
set -- "$@" "$arg" # push replacement arg
|
||||
done
|
||||
fi
|
||||
|
||||
|
||||
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
|
||||
|
||||
# Collect all arguments for the java command:
|
||||
# * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,
|
||||
# and any embedded shellness will be escaped.
|
||||
# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be
|
||||
# treated as '${Hostname}' itself on the command line.
|
||||
|
||||
set -- \
|
||||
"-Dorg.gradle.appname=$APP_BASE_NAME" \
|
||||
-classpath "$CLASSPATH" \
|
||||
org.gradle.wrapper.GradleWrapperMain \
|
||||
"$@"
|
||||
|
||||
# Stop when "xargs" is not available.
|
||||
if ! command -v xargs >/dev/null 2>&1
|
||||
then
|
||||
die "xargs is not available"
|
||||
fi
|
||||
|
||||
# Use "xargs" to parse quoted args.
|
||||
#
|
||||
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
|
||||
#
|
||||
# In Bash we could simply go:
|
||||
#
|
||||
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
|
||||
# set -- "${ARGS[@]}" "$@"
|
||||
#
|
||||
# but POSIX shell has neither arrays nor command substitution, so instead we
|
||||
# post-process each arg (as a line of input to sed) to backslash-escape any
|
||||
# character that might be a shell metacharacter, then use eval to reverse
|
||||
# that process (while maintaining the separation between arguments), and wrap
|
||||
# the whole thing up as a single "set" statement.
|
||||
#
|
||||
# This will of course break if any of these variables contains a newline or
|
||||
# an unmatched quote.
|
||||
#
|
||||
|
||||
eval "set -- $(
|
||||
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
|
||||
xargs -n1 |
|
||||
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
|
||||
tr '\n' ' '
|
||||
)" '"$@"'
|
||||
|
||||
exec "$JAVACMD" "$@"
|
gradlew.bat (vendored, deleted, 92 lines)
|
@ -1,92 +0,0 @@
|
|||
@rem
|
||||
@rem Copyright 2015 the original author or authors.
|
||||
@rem
|
||||
@rem Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@rem you may not use this file except in compliance with the License.
|
||||
@rem You may obtain a copy of the License at
|
||||
@rem
|
||||
@rem https://www.apache.org/licenses/LICENSE-2.0
|
||||
@rem
|
||||
@rem Unless required by applicable law or agreed to in writing, software
|
||||
@rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
@rem See the License for the specific language governing permissions and
|
||||
@rem limitations under the License.
|
||||
@rem
|
||||
|
||||
@if "%DEBUG%"=="" @echo off
|
||||
@rem ##########################################################################
|
||||
@rem
|
||||
@rem Gradle startup script for Windows
|
||||
@rem
|
||||
@rem ##########################################################################
|
||||
|
||||
@rem Set local scope for the variables with windows NT shell
|
||||
if "%OS%"=="Windows_NT" setlocal
|
||||
|
||||
set DIRNAME=%~dp0
|
||||
if "%DIRNAME%"=="" set DIRNAME=.
|
||||
@rem This is normally unused
|
||||
set APP_BASE_NAME=%~n0
|
||||
set APP_HOME=%DIRNAME%
|
||||
|
||||
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
|
||||
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
|
||||
|
||||
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
|
||||
|
||||
@rem Find java.exe
|
||||
if defined JAVA_HOME goto findJavaFromJavaHome
|
||||
|
||||
set JAVA_EXE=java.exe
|
||||
%JAVA_EXE% -version >NUL 2>&1
|
||||
if %ERRORLEVEL% equ 0 goto execute
|
||||
|
||||
echo. 1>&2
|
||||
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2
|
||||
echo. 1>&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the 1>&2
|
||||
echo location of your Java installation. 1>&2
|
||||
|
||||
goto fail
|
||||
|
||||
:findJavaFromJavaHome
|
||||
set JAVA_HOME=%JAVA_HOME:"=%
|
||||
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
|
||||
|
||||
if exist "%JAVA_EXE%" goto execute
|
||||
|
||||
echo. 1>&2
|
||||
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2
|
||||
echo. 1>&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the 1>&2
|
||||
echo location of your Java installation. 1>&2
|
||||
|
||||
goto fail
|
||||
|
||||
:execute
|
||||
@rem Setup the command line
|
||||
|
||||
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
|
||||
|
||||
|
||||
@rem Execute Gradle
|
||||
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
|
||||
|
||||
:end
|
||||
@rem End local scope for the variables with windows NT shell
|
||||
if %ERRORLEVEL% equ 0 goto mainEnd
|
||||
|
||||
:fail
|
||||
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
|
||||
rem the _cmd.exe /c_ return code!
|
||||
set EXIT_CODE=%ERRORLEVEL%
|
||||
if %EXIT_CODE% equ 0 set EXIT_CODE=1
|
||||
if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
|
||||
exit /b %EXIT_CODE%
|
||||
|
||||
:mainEnd
|
||||
if "%OS%"=="Windows_NT" endlocal
|
||||
|
||||
:omega
|
|
@@ -1,7 +0,0 @@
# Some additional things to consider:

## .env not being read by application.yml
There is currently no way for environment variables to be read from the .env file that the docker-compose.yml file uses.

You have to set the environment variables manually, e.g. in your IDE.
@ -1,40 +0,0 @@
|
|||
plugins {
|
||||
id 'java'
|
||||
id 'org.springframework.boot' version '3.3.3'
|
||||
id 'io.spring.dependency-management' version '1.1.6'
|
||||
}
|
||||
|
||||
group = 'com.mixel'
|
||||
version = '0.0.1-SNAPSHOT'
|
||||
|
||||
java {
|
||||
toolchain {
|
||||
languageVersion = JavaLanguageVersion.of(17)
|
||||
}
|
||||
}
|
||||
|
||||
repositories {
|
||||
mavenCentral()
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation 'org.springframework.boot:spring-boot-starter-web'
|
||||
implementation 'org.springframework.boot:spring-boot-starter-data-jpa'
|
||||
// Argon2 password hashing
|
||||
implementation 'org.springframework.security:spring-security-crypto:6.3.3'
|
||||
implementation 'org.bouncycastle:bcprov-jdk15on:1.70'
|
||||
|
||||
// Dotenv manager
|
||||
implementation 'io.github.cdimascio:java-dotenv:5.2.2'
|
||||
|
||||
// Minio S3 Storage
|
||||
implementation 'io.minio:minio:8.5.12'
|
||||
|
||||
runtimeOnly 'org.postgresql:postgresql'
|
||||
testImplementation 'org.springframework.boot:spring-boot-starter-test'
|
||||
testRuntimeOnly 'org.junit.platform:junit-platform-launcher'
|
||||
}
|
||||
|
||||
tasks.named('test') {
|
||||
useJUnitPlatform()
|
||||
}
|
|
@ -1,14 +0,0 @@
|
|||
package com.mixel.docusphere;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
|
||||
@SpringBootApplication
|
||||
@RestController
|
||||
public class DocuSphereApplication {
|
||||
|
||||
public static void main(String[] args) {
|
||||
SpringApplication.run(DocuSphereApplication.class, args);
|
||||
}
|
||||
}
|
|
@ -1,28 +0,0 @@
|
|||
package com.mixel.docusphere.config;
|
||||
|
||||
import io.github.cdimascio.dotenv.Dotenv;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Primary;
|
||||
import javax.sql.DataSource;
|
||||
import org.springframework.boot.jdbc.DataSourceBuilder;
|
||||
|
||||
@Configuration
|
||||
public class DataSourceConfig {
|
||||
|
||||
@Bean
|
||||
@Primary
|
||||
public DataSource dataSource() {
|
||||
// Load environment variables from .env file
|
||||
Dotenv dotenv = Dotenv.configure().load();
|
||||
|
||||
// Build the DataSource using Dotenv values
|
||||
DataSourceBuilder<?> dataSourceBuilder = DataSourceBuilder.create()
|
||||
.driverClassName("org.postgresql.Driver")
|
||||
.url(dotenv.get("DB_URL"))
|
||||
.username(dotenv.get("DB_USER"))
|
||||
.password(dotenv.get("DB_PASSWORD"));
|
||||
|
||||
return dataSourceBuilder.build();
|
||||
}
|
||||
}
|
|
@ -1,33 +0,0 @@
|
|||
package com.mixel.docusphere.config;
|
||||
|
||||
import io.github.cdimascio.dotenv.Dotenv;
|
||||
import jakarta.annotation.PostConstruct;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
|
||||
@Configuration
|
||||
public class DotenvConfig {
|
||||
|
||||
private final Dotenv dotenv;
|
||||
|
||||
public DotenvConfig() {
|
||||
this.dotenv = Dotenv.configure()
|
||||
.directory("../") // Adjust path as needed
|
||||
.filename(".env")
|
||||
.ignoreIfMissing()
|
||||
.load();
|
||||
}
|
||||
|
||||
@PostConstruct
|
||||
public void init() {
|
||||
dotenv.entries().forEach(entry ->
|
||||
System.setProperty(entry.getKey(), entry.getValue())
|
||||
);
|
||||
System.out.println("Dotenv variables loaded");
|
||||
}
|
||||
|
||||
@Bean
|
||||
public Dotenv dotenv() {
|
||||
return dotenv;
|
||||
}
|
||||
}
|
|
@ -1,60 +0,0 @@
|
|||
package com.mixel.docusphere.controller;
|
||||
|
||||
import com.mixel.docusphere.entity.Document;
|
||||
import com.mixel.docusphere.service.DocumentService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
|
||||
@RestController
|
||||
@RequestMapping("/api/v1/documents")
|
||||
public class DocumentController {
|
||||
|
||||
private final DocumentService documentService;
|
||||
|
||||
@Autowired
|
||||
public DocumentController(DocumentService documentService) {
|
||||
this.documentService = documentService;
|
||||
}
|
||||
|
||||
@GetMapping
|
||||
public List<Document> getAllDocuments() {
|
||||
return documentService.findAll();
|
||||
}
|
||||
|
||||
@GetMapping("/{id}")
|
||||
public ResponseEntity<Document> getDocumentById(@PathVariable UUID id) {
|
||||
Optional<Document> document = documentService.findById(id);
|
||||
return document.map(ResponseEntity::ok).orElseGet(() -> ResponseEntity.notFound().build());
|
||||
}
|
||||
|
||||
@PostMapping
|
||||
public Document createDocument(@RequestBody Document document) {
|
||||
return documentService.save(document);
|
||||
}
|
||||
|
||||
@PutMapping("/{id}")
|
||||
public ResponseEntity<Document> updateDocument(@PathVariable UUID id, @RequestBody Document documentDetails) {
|
||||
Optional<Document> document = documentService.findById(id);
|
||||
if (document.isPresent()) {
|
||||
Document updatedDocument = document.get();
|
||||
updatedDocument.setName(documentDetails.getName());
|
||||
updatedDocument.setS3Path(documentDetails.getS3Path());
|
||||
updatedDocument.setUser(documentDetails.getUser());
|
||||
documentService.save(updatedDocument);
|
||||
return ResponseEntity.ok(updatedDocument);
|
||||
} else {
|
||||
return ResponseEntity.notFound().build();
|
||||
}
|
||||
}
|
||||
|
||||
@DeleteMapping("/{id}")
|
||||
public ResponseEntity<Void> deleteDocument(@PathVariable UUID id) {
|
||||
documentService.deleteById(id);
|
||||
return ResponseEntity.noContent().build();
|
||||
}
|
||||
}
|
|
@ -1,52 +0,0 @@
|
|||
package com.mixel.docusphere.controller;
|
||||
|
||||
import com.mixel.docusphere.dto.UserDTO;
|
||||
import com.mixel.docusphere.entity.User;
|
||||
import com.mixel.docusphere.service.UserService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
|
||||
@RestController
|
||||
@RequestMapping("/api/v1/users")
|
||||
public class UserController {
|
||||
|
||||
private final UserService userService;
|
||||
|
||||
@Autowired
|
||||
public UserController(UserService userService) {
|
||||
this.userService = userService;
|
||||
}
|
||||
|
||||
@GetMapping
|
||||
public List<User> getAllUsers() {
|
||||
return userService.findAll();
|
||||
}
|
||||
|
||||
@GetMapping("/{id}")
|
||||
public ResponseEntity<User> getUserById(@PathVariable UUID id) {
|
||||
Optional<User> user = userService.findById(id);
|
||||
return user.map(ResponseEntity::ok).orElseGet(() -> ResponseEntity.notFound().build());
|
||||
}
|
||||
|
||||
@PostMapping
|
||||
public User createUser(@RequestBody UserDTO userDTO) {
|
||||
return userService.save(userDTO);
|
||||
}
|
||||
|
||||
@PutMapping("/{id}")
|
||||
public ResponseEntity<User> updateUser(@PathVariable UUID id, @RequestBody UserDTO userDTO) {
|
||||
Optional<User> updatedUser = userService.update(id, userDTO);
|
||||
return updatedUser.map(ResponseEntity::ok).orElseGet(() -> ResponseEntity.notFound().build());
|
||||
}
|
||||
|
||||
@DeleteMapping("/{id}")
|
||||
public ResponseEntity<Void> deleteUser(@PathVariable UUID id) {
|
||||
userService.deleteById(id);
|
||||
return ResponseEntity.noContent().build();
|
||||
}
|
||||
}
|
|
@ -1,41 +0,0 @@
|
|||
package com.mixel.docusphere.dto;
|
||||
|
||||
public class DocumentDTO {
|
||||
private String name;
|
||||
private String description;
|
||||
private String s3Path;
|
||||
private String userId;
|
||||
|
||||
// Getters and Setters
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public String getS3Path() {
|
||||
return s3Path;
|
||||
}
|
||||
|
||||
public void setS3Path(String s3Path) {
|
||||
this.s3Path = s3Path;
|
||||
}
|
||||
|
||||
public String getUserId() {
|
||||
return userId;
|
||||
}
|
||||
|
||||
public void setUserId(String userId) {
|
||||
this.userId = userId;
|
||||
}
|
||||
}
|
|
@ -1,41 +0,0 @@
|
|||
package com.mixel.docusphere.dto;
|
||||
|
||||
public class UserDTO {
|
||||
private String username;
|
||||
private String name;
|
||||
private String email;
|
||||
private String password;
|
||||
|
||||
// Getters and Setters
|
||||
public String getUsername() {
|
||||
return username;
|
||||
}
|
||||
|
||||
public void setUsername(String username) {
|
||||
this.username = username;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getEmail() {
|
||||
return email;
|
||||
}
|
||||
|
||||
public void setEmail(String email) {
|
||||
this.email = email;
|
||||
}
|
||||
|
||||
public String getPassword() {
|
||||
return password;
|
||||
}
|
||||
|
||||
public void setPassword(String password) {
|
||||
this.password = password;
|
||||
}
|
||||
}
|
|
@ -1,98 +0,0 @@
|
|||
package com.mixel.docusphere.entity;
|
||||
|
||||
import jakarta.persistence.*;
|
||||
import org.hibernate.annotations.CreationTimestamp;
|
||||
import org.hibernate.annotations.UpdateTimestamp;
|
||||
|
||||
import java.util.UUID;
|
||||
import java.time.LocalDateTime;
|
||||
|
||||
@Entity
|
||||
@Table(name = "Documents")
|
||||
public class Document {
|
||||
@Id
|
||||
@GeneratedValue(strategy = GenerationType.UUID)
|
||||
@Column(name = "DocumentID", updatable = false, nullable = false)
|
||||
private UUID documentId;
|
||||
|
||||
@Column(name = "Name", nullable = false)
|
||||
private String name;
|
||||
|
||||
@Column(name = "Description", nullable = true)
|
||||
private String description;
|
||||
|
||||
@Column(name = "S3Path", nullable = false)
|
||||
private String s3Path;
|
||||
|
||||
@ManyToOne
|
||||
@JoinColumn(name = "UserID", nullable = false)
|
||||
private User user;
|
||||
|
||||
@CreationTimestamp
|
||||
@Column(name = "CreatedAt", nullable = false, updatable = false)
|
||||
private LocalDateTime createdAt;
|
||||
|
||||
@UpdateTimestamp
|
||||
@Column(name = "UpdatedAt", nullable = false)
|
||||
private LocalDateTime updatedAt;
|
||||
|
||||
@PrePersist
|
||||
protected void onCreate() {
|
||||
createdAt = LocalDateTime.now();
|
||||
updatedAt = LocalDateTime.now();
|
||||
}
|
||||
|
||||
@PreUpdate
|
||||
protected void onUpdate() {
|
||||
updatedAt = LocalDateTime.now();
|
||||
}
|
||||
|
||||
// Getters and Setters
|
||||
public UUID getDocumentId() {
|
||||
return documentId;
|
||||
}
|
||||
|
||||
public void setDocumentId(UUID documentId) {
|
||||
this.documentId = documentId;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getS3Path() {
|
||||
return s3Path;
|
||||
}
|
||||
|
||||
public void setS3Path(String s3Path) {
|
||||
this.s3Path = s3Path;
|
||||
}
|
||||
|
||||
public User getUser() {
|
||||
return user;
|
||||
}
|
||||
|
||||
public void setUser(User user) {
|
||||
this.user = user;
|
||||
}
|
||||
|
||||
public LocalDateTime getCreatedAt() {
|
||||
return createdAt;
|
||||
}
|
||||
|
||||
public void setCreatedAt(LocalDateTime createdAt) {
|
||||
this.createdAt = createdAt;
|
||||
}
|
||||
|
||||
public LocalDateTime getUpdatedAt() {
|
||||
return updatedAt;
|
||||
}
|
||||
|
||||
public void setUpdatedAt(LocalDateTime updatedAt) {
|
||||
this.updatedAt = updatedAt;
|
||||
}
|
||||
}
|
|
@ -1,119 +0,0 @@
|
|||
package com.mixel.docusphere.entity;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import jakarta.persistence.*;
|
||||
import org.hibernate.annotations.CreationTimestamp;
|
||||
import org.hibernate.annotations.UpdateTimestamp;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.UUID;
|
||||
|
||||
@Entity
|
||||
@Table(name = "Users")
|
||||
public class User {
|
||||
@Id
|
||||
@GeneratedValue(strategy = GenerationType.UUID)
|
||||
@Column(name = "UserID", updatable = false, nullable = false)
|
||||
private UUID userId;
|
||||
|
||||
@Column(name = "Username", nullable = false, unique = true)
|
||||
private String username;
|
||||
|
||||
@Column(name = "Name", nullable = false)
|
||||
private String name;
|
||||
|
||||
@Column(name = "Email", nullable = false, unique = true)
|
||||
private String email;
|
||||
|
||||
@JsonIgnore
|
||||
@Column(name = "PasswordHash", nullable = false)
|
||||
private String passwordHash;
|
||||
|
||||
@JsonIgnore
|
||||
@Column(name = "PasswordSalt", nullable = false)
|
||||
private String passwordSalt;
|
||||
|
||||
@CreationTimestamp
|
||||
@Column(name = "CreatedAt", nullable = false, updatable = false)
|
||||
private LocalDateTime createdAt;
|
||||
|
||||
@UpdateTimestamp
|
||||
@Column(name = "UpdatedAt", nullable = false)
|
||||
private LocalDateTime updatedAt;
|
||||
|
||||
@PrePersist
|
||||
protected void onCreate() {
|
||||
createdAt = LocalDateTime.now();
|
||||
updatedAt = LocalDateTime.now();
|
||||
}
|
||||
|
||||
@PreUpdate
|
||||
protected void onUpdate() {
|
||||
updatedAt = LocalDateTime.now();
|
||||
}
|
||||
|
||||
// Getters and Setters
|
||||
public UUID getUserId() {
|
||||
return userId;
|
||||
}
|
||||
|
||||
public void setUserId(UUID userId) {
|
||||
this.userId = userId;
|
||||
}
|
||||
|
||||
public String getUsername() {
|
||||
return username;
|
||||
}
|
||||
|
||||
public void setUsername(String username) {
|
||||
this.username = username;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getEmail() {
|
||||
return email;
|
||||
}
|
||||
|
||||
public void setEmail(String email) {
|
||||
this.email = email;
|
||||
}
|
||||
|
||||
public String getPasswordHash() {
|
||||
return passwordHash;
|
||||
}
|
||||
|
||||
public void setPasswordHash(String passwordHash) {
|
||||
this.passwordHash = passwordHash;
|
||||
}
|
||||
|
||||
public String getPasswordSalt() {
|
||||
return passwordSalt;
|
||||
}
|
||||
|
||||
public void setPasswordSalt(String passwordSalt) {
|
||||
this.passwordSalt = passwordSalt;
|
||||
}
|
||||
|
||||
public LocalDateTime getCreatedAt() {
|
||||
return createdAt;
|
||||
}
|
||||
|
||||
public void setCreatedAt(LocalDateTime createdAt) {
|
||||
this.createdAt = createdAt;
|
||||
}
|
||||
|
||||
public LocalDateTime getUpdatedAt() {
|
||||
return updatedAt;
|
||||
}
|
||||
|
||||
public void setUpdatedAt(LocalDateTime updatedAt) {
|
||||
this.updatedAt = updatedAt;
|
||||
}
|
||||
}
|
|
@ -1,8 +0,0 @@
|
|||
package com.mixel.docusphere.repository;
|
||||
|
||||
import com.mixel.docusphere.entity.Document;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import java.util.UUID;
|
||||
|
||||
public interface DocumentRepository extends JpaRepository<Document, UUID> {
|
||||
}
|
|
@ -1,8 +0,0 @@
|
|||
package com.mixel.docusphere.repository;
|
||||
|
||||
import com.mixel.docusphere.entity.User;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import java.util.UUID;
|
||||
|
||||
public interface UserRepository extends JpaRepository<User, UUID> {
|
||||
}
|
|
@ -1,33 +0,0 @@
|
|||
package com.mixel.docusphere.service;
|
||||
|
||||
import com.mixel.docusphere.entity.Document;
|
||||
import com.mixel.docusphere.repository.DocumentRepository;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
|
||||
@Service
|
||||
public class DocumentService {
|
||||
|
||||
@Autowired
|
||||
private DocumentRepository documentRepository;
|
||||
|
||||
public List<Document> findAll() {
|
||||
return documentRepository.findAll();
|
||||
}
|
||||
|
||||
public Optional<Document> findById(UUID id) {
|
||||
return documentRepository.findById(id);
|
||||
}
|
||||
|
||||
public Document save(Document document) {
|
||||
return documentRepository.save(document);
|
||||
}
|
||||
|
||||
public void deleteById(UUID id) {
|
||||
documentRepository.deleteById(id);
|
||||
}
|
||||
}
|
|
@ -1,71 +0,0 @@
|
|||
package com.mixel.docusphere.service;
|
||||
|
||||
import com.mixel.docusphere.dto.UserDTO;
|
||||
import com.mixel.docusphere.entity.User;
|
||||
import com.mixel.docusphere.repository.UserRepository;
|
||||
import org.springframework.security.crypto.argon2.Argon2PasswordEncoder;
|
||||
import org.springframework.security.crypto.keygen.KeyGenerators;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
|
||||
@Service
|
||||
public class UserService {
|
||||
|
||||
private final UserRepository userRepository;
|
||||
|
||||
private final Argon2PasswordEncoder passwordEncoder;
|
||||
|
||||
// Constructor
|
||||
public UserService(UserRepository userRepository) {
|
||||
this.passwordEncoder = new Argon2PasswordEncoder(16, 32, 1, 4096, 3);
|
||||
this.userRepository = userRepository;
|
||||
}
|
||||
|
||||
public List<User> findAll() {
|
||||
return userRepository.findAll();
|
||||
}
|
||||
|
||||
public Optional<User> findById(UUID id) {
|
||||
return userRepository.findById(id);
|
||||
}
|
||||
|
||||
public User save(UserDTO userDTO) {
|
||||
User user = new User();
|
||||
user.setUsername(userDTO.getUsername());
|
||||
user.setName(userDTO.getName());
|
||||
user.setEmail(userDTO.getEmail());
|
||||
|
||||
isPasswordAlreadySet(userDTO, user);
|
||||
return userRepository.save(user);
|
||||
}
|
||||
|
||||
public Optional<User> update(UUID id, UserDTO userDTO){
|
||||
Optional<User> userOptional = userRepository.findById(id);
|
||||
if (userOptional.isPresent()) {
|
||||
User user = userOptional.get();
|
||||
user.setUsername(userDTO.getUsername());
|
||||
user.setName(userDTO.getName());
|
||||
user.setEmail(userDTO.getEmail());
|
||||
|
||||
isPasswordAlreadySet(userDTO, user);
|
||||
return Optional.of(userRepository.save(user));
|
||||
}
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
private void isPasswordAlreadySet(UserDTO userDTO, User user) {
|
||||
if (userDTO.getPassword() != null && !userDTO.getPassword().isEmpty()) {
|
||||
final String salt = KeyGenerators.string().generateKey();
|
||||
user.setPasswordSalt(salt);
|
||||
final String saltedPassword = salt + userDTO.getPassword();
|
||||
user.setPasswordHash(passwordEncoder.encode(saltedPassword));
|
||||
}
|
||||
}
|
||||
|
||||
public void deleteById(UUID id) {
|
||||
userRepository.deleteById(id);
|
||||
}
|
||||
}
|
|
@ -1,26 +0,0 @@
|
|||
spring:
|
||||
application:
|
||||
name: DocuSphere
|
||||
|
||||
datasource:
|
||||
driver-class-name: org.postgresql.Driver
|
||||
|
||||
jpa:
|
||||
hibernate:
|
||||
ddl-auto: create-drop # update # Use update later on but use create-drop for testing first
|
||||
show-sql: true
|
||||
properties:
|
||||
hibernate:
|
||||
format_sql: true
|
||||
|
||||
logging:
|
||||
level:
|
||||
org.hibernate.SQL: debug
|
||||
org.hibernate.type.descriptor.sql.BasicBinder: trace
|
||||
org.springframework.core.env: debug
|
||||
|
||||
minio:
|
||||
endpoint: "http://localhost:9000" # Your MinIO endpoint URL
|
||||
access-key: ${MINIO_ACCESS_KEY} # Environment variable for the access key
|
||||
secret-key: ${MINIO_SECRET_KEY} # Environment variable for the secret key
|
||||
bucket-name: docusphere # The bucket name you want to use
|
|
@ -1,13 +0,0 @@
|
|||
package com.mixel.docusphere;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
|
||||
@SpringBootTest
|
||||
class DocuSphereApplicationTests {
|
||||
|
||||
@Test
|
||||
void contextLoads() {
|
||||
}
|
||||
|
||||
}
|
|
@@ -1,2 +0,0 @@
rootProject.name = 'DocuSphere'
include 'server'