mirror of https://github.com/Noratrieb/karlheinz.git
synced 2026-01-14 14:35:03 +01:00
database
This commit is contained in:
parent cc8646baff
commit 90ed6e1533
13 changed files with 224 additions and 711 deletions
1 .env Normal file
@@ -0,0 +1 @@
DATABASE_URL=postgres://postgres:hugo123@localhost/karldbeinz
735 Cargo.lock generated
File diff suppressed because it is too large
Cargo.toml
@@ -10,4 +10,5 @@ edition = "2018"
actix-web = "3"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0.64"
sqlx = { version = "0.5", features = ["postgres", "macros", "runtime-actix-rustls"] }
diesel = { version = "1.4.4", features = ["postgres"] }
dotenv = "0.15.0"
5 diesel.toml Normal file
@@ -0,0 +1,5 @@
# For documentation on how to configure this file,
# see diesel.rs/guides/configuring-diesel-cli

[print_schema]
file = "src/schema.rs"
0 migrations/.gitkeep Normal file
6 migrations/00000000000000_diesel_initial_setup/down.sql Normal file
@@ -0,0 +1,6 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.

DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();
36 migrations/00000000000000_diesel_initial_setup/up.sql Normal file
@@ -0,0 +1,36 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.




-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
    EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
                    FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
    IF (
        NEW IS DISTINCT FROM OLD AND
        NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
    ) THEN
        NEW.updated_at := current_timestamp;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
3 migrations/2021-07-02-070233_create_posts/down.sql Normal file
@@ -0,0 +1,3 @@
-- This file should undo anything in `up.sql`

DROP TABLE posts;
8 migrations/2021-07-02-070233_create_posts/up.sql Normal file
@@ -0,0 +1,8 @@
-- Your SQL goes here

CREATE TABLE posts (
    id SERIAL PRIMARY KEY,
    author VARCHAR NOT NULL,
    title VARCHAR NOT NULL,
    body VARCHAR NOT NULL DEFAULT ''
);
84 src/main.rs
@@ -1,49 +1,43 @@
#[macro_use]
extern crate diesel;
extern crate dotenv;

mod models;
mod schema;

use std::error::Error;
use std::sync::Mutex;

use actix_web::{App, Either, get, HttpResponse, HttpServer, post, Responder, web};
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPoolOptions;
use diesel::prelude::*;
use dotenv::dotenv;
use std::env;

macro_rules! impl_responder {
    (for $name: ident) => {
        impl actix_web::Responder for $name {
            type Error = actix_web::Error;
            type Future = std::future::Ready<Result<actix_web::HttpResponse, actix_web::Error>>;

            fn respond_to(self, _req: &actix_web::HttpRequest) -> Self::Future {
                let body = serde_json::to_string(&self).unwrap();

                std::future::ready(Ok(actix_web::HttpResponse::Ok()
                    .content_type("application/json")
                    .body(body)))
            }
        }
    };
}
impl_responder!(for Person);
impl_responder!(for Post);

#[derive(Serialize, Deserialize, Clone)]
struct Person {
    name: String,
    age: i32,
}

#[derive(Serialize, Deserialize, Clone)]
struct Post {
    #[serde(default)]
    id: usize,
    author: String,
    title: String,
    content: String,
}
use crate::models::{Person, Post};
use actix_web::{get, post, web, App, Either, HttpResponse, HttpServer, Responder};

struct AppState {
    posts: Mutex<Vec<Post>>,
    hugo: Mutex<Person>,
}

#[get("post")]
async fn get_posts() -> impl Responder {
    use crate::schema::posts::dsl::*;

    let results = web::block(|| {
        let connection = establish_connection();
        posts.load::<Post>(&connection)
    })
    .await
    .map_err(|e| {
        eprintln!("{:?}", e);
        HttpResponse::InternalServerError().finish()
    })
    .map(|vec| HttpResponse::Ok().json(vec));

    results
}

#[get("/post/{id}")]
async fn get_post(
    web::Path(id): web::Path<usize>,
@@ -60,7 +54,7 @@ async fn get_post(
#[post("/post")]
async fn post_post(mut post: web::Json<Post>, data: web::Data<AppState>) -> impl Responder {
    let mut posts = data.posts.lock().unwrap();
    post.id = posts.len();
    post.id = posts.len() as i32;
    posts.push(post.clone());
    post
}
@@ -83,16 +77,16 @@ async fn hugo_post(new_hugo: web::Json<Person>, data: web::Data<AppState>) -> im

#[actix_web::main]
async fn main() -> Result<(), Box<dyn Error>> {
    let pool = PgPoolOptions::new()
    /*let pool = PgPoolOptions::new()
        .max_connections(5)
        .connect("postgres://postgres:karl@localhost/karlheinz")
        .await?;
        .await?;*/

    let posts = vec![Post {
        id: 0,
        author: "Hugo Boss".to_string(),
        title: "I like winning".to_string(),
        content: "I really like winning. That's why I always win at everything".to_string(),
        body: "I really like winning. That's why I always win at everything".to_string(),
    }];

    let data = web::Data::new(AppState {
@@ -103,17 +97,25 @@ async fn main() -> Result<(), Box<dyn Error>> {
        }),
    });

    println!("Started Server...");
    HttpServer::new(move || {
        App::new()
            .data(pool.clone())
            // .data(pool.clone())
            .app_data(data.clone())
            .service(hugo_post)
            .service(hugo)
            .service(get_post)
            .service(post_post)
            .service(get_posts)
    })
    .bind("127.0.0.1:8080")?
    .run()
    .await?;
    Ok(())
}

pub fn establish_connection() -> PgConnection {
    dotenv().ok();
    let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
    PgConnection::establish(&database_url).expect(&format!("Error connecting to {}", database_url))
}
35 src/models.rs Normal file
@@ -0,0 +1,35 @@
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Queryable, Clone)]
pub struct Post {
    #[serde(default)]
    pub id: i32,
    pub author: String,
    pub title: String,
    pub body: String,
}

#[derive(Serialize, Deserialize, Clone)]
pub struct Person {
    pub name: String,
    pub age: i32,
}

macro_rules! impl_responder {
    (for $name: ident) => {
        impl actix_web::Responder for $name {
            type Error = actix_web::Error;
            type Future = std::future::Ready<Result<actix_web::HttpResponse, actix_web::Error>>;

            fn respond_to(self, _req: &actix_web::HttpRequest) -> Self::Future {
                let body = serde_json::to_string(&self).unwrap();

                std::future::ready(Ok(actix_web::HttpResponse::Ok()
                    .content_type("application/json")
                    .body(body)))
            }
        }
    };
}
impl_responder!(for Person);
impl_responder!(for Post);
8 src/schema.rs Normal file
@@ -0,0 +1,8 @@
table! {
    posts (id) {
        id -> Int4,
        author -> Varchar,
        title -> Varchar,
        body -> Varchar,
    }
}
1 startdb.bat Normal file
@@ -0,0 +1 @@
docker run -d -e POSTGRES_PASSWORD=hugo123 -p 5432:5432 --name karldbeinz postgres