1
0
Fork 0
mirror of https://codeberg.org/Mo8it/git-webhook-client synced 2024-10-18 07:22:39 +00:00

Port to Axum

This commit is contained in:
Mo 2022-12-05 22:28:43 +01:00
parent fe21952585
commit c3bc254924
14 changed files with 375 additions and 332 deletions

5
.gitignore vendored
View file

@ -1,6 +1,7 @@
*.json
*.log
/Cargo.lock /Cargo.lock
/db/ /db/
*.json
*.log
/scripts/ /scripts/
/target/ /target/
*.yaml

View file

@ -1,9 +1,9 @@
[package] [package]
name = "git-webhook-client" name = "git-webhook-client"
version = "0.2.0" version = "0.3.0"
authors = ["Mo Bitar <mo8it@proton.me>"] authors = ["Mo Bitar <mo8it@proton.me>"]
edition = "2021" edition = "2021"
readme = "README.adoc" readme = "README.md"
repository = "https://codeberg.org/Mo8it/git-webhook-client" repository = "https://codeberg.org/Mo8it/git-webhook-client"
license-file = "LICENSE.txt" license-file = "LICENSE.txt"
@ -11,22 +11,19 @@ license-file = "LICENSE.txt"
anyhow = "1.0" anyhow = "1.0"
askama = { git = "https://github.com/djc/askama.git" } askama = { git = "https://github.com/djc/askama.git" }
askama_axum = { git = "https://github.com/djc/askama.git", package = "askama_axum" } askama_axum = { git = "https://github.com/djc/askama.git", package = "askama_axum" }
axum = { version = "0.5", default-features = false, features = [ axum = { version = "0.6", default-features = false, features = ["http1", "tokio", "macros"] }
"http1", axum-extra = { version = "0.4", features = ["spa"] }
"query", bytes = "1.3"
"form", chrono = { version = "0.4", default-features = false, features = ["clock"] }
] } diesel = { version = "2.0", features = ["r2d2", "sqlite", "returning_clauses_for_sqlite_3_35", "without-deprecated"] }
axum-extra = { version = "0.3", features = ["spa"] }
chrono = { version = "0.4", default-features = false }
hex = "0.4" hex = "0.4"
hmac = "0.12" hmac = "0.12"
lettre = "0.10" lettre = { version = "0.10", default-features = false, features = ["smtp-transport", "hostname", "rustls-tls", "pool", "builder"] }
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
serde_yaml = "0.9"
sha2 = "0.10" sha2 = "0.10"
tokio = { version = "1.21", features = ["full"] } tokio = { version = "1.22", features = ["full"] }
tower = { version = "0.4", features = ["limit", "buffer"] }
tower-http = { version = "0.3", features = ["trace"] }
tracing = "0.1" tracing = "0.1"
tracing-appender = "0.2" tracing-appender = "0.2"
tracing-subscriber = "0.3" tracing-subscriber = "0.3"

View file

@ -1,91 +0,0 @@
= Git Webhook Client
Git webhook client that runs commands after a webhook event and shows their output.
Currently, only Gitea is supported. If you want support for Gitlab or Github, then please open an issue.
== Features
* Verify the webhook event with a secret.
* Run a configured command to a specific repository on a webhook event.
* Save the output of the command.
* Show an output by visiting the url of the client.
* Supported configuration for multiple repositories.
* Written in Rust :D
== Getting started
=== Requirements
* `cargo` to compile the source code.
* Development package for SQLite (`sqlite-devel` on Fedora)
=== Configuration
The program looks for the configuration file configured with the environment variable `GWC_CONFIG_FILE` that contains the following:
. `secret`: The secret of the webhook.
. `base_url`: The base_url of the webhook client.
. `hooks`: List of webhooks.
.. `repo_url`: Repository url.
.. `current_dir`: The directory to run the command in.
.. `command`: The command without any arguments.
.. `args`: List of arguments separated by a comma.
==== Example configuration file:
[source, json]
----
{
"secret": "CHANGE_ME!",
"base_url": "https://webhook.mo8it.xyz",
"hooks": [
{
"repo_url": "https://codeberg.org/Mo8it/git-webhook-client",
"current_dir": ".",
"command": "ls",
"args": ["-l", "-a", "test_directory"]
}
]
}
----
==== First setup
* Clone the repository.
* Create the configuration file.
* Run the following to initialize the database:
.
[source, bash]
----
cargo install diesel_cli --no-default-features --features sqlite
DATABASE_URL=PATH/TO/DATABASE/DIRECTORY/db.sqlite diesel migration run
cargo build --release
----
==== Run
After running `cargo build --release`, the binary can be found in the directory `target/release/git-webhook-client`. To run it, you have to specify the environment variable `DATABASE_URL`:
[source, bash]
----
DATABASE_URL=PATH/TO/DATABASE/DIRECTORY/db.sqlite target/release/git-webhook-client
----
==== Setup on the git server
Setup the webhook for the configured repositories on the git server. Don't forget to enter the same secret that you did specify in the configuration file.
==== Show output
After an event, the client responds with a URL that shows the log. The id in that URL is important and specific to this event.
If you want to see the last log, just visit the `base_url` from the configuration.
To see a specific log with an id, visit the URL: `base_url/?id=THE_ID_OF_AN_EVENT`.
You can specify a negative ID to see the last events. `id=-1` corresponds to the last log, `id=-2` corresponds to the log before it and so on.
== Note
This is my first Rust project and I am still learning. If you have any suggestions, just open an issue!

89
README.md Normal file
View file

@ -0,0 +1,89 @@
# Git Webhook Client
Git webhook client that runs commands after a webhook event and shows their output.
Currently, only Gitea is supported. If you want support for Gitlab or Github, then please open an issue.
## Features
- Verify the webhook event with a secret.
- Run a configured command for a specific repository on a webhook event.
- Save the output of the command.
- Show an output by visiting the url of the client.
- Supported configuration for multiple repositories.
- Written in Rust :D
## Getting started
### Requirements
- `cargo` to compile the source code.
- Development package for SQLite (`sqlite-devel` on Fedora)
### Configuration
The program looks for the configuration file configured with the environment variable `GWC_CONFIG_FILE` that contains the following:
<!-- TODO: Adjust to new config -->
1. `secret`: The secret of the webhook.
1. `base_url`: The base_url of the webhook client.
1. `hooks`: List of webhooks.
1. `repo_url`: Repository url.
1. `current_dir`: The directory to run the command in.
1. `command`: The command without any arguments.
1. `args`: List of arguments separated by a comma.
#### Example configuration file:
<!-- TODO: Adjust to new config -->
```yaml
secret: CHANGE_ME!
base_url: https://webhook.mo8it.com
hooks:
  - repo_url: https://codeberg.org/Mo8it/git-webhook-client
    current_dir: .
    command: ls
    args: ["-l", "-a", "test_directory"]
```
#### First setup
<!-- TODO: Auto migration -->
- Clone the repository.
- Create the configuration file.
- Run the following to initialize the database:
```bash
cargo install diesel_cli --no-default-features --features sqlite
DATABASE_URL=PATH/TO/DATABASE/DIRECTORY/db.sqlite diesel migration run
cargo build --release
```
#### Run
After running `cargo build --release`, the binary can be found in the directory `target/release/git-webhook-client`. To run it, you have to specify the environment variable `DATABASE_URL`:
```bash
DATABASE_URL=PATH/TO/DATABASE/DIRECTORY/db.sqlite target/release/git-webhook-client
```
#### Setup on the git server
Set up the webhook for the configured repositories on the git server. Don't forget to enter the same secret that you specified in the configuration file.
#### Show output
After an event, the client responds with a URL that shows the log. The id in that URL is important and specific to this event.
If you want to see the last log, just visit the `base_url` from the configuration.
To see a specific log with an id, visit the URL: `base_url/?id=THE_ID_OF_AN_EVENT`.
You can specify a negative ID to see the last events. `id=-1` corresponds to the last log, `id=-2` corresponds to the log before it and so on.
## Note
This is my first Rust project and I am still learning. If you have any suggestions, just open an issue!

View file

@ -4,6 +4,32 @@ use std::env;
use std::fs::File; use std::fs::File;
use std::io::BufReader; use std::io::BufReader;
/// Listening address for the HTTP server, deserialized from the config file.
#[derive(Deserialize)]
pub struct SocketAddress {
    // IPv4 address as four octets, e.g. [127, 0, 0, 1].
    pub address: [u8; 4],
    pub port: u16,
}

/// SMTP server name and login credentials for sending notification mails.
#[derive(Deserialize)]
pub struct EmailServer {
    pub server_name: String,
    pub email: String,
    pub password: String,
}

/// An email address split into display name, local user part and domain part.
#[derive(Deserialize)]
pub struct Address {
    pub name: String,
    pub user: String,
    pub domain: String,
}

/// Location of the log file (directory + filename).
#[derive(Deserialize)]
pub struct Logging {
    pub directory: String,
    pub filename: String,
}
#[derive(Deserialize)] #[derive(Deserialize)]
pub struct Hook { pub struct Hook {
pub repo_url: String, pub repo_url: String,
@ -16,7 +42,11 @@ pub struct Hook {
pub struct Config { pub struct Config {
pub secret: String, pub secret: String,
pub base_url: String, pub base_url: String,
pub log_file: String, pub socket_address: SocketAddress,
pub email_server: EmailServer,
pub email_from: Address,
pub email_to: Address,
pub logging: Logging,
pub hooks: Vec<Hook>, pub hooks: Vec<Hook>,
} }
@ -29,8 +59,8 @@ impl Config {
let config_file = File::open(&config_path) let config_file = File::open(&config_path)
.with_context(|| format!("Can not open the config file at the path {config_path}"))?; .with_context(|| format!("Can not open the config file at the path {config_path}"))?;
let config_reader = BufReader::new(config_file); let config_reader = BufReader::new(config_file);
let config: Self = serde_json::from_reader(config_reader) let config: Self = serde_yaml::from_reader(config_reader)
.context("Can not parse the config file as JSON!")?; .context("Can not parse the YAML config file!")?;
Ok(config) Ok(config)
} }

View file

@ -1,9 +1,9 @@
use anyhow::{Context, Result}; use anyhow::{Context, Result};
use chrono::Local; use chrono::offset::Local;
use diesel::prelude::*; use diesel::prelude::*;
use diesel::r2d2::{ConnectionManager, Pool, PooledConnection}; use diesel::r2d2::{ConnectionManager, Pool, PooledConnection};
use log::error;
use std::env; use std::env;
use tracing::error;
use crate::config::Hook; use crate::config::Hook;
use crate::models::{HookLog, NewHookLog}; use crate::models::{HookLog, NewHookLog};

25
src/errors.rs Normal file
View file

@ -0,0 +1,25 @@
use axum::http::StatusCode;
use axum::response::{IntoResponse, Response};
use tracing::error;
/// Wrapper around `anyhow::Error` so handlers can return `Result<_, AppError>`
/// and use `?` on any fallible call, with Axum converting failures into a response.
pub struct AppError(anyhow::Error);

impl IntoResponse for AppError {
    /// Log the full error chain server-side; the client only gets a bare 400,
    /// so no internal details leak into the response body.
    fn into_response(self) -> Response {
        error!("{:?}", self.0);
        StatusCode::BAD_REQUEST.into_response()
    }
}

/// Enables `?` on `anyhow::Result` values inside handlers.
impl From<anyhow::Error> for AppError {
    fn from(err: anyhow::Error) -> Self {
        Self(err)
    }
}

/// Enables constructing an error directly from a message, e.g. `"msg".into()`.
impl From<&str> for AppError {
    fn from(s: &str) -> Self {
        Self(anyhow::Error::msg(s.to_string()))
    }
}

View file

@ -1,115 +0,0 @@
use hmac::{Hmac, Mac};
use rocket::data::{Data, FromData, Limits, Outcome};
use rocket::http::Status;
use rocket::request::{self, Request};
use serde_json::Value;
use sha2::Sha256;
use crate::states;
/// Rocket data guard that extracts the repository `clone_url` from a Gitea
/// webhook payload, after verifying the `X-GITEA-SIGNATURE` HMAC header.
pub struct Repo<'r> {
    pub clone_url: &'r str,
}

#[rocket::async_trait]
impl<'r> FromData<'r> for Repo<'r> {
    type Error = String;

    async fn from_data(req: &'r Request<'_>, data: Data<'r>) -> Outcome<'r, Self> {
        // Read the raw body bytes (needed verbatim for HMAC verification),
        // bounded by Rocket's JSON size limit.
        let payload = match data.open(Limits::JSON).into_bytes().await {
            Ok(payload) if payload.is_complete() => payload.into_inner(),
            // `is_complete() == false` means the limit was hit before EOF.
            Ok(_) => {
                return Outcome::Failure((Status::PayloadTooLarge, "Payload too large".to_string()))
            }
            Err(e) => return Outcome::Failure((Status::InternalServerError, e.to_string())),
        };

        // The signature header is hex-encoded; exactly one occurrence is required.
        let mut received_signatures = req.headers().get("X-GITEA-SIGNATURE");
        let received_signature = match received_signatures.next() {
            Some(signature) => match hex::decode(signature) {
                Ok(signature) => signature,
                Err(_) => {
                    return Outcome::Failure((
                        Status::BadRequest,
                        "Can not hex decode the received signature!".to_string(),
                    ))
                }
            },
            None => {
                return Outcome::Failure((Status::BadRequest, "Missing signature!".to_string()))
            }
        };
        // Reject ambiguous requests carrying multiple signature headers.
        if received_signatures.next().is_some() {
            return Outcome::Failure((
                Status::BadRequest,
                "Received more than one signature!".to_string(),
            ));
        }

        // The webhook secret lives in Rocket's managed config state.
        let config_state = match req.rocket().state::<states::Config>() {
            Some(state) => state,
            None => {
                return Outcome::Failure((
                    Status::BadRequest,
                    "Can not get the config state!".to_string(),
                ))
            }
        };

        // Verify the HMAC before trusting any of the payload's contents.
        if !is_valid_signature(&config_state.secret, &received_signature, &payload) {
            return Outcome::Failure((Status::BadRequest, "Invalid signature!".to_string()));
        }

        // Only after verification: parse the JSON and drill down to
        // payload.repository.clone_url.
        let json: Value = match serde_json::from_slice(&payload) {
            Ok(json) => json,
            Err(_) => {
                return Outcome::Failure((
                    Status::BadRequest,
                    "Can not parse payload into JSON!".to_string(),
                ))
            }
        };
        let repo = match json.get("repository") {
            Some(repo) => repo,
            None => {
                return Outcome::Failure((
                    Status::BadRequest,
                    "Can not get the repository value from the payload!".to_string(),
                ))
            }
        };
        let clone_url = match repo.get("clone_url") {
            Some(url) => url,
            None => {
                return Outcome::Failure((
                    Status::BadRequest,
                    "Can not get value clone_url from repository in the payload!".to_string(),
                ))
            }
        };
        let clone_url = match clone_url.as_str() {
            Some(url) => url.to_string(),
            None => {
                return Outcome::Failure((
                    Status::BadRequest,
                    "The value of clone_url from repository in the payload is not a string!"
                        .to_string(),
                ))
            }
        };

        // Store the owned String in Rocket's request-local cache so the guard
        // can hand out a &'r str borrowed from the request's lifetime.
        let clone_url = request::local_cache!(req, clone_url);
        Outcome::Success(Repo { clone_url })
    }
}
/// Recompute the HMAC-SHA256 of `payload` with `secret` and compare it to the
/// signature received in the webhook header.
// NOTE(review): the `==` slice comparison is not constant-time; the `hmac`
// crate's `verify_slice` would give a timing-safe comparison.
fn is_valid_signature(secret: &[u8], received_signature: &[u8], payload: &[u8]) -> bool {
    let mut mac =
        Hmac::<Sha256>::new_from_slice(secret).expect("Can not generate a mac from the secret!");
    mac.update(payload);
    let expected_signature = mac.finalize().into_bytes();

    received_signature[..] == expected_signature[..]
}

View file

@ -1,30 +1,17 @@
use anyhow::{Context, Result}; use tracing_appender::non_blocking::WorkerGuard;
use simplelog::{ColorChoice, LevelFilter, TermLogger, TerminalMode, WriteLogger}; use tracing_subscriber::filter::LevelFilter;
use std::fs::OpenOptions;
use crate::config; use crate::config;
pub fn init_logger(config: &config::Config) -> Result<()> { pub fn init_logger(logging_config: &config::Logging) -> WorkerGuard {
let logger = if cfg!(debug_assertions) { let file_appender =
TermLogger::init( tracing_appender::rolling::never(&logging_config.directory, &logging_config.filename);
LevelFilter::Debug, let (non_blocking, guard) = tracing_appender::non_blocking(file_appender);
simplelog::Config::default(),
TerminalMode::Mixed,
ColorChoice::Auto,
)
} else {
WriteLogger::init(
LevelFilter::Info,
simplelog::Config::default(),
OpenOptions::new()
.create(true)
.append(true)
.open(&config.log_file)
.with_context(|| format!("Could not open the log file {}", &config.log_file))?,
)
};
logger.context("Could not initialize the logger!")?; tracing_subscriber::fmt()
.with_max_level(LevelFilter::INFO)
.with_writer(non_blocking)
.init();
Ok(()) guard
} }

48
src/mailer.rs Normal file
View file

@ -0,0 +1,48 @@
use anyhow::{Context, Result};
use lettre::address::Address;
use lettre::message::{Mailbox, MessageBuilder};
use lettre::transport::smtp::authentication::Credentials;
use lettre::{Message, SmtpTransport};
use std::mem;
use crate::config;
/// Holds a configured SMTP transport plus a message builder with the
/// From/To mailboxes already filled in, ready for sending notification mails.
pub struct Mailer {
    mailer: SmtpTransport,
    message_builder: MessageBuilder,
}

impl Mailer {
    /// Build the mailer from the application config.
    ///
    /// Takes `&mut Config` so the credential and name strings can be moved
    /// out with `mem::take` instead of being cloned.
    ///
    /// # Errors
    /// Fails if the SMTP relay cannot be set up or if either email address
    /// is invalid.
    pub fn new(config: &mut config::Config) -> Result<Self> {
        let creds = Credentials::new(
            mem::take(&mut config.email_server.email),
            mem::take(&mut config.email_server.password),
        );

        let mailer = SmtpTransport::relay(&config.email_server.server_name)
            .context("Failed to connect to the email server!")?
            .credentials(creds)
            .build();

        let message_builder = Message::builder()
            .from(Mailbox::new(
                Some(mem::take(&mut config.email_from.name)),
                Address::new(&config.email_from.user, &config.email_from.domain)
                    .context("Failed to create the From email address!")?,
            ))
            .to(Mailbox::new(
                Some(mem::take(&mut config.email_to.name)),
                Address::new(&config.email_to.user, &config.email_to.domain)
                    .context("Failed to create the To email address!")?,
            ));

        Ok(Self {
            mailer,
            message_builder,
        })
    }

    /// Send a notification email.
    // NOTE(review): currently a stub — it ignores `self.mailer` and
    // `self.message_builder` and always reports success. Needs implementing.
    pub fn send(&self) -> Result<()> {
        Ok(())
    }
}

View file

@ -1,36 +1,25 @@
mod config; mod config;
mod db; mod db;
mod guards; mod errors;
mod logging; mod logging;
mod mailer;
mod models; mod models;
mod routes; mod routes;
mod schema; mod schema;
mod states; mod states;
mod templates;
use anyhow::Result; use anyhow::Result;
use axum::extract::Extension;
use axum::routing::{get, post}; use axum::routing::{get, post};
use axum::{error_handling::HandleErrorLayer, http::StatusCode, BoxError};
use axum::{Router, Server}; use axum::{Router, Server};
use axum_extra::routing::SpaRouter; use axum_extra::routing::SpaRouter;
use std::net::{IpAddr, Ipv4Addr, SocketAddr}; use std::net::{IpAddr, Ipv4Addr, SocketAddr};
use std::process; use std::process;
use std::sync::Arc; use tracing::info;
use tower_http::trace::{DefaultOnResponse, TraceLayer};
use tracing::Level;
use tracing_appender::non_blocking::WorkerGuard;
fn init() -> Result<Rocket<Build>> { async fn init() -> Result<()> {
let rocket = rocket::build()
.mount("/", rocket::routes![routes::index])
.mount("/api", rocket::routes![routes::trigger])
.manage(states::DB::new()?)
}
async fn init() -> Result<WorkerGuard> {
let mut config = config::Config::new()?; let mut config = config::Config::new()?;
let path_prefix = config.path_prefix.clone(); let mailer = mailer::Mailer::new(&mut config)?;
let mailer = Arc::new(mailer::Mailer::new(&mut config)?);
let address = config.socket_address.address; let address = config.socket_address.address;
let socket_address = SocketAddr::new( let socket_address = SocketAddr::new(
@ -40,37 +29,33 @@ async fn init() -> Result<WorkerGuard> {
config.socket_address.port, config.socket_address.port,
); );
let tracing_worker_gurad = logging::init_logger(&config.logging); let _tracing_gurad = logging::init_logger(&config.logging);
let config = Arc::new(config); let app_state = states::AppState::new(config, mailer)?;
let spa = SpaRouter::new(&format!("{}/static", &path_prefix), "static"); let api_routes = Router::new().route("/trigger", post(routes::trigger));
let api_routes = Router::new()
.route("/submit", post(routes::trigger));
let routes = Router::new() let routes = Router::new()
.route("/", get(routes::index)) .route("/", get(routes::index))
.nest("/api", api_routes); .route("/:id", get(routes::index_id))
.nest("/api", api_routes)
.with_state(app_state);
let app = Router::new() let spa = SpaRouter::new("/static", "static");
.merge(spa)
.merge(routes)
.layer(TraceLayer::new_for_http().on_response(DefaultOnResponse::new().level(Level::INFO)))
.layer(Extension(config))
.layer(Extension(mailer));
let app = Router::new().merge(routes).merge(spa);
info!("Starting server");
Server::bind(&socket_address) Server::bind(&socket_address)
.serve(app.into_make_service()) .serve(app.into_make_service())
.await .await
.unwrap(); .unwrap();
Ok(tracing_worker_gurad) Ok(())
} }
#[tokio::main] #[tokio::main]
async fn main() { async fn main() {
let _tracing_worker_gurad = init().await.unwrap_or_else(|e| { init().await.unwrap_or_else(|e| {
eprintln!("{e:?}"); eprintln!("{e:?}");
process::exit(1); process::exit(1);
}); });

View file

@ -1,59 +1,92 @@
use rocket::response::status::BadRequest; use anyhow::Context;
use rocket::{get, post, State}; use askama_axum::IntoResponse;
use rocket_dyn_templates::Template; use axum::extract::{Path, State};
use axum::http::header::HeaderMap;
use axum::response::Response;
use bytes::Bytes;
use hmac::{Hmac, Mac};
use serde_json::Value;
use sha2::Sha256;
use std::process::Command; use std::process::Command;
use std::sync::Arc;
use std::thread; use std::thread;
use tracing::info;
use crate::db; use crate::{db, errors, states, templates};
use crate::guards;
use crate::states;
fn bad_req<E>(err: E) -> BadRequest<String> pub async fn index(State(db_state): State<Arc<states::DB>>) -> Result<Response, errors::AppError> {
where index_id(State(db_state), Path(-1)).await
E: std::fmt::Display,
{
BadRequest(Some(err.to_string()))
} }
#[get("/?<id>")] pub async fn index_id(
pub fn index( State(db_state): State<Arc<states::DB>>,
db_state: &State<states::DB>, Path(id): Path<i32>,
id: Option<i32>, ) -> Result<Response, errors::AppError> {
) -> Result<Template, BadRequest<String>> {
let id = id.unwrap_or(-1);
if id == 0 { if id == 0 {
return Err(bad_req("id=0 not allowed!")); return Err("id=0 not allowed!".into());
} }
let hook_log = match db::get_hook_log(&db_state.pool, id) { let hook_log = db::get_hook_log(&db_state.pool, id)?;
Ok(hl) => hl,
Err(e) => return Err(bad_req(e)),
};
Ok(Template::render("hook_log", hook_log)) info!("Viewed hook log with id: {}", hook_log.id);
let template = templates::HookLog::from(hook_log);
Ok(template.into_response())
} }
#[post("/trigger", format = "json", data = "<repo>")] async fn is_valid_signature(secret: &[u8], received_signature: &[u8], body: &[u8]) -> bool {
pub fn trigger( let mut mac =
repo: guards::Repo, Hmac::<Sha256>::new_from_slice(secret).expect("Can not generate a mac from the secret!");
db_state: &State<states::DB>, mac.update(body);
config_state: &State<states::Config>, let expected_signature = mac.finalize().into_bytes();
) -> Result<String, BadRequest<String>> {
let hook = match config_state.get_hook(repo.clone_url) {
Some(hook) => hook,
None => {
return Err(bad_req(format!(
"No matching repository with url {} in the configuration file.",
repo.clone_url
)))
}
};
let hook_log_id = match db::add_hook_log(&db_state.pool, hook) { received_signature[..] == expected_signature[..]
Ok(hook_log) => hook_log.id, }
Err(e) => return Err(bad_req(e)),
}; pub async fn trigger(
State(db_state): State<Arc<states::DB>>,
State(config_state): State<Arc<states::Config>>,
headers: HeaderMap,
body: Bytes,
) -> Result<Response, errors::AppError> {
info!("Trigger called");
let mut received_signatures = headers.get_all("X-GITEA-SIGNATURE").iter();
let received_signature = received_signatures.next().context("Missing signature!")?;
let received_signature =
hex::decode(received_signature).context("Can not hex decode the received signature!")?;
if received_signatures.next().is_some() {
return Err("Received more than one signature!".into());
}
if !is_valid_signature(&config_state.secret, &received_signature, &body).await {
return Err("Invalid signature!".into());
}
let json: Value =
serde_json::from_slice(&body).context("Can not parse the request body into JSON!")?;
let repo = json
.get("repository")
.context("Can not get the repository value from the request body!")?;
let clone_url = repo
.get("clone_url")
.context("Can not get value clone_url from repository in the request body!")?;
let clone_url = clone_url
.as_str()
.context("The value of clone_url from repository in the request body is not a string!")?;
let hook = config_state.get_hook(clone_url).with_context(|| {
format!("No matching repository with url {clone_url} in the configuration file.")
})?;
let hook_log_id = db::add_hook_log(&db_state.pool, hook)?.id;
{ {
// Spawn and detach a thread that runs the command and fills the output in the log. // Spawn and detach a thread that runs the command and fills the output in the log.
@ -64,8 +97,11 @@ pub fn trigger(
let args = hook.args.clone(); let args = hook.args.clone();
let current_dir = hook.current_dir.clone(); let current_dir = hook.current_dir.clone();
let db_pool = db_state.pool.clone(); let db_pool = db_state.pool.clone();
let clone_url = clone_url.to_string();
thread::spawn(move || { thread::spawn(move || {
info!("Running webhook for Repo: {clone_url}");
let stdout: Vec<u8>; let stdout: Vec<u8>;
let stderr: Vec<u8>; let stderr: Vec<u8>;
let status_code: Option<i32>; let status_code: Option<i32>;
@ -93,5 +129,5 @@ pub fn trigger(
}); });
} }
Ok(format!("{}/?id={}", config_state.base_url, hook_log_id)) Ok(format!("{}/?id={}", config_state.base_url, hook_log_id).into_response())
} }

View file

@ -1,7 +1,8 @@
use crate::config;
use crate::db;
use anyhow::Result; use anyhow::Result;
use axum::extract::FromRef;
use std::sync::Arc;
use crate::{config, db, mailer};
pub struct DB { pub struct DB {
pub pool: db::DBPool, pub pool: db::DBPool,
@ -21,16 +22,35 @@ pub struct Config {
pub hooks: Vec<config::Hook>, pub hooks: Vec<config::Hook>,
} }
impl Config { impl From<config::Config> for Config {
pub fn new(config: config::Config) -> Self { fn from(config: config::Config) -> Self {
Self { Self {
secret: config.secret.as_bytes().to_owned(), secret: config.secret.as_bytes().to_owned(),
base_url: config.base_url, base_url: config.base_url,
hooks: config.hooks, hooks: config.hooks,
} }
} }
}
impl Config {
pub fn get_hook(&self, clone_url: &str) -> Option<&config::Hook> { pub fn get_hook(&self, clone_url: &str) -> Option<&config::Hook> {
self.hooks.iter().find(|&hook| hook.repo_url == clone_url) self.hooks.iter().find(|&hook| hook.repo_url == clone_url)
} }
} }
/// Shared application state for Axum. `#[derive(FromRef)]` lets handlers
/// extract each `Arc` sub-state on its own via `State<Arc<...>>`, so a
/// handler only names the state it actually needs.
#[derive(Clone, FromRef)]
pub struct AppState {
    pub config: Arc<Config>,
    pub mailer: Arc<mailer::Mailer>,
    pub db: Arc<DB>,
}

impl AppState {
    /// Assemble the shared state from the parsed config and the mailer.
    ///
    /// # Errors
    /// Fails if the database connection pool cannot be created (`DB::new`).
    pub fn new(config: config::Config, mailer: mailer::Mailer) -> Result<Self> {
        Ok(Self {
            config: Arc::new(Config::from(config)),
            mailer: Arc::new(mailer),
            db: Arc::new(DB::new()?),
        })
    }
}

31
src/templates.rs Normal file
View file

@ -0,0 +1,31 @@
use askama::Template;
use crate::models;
/// Askama template for rendering one hook-log entry
/// (rendered from `templates/hook_log.txt`).
#[derive(Template)]
#[template(path = "hook_log.txt")]
pub struct HookLog {
    pub id: i32,
    pub datetime: String,
    pub repo_url: String,
    pub command_with_args: String,
    pub current_dir: String,
    pub stdout: String,
    pub stderr: String,
    pub status_code: i32,
}

impl From<models::HookLog> for HookLog {
    /// Convert a database row into the template struct, substituting defaults
    /// (empty string / 0) for the optional output fields that are `None` —
    /// presumably entries whose command has not finished yet; verify against
    /// the writer in db.rs.
    fn from(hook_log: models::HookLog) -> Self {
        Self {
            id: hook_log.id,
            datetime: hook_log.datetime,
            repo_url: hook_log.repo_url,
            command_with_args: hook_log.command_with_args,
            current_dir: hook_log.current_dir,
            stdout: hook_log.stdout.unwrap_or_default(),
            stderr: hook_log.stderr.unwrap_or_default(),
            status_code: hook_log.status_code.unwrap_or_default(),
        }
    }
}