diff --git a/README.md b/README.md index 5892832..590236b 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,7 @@ That's it! ## Features | Feature | Status | -|------------------|---------------------| +| ---------------- | ------------------- | | Self Service | WIP | | Catalog Services | WIP | | Auth | Not implemented yet | @@ -34,19 +34,22 @@ That's it! ### Installation +#### Using Docker + Today you can run Torii using Docker Compose. In the future, we will provide a Helm chart to deploy Torii on Kubernetes. +It starts a full environment with a Postgres instance. + ```bash docker-compose up -``` -If you want to run it locally you will need to start Postgres DB, the backend and the frontend separately. - -```bash -# Start Postgres -docker-compose -f docker-compose-dev.yaml up +# Alternatively, you can use tilt +# https://tilt.dev +tilt up ``` +#### Locally + ```bash # Start the backend cd backend @@ -135,16 +138,16 @@ self_service: - my-scripts/environment_management.py - create - --name - - {{name}} # this is a variable that will be replaced by the value of the field 'name' + - {{ name }} # this is a variable that will be replaced by the value of the field 'name' delayed_command: - command: - python - my-scripts/environment_management.py - delete - --name - - {{name}} + - {{ name }} delay: - hours: {{ttl}} # this is a variable that will be replaced by the value of the field 'ttl' + hours: {{ ttl }} # this is a variable that will be replaced by the value of the field 'ttl' ``` In this example, we define a self-service section with a single action called `new-testing-environment`. This action has four fields: @@ -158,171 +161,89 @@ When the developer fills the form and submits it, Torii will run the `post_valid If the script exits with a non-zero exit code, the action will fail. If the script exits with a zero exit code, Torii will run the `delayed_command` script after the specified delay. 
-[//]: # (### Advanced Configuration) - -[//]: # () - -[//]: # (#### Autocomplete Fetcher) - -[//]: # () - -[//]: # (An autocomplete fetcher is a script that must print a JSON on standard output. The JSON must contain a `results` key that contains a list of) - -[//]: # (values.) - -[//]: # () - -[//]: # (```json) - -[//]: # ({) - -[//]: # ( "results": [) - -[//]: # ( "val 1",) - -[//]: # ( "val 2",) - -[//]: # ( "val 3") - -[//]: # ( ]) - -[//]: # (}) - -[//]: # (```) - -[//]: # () - -[//]: # (Example of autocomplete fetcher in python:) - -[//]: # () - -[//]: # (```python) - -[//]: # (import json) - -[//]: # () - -[//]: # () - -[//]: # (def get_data_from_fake_api():) - -[//]: # ( return [) - -[//]: # ( 'val 1',) - -[//]: # ( 'val 2',) - -[//]: # ( 'val 3',) - -[//]: # ( ]) - -[//]: # () - -[//]: # () - -[//]: # (if __name__ == '__main__':) - -[//]: # ( # do your stuff here) - -[//]: # ( results = get_data_from_fake_api()) - -[//]: # () - -[//]: # ( data = {'results': results}) - -[//]: # () - -[//]: # ( # print json on standard output) - -[//]: # ( print(json.dumps(data))) - -[//]: # (```) - -[//]: # () - -[//]: # (#### Validation Script) - -[//]: # () - -[//]: # (A validation script can be any kind of script. It can be a bash script, a python script, a terraform script, etc. The script must exit with) - -[//]: # (a non-zero exit code if the validation fails.) - -[//]: # () - -[//]: # (```bash) - -[//]: # (#!/bin/bash) - -[//]: # () - -[//]: # (set -e # exit on error) - -[//]: # (# print error on standard error output) - -[//]: # () - -[//]: # (# do your stuff here) - -[//]: # (exit 0) - -[//]: # (```) - -[//]: # () - -[//]: # (#### Post Validation Script) - -[//]: # () - -[//]: # (An post validation script can be any kind of script. It can be a bash script, a python script, a terraform script, etc.) - -[//]: # () - -[//]: # (- The script must exit with a non-zero exit code if the validation fails.) - -[//]: # (- The script must be idempotent. 
It can be executed multiple times without side effects.) - -[//]: # (- The output of the script must be a JSON that contains the defined model keys with their values. (Torii will update the model with) - -[//]: # ( the values returned by the script)) - -[//]: # () - -[//]: # (```json) - -[//]: # ({) - -[//]: # ( "status": "success",) - -[//]: # ( "url": "https://my-service.com",) - -[//]: # ( "username": "my-username",) - -[//]: # ( "password": "my-password") - -[//]: # (}) - -[//]: # (```) - -[//]: # () - -[//]: # (```bash) - -[//]: # (#!/bin/bash) - -[//]: # () - -[//]: # (set -e # exit on error) - -[//]: # (# print error on standard error output) - -[//]: # () - -[//]: # (# do your stuff here) - -[//]: # (exit 0) - -[//]: # (```) +[//]: # "### Advanced Configuration" +[//]: # +[//]: # "#### Autocomplete Fetcher" +[//]: # +[//]: # "An autocomplete fetcher is a script that must print a JSON on standard output. The JSON must contain a `results` key that contains a list of" +[//]: # "values." +[//]: # +[//]: # "```json" +[//]: # "{" +[//]: # ' "results": [' +[//]: # ' "val 1",' +[//]: # ' "val 2",' +[//]: # ' "val 3"' +[//]: # " ]" +[//]: # "}" +[//]: # "```" +[//]: # +[//]: # "Example of autocomplete fetcher in python:" +[//]: # +[//]: # "```python" +[//]: # "import json" +[//]: # +[//]: # +[//]: # "def get_data_from_fake_api():" +[//]: # " return [" +[//]: # " 'val 1'," +[//]: # " 'val 2'," +[//]: # " 'val 3'," +[//]: # " ]" +[//]: # +[//]: # +[//]: # "if __name__ == '__main__':" +[//]: # " # do your stuff here" +[//]: # " results = get_data_from_fake_api()" +[//]: # +[//]: # " data = {'results': results}" +[//]: # +[//]: # " # print json on standard output" +[//]: # " print(json.dumps(data))" +[//]: # "```" +[//]: # +[//]: # "#### Validation Script" +[//]: # +[//]: # "A validation script can be any kind of script. It can be a bash script, a python script, a terraform script, etc. The script must exit with" +[//]: # "a non-zero exit code if the validation fails." 
+[//]: # +[//]: # "```bash" +[//]: # "#!/bin/bash" +[//]: # +[//]: # "set -e # exit on error" +[//]: # "# print error on standard error output" +[//]: # +[//]: # "# do your stuff here" +[//]: # "exit 0" +[//]: # "```" +[//]: # +[//]: # "#### Post Validation Script" +[//]: # +[//]: # "A post validation script can be any kind of script. It can be a bash script, a python script, a terraform script, etc." +[//]: # +[//]: # "- The script must exit with a non-zero exit code if the validation fails." +[//]: # "- The script must be idempotent. It can be executed multiple times without side effects." +[//]: # "- The output of the script must be a JSON that contains the defined model keys with their values. (Torii will update the model with" +[//]: # " the values returned by the script)" +[//]: # +[//]: # "```json" +[//]: # "{" +[//]: # ' "status": "success",' +[//]: # ' "url": "https://my-service.com",' +[//]: # ' "username": "my-username",' +[//]: # ' "password": "my-password"' +[//]: # "}" +[//]: # "```" +[//]: # +[//]: # "```bash" +[//]: # "#!/bin/bash" +[//]: # +[//]: # "set -e # exit on error" +[//]: # "# print error on standard error output" +[//]: # +[//]: # "# do your stuff here" +[//]: # "exit 0" +[//]: # "```" ## Design @@ -333,6 +254,14 @@ Please refer to the [DESIGN.md](./DESIGN.md) file for more information. Torii is still in early development. If you want to contribute, please open an issue or a pull request. We will improve the contribution guidelines as soon as possible. +### Linting + +Use the Rust formatter for linting the code + +```bash +cargo fmt --all +``` + ## Motivation Today you have the choice between three options to build your Internal Developer Portal: @@ -347,7 +276,7 @@ Today you have the choice between three options to build your Internal Developer Torii is a simple, powerful, and extensible open-source Internal Developer Portal that aims to be the best of all worlds. 
It's easy to extend and customize, it's free, and you have control over the codebase. ---- +--- Curious to understand in more detail the motivation behind Torii? Read these articles: diff --git a/Tiltfile b/Tiltfile new file mode 100644 index 0000000..8ac8a97 --- /dev/null +++ b/Tiltfile @@ -0,0 +1 @@ +docker_compose("docker-compose.yaml") diff --git a/backend/Dockerfile b/backend/Dockerfile index 3d1d8a2..04bce68 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.77.2-bookworm as builder +FROM rust:1.79-bookworm as builder WORKDIR /app diff --git a/backend/Dockerfile.dev b/backend/Dockerfile.dev new file mode 100644 index 0000000..d43add1 --- /dev/null +++ b/backend/Dockerfile.dev @@ -0,0 +1,10 @@ +FROM rust:1.79-bookworm + +WORKDIR /app + +RUN apt update && \ + apt install -y libssl-dev python3 nodejs && \ + ln -s /usr/bin/python3 /usr/bin/python && \ + cargo install cargo-watch + +COPY . . diff --git a/backend/examples/config.yaml b/backend/examples/config.yaml index 32b552c..d819fee 100644 --- a/backend/examples/config.yaml +++ b/backend/examples/config.yaml @@ -85,6 +85,18 @@ self_service: - bash - examples/dumb_script_ok.sh # AND then this one output_model: string (optional) # model name + - slug: get-temperature-paris + name: Get current temperature in Paris + description: Script to detect and return the current temperature in Celsius in Paris + icon: sun + icon_color: rose + fields: [] + post_validate: + - command: + - node + - examples/node_get_weather_paris.js + timeout: 5 + models: - name: string description: string (optional) diff --git a/backend/examples/node_get_weather_paris.js b/backend/examples/node_get_weather_paris.js new file mode 100644 index 0000000..6853dea --- /dev/null +++ b/backend/examples/node_get_weather_paris.js @@ -0,0 +1,12 @@ +console.log('Executing script node get weather Paris'); + +for(let i = 0; i < 20; i++) { + console.error(`Log error ${i}`); +} + +fetch('https://wttr.in/Paris?format=j1') + 
.then(res => res.json()) + .then(data => { + console.log(`Currently it's ${data.current_condition.at(0).temp_C}°C in Paris.`); + console.log('Finished!'); + }); diff --git a/backend/src/app_config.rs b/backend/src/app_config.rs index e69de29..8b13789 100644 --- a/backend/src/app_config.rs +++ b/backend/src/app_config.rs @@ -0,0 +1 @@ + diff --git a/backend/src/cli.rs b/backend/src/cli.rs index 5e8aa79..1eb83a8 100644 --- a/backend/src/cli.rs +++ b/backend/src/cli.rs @@ -10,5 +10,4 @@ pub struct CLI { /// Torii configuration file #[clap(short, long, value_name = "configuration file")] pub config: PathBuf, - } diff --git a/backend/src/database.rs b/backend/src/database.rs index 048a470..7b6b365 100644 --- a/backend/src/database.rs +++ b/backend/src/database.rs @@ -1,8 +1,8 @@ -use std::str::FromStr; - use serde::{Deserialize, Serialize}; -use sqlx::{Executor, Pool, Postgres}; use sqlx::types::Uuid; +use sqlx::{Executor, Pool, Postgres}; +use std::str::FromStr; +use tracing::debug; use crate::errors::QError; @@ -37,6 +37,15 @@ CREATE TABLE IF NOT EXISTS self_service_runs CREATE INDEX IF NOT EXISTS self_service_runs_section_slug_idx ON self_service_runs (section_slug); CREATE INDEX IF NOT EXISTS self_service_runs_action_slug_idx ON self_service_runs (action_slug); + +-- create a table to store logs from self services runs +CREATE TABLE IF NOT EXISTS self_service_runs_logs ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL, + self_service_runs_id UUID NOT NULL, + message TEXT NOT NULL, + is_stderr BOOLEAN NOT NULL +); "#; #[derive(sqlx::FromRow)] @@ -51,6 +60,15 @@ pub struct SelfServiceRun { tasks: serde_json::Value, } +#[derive(sqlx::FromRow)] +pub struct SelfServiceRunLog { + id: Uuid, + created_at: chrono::NaiveDateTime, + self_service_runs_id: Uuid, + message: String, + is_stderr: bool, +} + #[derive(sqlx::Type, Clone, Serialize, Deserialize, Debug)] #[sqlx(rename_all = "SCREAMING_SNAKE_CASE")] 
#[serde(rename_all = "SCREAMING_SNAKE_CASE")] @@ -81,6 +99,18 @@ impl SelfServiceRun { } } +impl SelfServiceRunLog { + pub fn to_json(&self) -> SelfServiceRunLogJson { + SelfServiceRunLogJson { + id: self.id.to_string(), + created_at: self.created_at.to_string(), + self_service_runs_id: self.self_service_runs_id.to_string(), + is_stderr: self.is_stderr, + message: self.message.to_string(), + } + } +} + #[derive(Serialize, Deserialize, Debug)] pub struct SelfServiceRunJson { pub id: String, @@ -97,6 +127,7 @@ pub struct SelfServiceRunJson { pub struct SelfServiceRunLogJson { pub id: String, pub created_at: String, + pub self_service_runs_id: String, pub is_stderr: bool, pub message: String, } @@ -114,55 +145,65 @@ pub async fn list_self_service_runs_by_section_and_action_slugs( section_slug: &str, action_slug: &str, ) -> Result, QError> { - Ok( - sqlx::query_as::<_, SelfServiceRun>( - r#" + Ok(sqlx::query_as::<_, SelfServiceRun>( + r#" SELECT * FROM self_service_runs WHERE section_slug = $1 AND action_slug = $2 ORDER BY created_at DESC - "# - ) - .bind(section_slug) - .bind(action_slug) - .fetch_all(pg_pool) - .await? + "#, ) + .bind(section_slug) + .bind(action_slug) + .fetch_all(pg_pool) + .await?) } pub async fn list_self_service_runs_by_section_slug( pg_pool: &Pool, section_slug: &str, ) -> Result, QError> { - Ok( - sqlx::query_as::<_, SelfServiceRun>( - r#" + Ok(sqlx::query_as::<_, SelfServiceRun>( + r#" SELECT * FROM self_service_runs WHERE section_slug = $1 ORDER BY created_at DESC - "# - ) - .bind(section_slug) - .fetch_all(pg_pool) - .await? + "#, ) + .bind(section_slug) + .fetch_all(pg_pool) + .await?) } pub async fn list_self_service_runs( pg_pool: &Pool, ) -> Result, QError> { - Ok( - sqlx::query_as::<_, SelfServiceRun>( - r#" + Ok(sqlx::query_as::<_, SelfServiceRun>( + r#" SELECT * FROM self_service_runs ORDER BY created_at DESC - "# - ) - .fetch_all(pg_pool) - .await? + "#, ) + .fetch_all(pg_pool) + .await?) 
+} + +pub async fn get_self_service_runs( + pg_pool: &Pool, + id: &str, +) -> Result { + Ok(sqlx::query_as::<_, SelfServiceRun>( + r#" + SELECT * + FROM self_service_runs + WHERE id = $1 + "#, + ) + .bind(Uuid::from_str(id).unwrap()) + .fetch_one(pg_pool) + .await?) } pub async fn insert_self_service_run( @@ -173,22 +214,44 @@ pub async fn insert_self_service_run( input_payload: &serde_json::Value, tasks: &serde_json::Value, ) -> Result { - Ok( - sqlx::query_as::<_, SelfServiceRun>( - r#" + debug!("Insert self service run with value {}", input_payload); + + Ok(sqlx::query_as::<_, SelfServiceRun>( + r#" INSERT INTO self_service_runs (section_slug, action_slug, status, input_payload, tasks) VALUES ($1, $2, $3, $4, $5) RETURNING * - "# - ) - .bind(section_slug) - .bind(action_slug) - .bind(status) - .bind(input_payload) - .bind(tasks) - .fetch_one(pg_pool) - .await? + "#, ) + .bind(section_slug) + .bind(action_slug) + .bind(status) + .bind(input_payload) + .bind(tasks) + .fetch_one(pg_pool) + .await?) +} + +pub async fn insert_self_service_run_log( + pg_pool: &Pool, + self_service_runs_id: &str, + message: String, + is_stderr: bool, +) -> Result { + debug!("Insert self service run log {}", message); + + Ok(sqlx::query_as::<_, SelfServiceRunLog>( + r#" + INSERT INTO self_service_runs_logs (self_service_runs_id, message, is_stderr) + VALUES ($1, $2, $3) + RETURNING * + "#, + ) + .bind(Uuid::from_str(self_service_runs_id).unwrap()) + .bind(message) + .bind(is_stderr) + .fetch_one(pg_pool) + .await?) } pub async fn update_self_service_run( @@ -197,19 +260,33 @@ pub async fn update_self_service_run( status: Status, tasks: &serde_json::Value, ) -> Result { - Ok( - sqlx::query_as::<_, SelfServiceRun>( - r#" + Ok(sqlx::query_as::<_, SelfServiceRun>( + r#" UPDATE self_service_runs SET status = $1, tasks = $2 WHERE id = $3 RETURNING * - "# - ) - .bind(status) - .bind(tasks) - .bind(Uuid::from_str(id).unwrap()) - .fetch_one(pg_pool) - .await? 
+ "#, + ) + .bind(status) + .bind(tasks) + .bind(Uuid::from_str(id).unwrap()) + .fetch_one(pg_pool) + .await?) +} + +pub async fn list_logs_by_self_service_run_id( + pg_pool: &Pool, + id: &str, +) -> Result, QError> { + Ok(sqlx::query_as::<_, SelfServiceRunLog>( + r#" + SELECT * + FROM self_service_runs_logs + where self_service_runs_id = $1; + "#, ) + .bind(Uuid::from_str(id).unwrap()) + .fetch_all(pg_pool) + .await?) } diff --git a/backend/src/main.rs b/backend/src/main.rs index f6c5a9d..51831ff 100644 --- a/backend/src/main.rs +++ b/backend/src/main.rs @@ -1,31 +1,35 @@ -use std::env; -use std::fs::File; -use std::sync::Arc; - -use axum::{Extension, Router}; +use crate::cli::CLI; +use crate::database::init_database; +use crate::self_service::controllers::{ + exec_self_service_section_action_post_validate_scripts, + exec_self_service_section_action_validate_scripts, get_self_service_runs_by_id, + list_self_service_section_actions, list_self_service_section_run_logs, + list_self_service_section_runs, list_self_service_section_runs_by_section_and_action_slugs, + list_self_service_section_runs_by_section_slug, list_self_service_sections, +}; +use crate::self_service::services::BackgroundWorkerTask; +use crate::yaml_config::YamlConfig; use axum::http::{Method, StatusCode, Uri}; use axum::routing::{get, post}; +use axum::{Extension, Router}; use clap::Parser; use sqlx::postgres::PgPoolOptions; +use std::env; +use std::fs::File; +use std::sync::Arc; use tower_http::cors::{AllowHeaders, Any, CorsLayer}; -use tracing::{error, info}; use tracing::log::warn; +use tracing::{error, info}; use tracing_subscriber::layer::SubscriberExt; use tracing_subscriber::util::SubscriberInitExt; -use crate::cli::CLI; -use crate::database::init_database; -use crate::self_service::controllers::{exec_self_service_section_action_post_validate_scripts, exec_self_service_section_action_validate_scripts, list_self_service_section_actions, list_self_service_section_run_logs, 
list_self_service_section_runs, list_self_service_section_runs_by_section_and_action_slugs, list_self_service_section_runs_by_section_slug, list_self_service_sections}; -use crate::self_service::services::BackgroundWorkerTask; -use crate::yaml_config::YamlConfig; - -mod yaml_config; mod app_config; -mod errors; mod cli; mod constants; -mod self_service; mod database; +mod errors; +mod self_service; +mod yaml_config; pub async fn unknown_route(uri: Uri) -> (StatusCode, String) { let message = format!("unknown route for {uri}"); @@ -78,7 +82,9 @@ async fn main() { let pg_pool = match PgPoolOptions::new() .max_connections(5) .acquire_timeout(std::time::Duration::from_secs(5)) - .connect(&connection_string).await { + .connect(&connection_string) + .await + { Ok(pool) => pool, Err(err) => { error!("failed to connect to database: {}", err); @@ -109,21 +115,49 @@ async fn main() { .route("/", get(|| async { "OK" })) .route("/healthz", get(|| async { "OK" })) .route("/selfServiceSections", get(list_self_service_sections)) - .route("/selfServiceSections/runs", get(list_self_service_section_runs)) - .route("/selfServiceSectionsRuns/:slug/logs", get(list_self_service_section_run_logs)) - .route("/selfServiceSections/:slug/actions", get(list_self_service_section_actions)) - .route("/selfServiceSections/:slug/runs", get(list_self_service_section_runs_by_section_slug)) - .route("/selfServiceSections/:slug/actions/:slug/validate", post(exec_self_service_section_action_validate_scripts)) - .route("/selfServiceSections/:slug/actions/:slug/execute", post(exec_self_service_section_action_post_validate_scripts)) - .route("/selfServiceSections/:slug/actions/:slug/runs", get(list_self_service_section_runs_by_section_and_action_slugs)) + .route("/selfServiceSections/:id", get(get_self_service_runs_by_id)) + .route( + "/selfServiceSections/:slug/actions", + get(list_self_service_section_actions), + ) + .route( + "/selfServiceSections/:slug/actions/:slug/execute", + 
post(exec_self_service_section_action_post_validate_scripts), + ) + .route( + "/selfServiceSections/:slug/actions/:slug/runs", + get(list_self_service_section_runs_by_section_and_action_slugs), + ) + .route( + "/selfServiceSections/:slug/actions/:slug/validate", + post(exec_self_service_section_action_validate_scripts), + ) + .route( + "/selfServiceSections/:slug/runs", + get(list_self_service_section_runs_by_section_slug), + ) + .route( + "/selfServiceSections/runs", + get(list_self_service_section_runs), + ) + .route( + "/selfServiceSectionsRuns/:slug/logs", + get(list_self_service_section_run_logs), + ) .layer(Extension(yaml_config)) .layer(Extension(tx)) .layer(Extension(pg_pool)) .layer( CorsLayer::new() .allow_origin(Any) - .allow_methods(vec![Method::GET, Method::POST, Method::PUT, Method::DELETE, Method::OPTIONS]) - .allow_headers(AllowHeaders::any()) + .allow_methods(vec![ + Method::GET, + Method::POST, + Method::PUT, + Method::DELETE, + Method::OPTIONS, + ]) + .allow_headers(AllowHeaders::any()), ); //.route("/catalog/:id", get(catalog::get_catalog_by_id)) //.route("/catalog", post(catalog::create_catalog)); diff --git a/backend/src/self_service/controllers.rs b/backend/src/self_service/controllers.rs index 5eac44e..409cd83 100644 --- a/backend/src/self_service/controllers.rs +++ b/backend/src/self_service/controllers.rs @@ -1,39 +1,100 @@ use std::sync::Arc; -use axum::{debug_handler, Extension, Json}; use axum::extract::Path; use axum::http::StatusCode; -use chrono::{NaiveDateTime, Utc}; +use axum::{debug_handler, Extension, Json}; use tokio::sync::mpsc::Sender; -use tracing::error; +use tracing::{debug, error, info}; use crate::database; -use crate::database::{insert_self_service_run, SelfServiceRunJson, SelfServiceRunLogJson, Status}; -use crate::self_service::{check_json_payload_against_yaml_config_fields, execute_command, ExecValidateScriptRequest, find_self_service_section_by_slug, get_self_service_section_and_action, JobResponse, ResultsResponse}; 
+use crate::database::{ + insert_self_service_run, list_logs_by_self_service_run_id, SelfServiceRunJson, + SelfServiceRunLogJson, Status, +}; use crate::self_service::services::BackgroundWorkerTask; -use crate::yaml_config::{SelfServiceSectionActionYamlConfig, SelfServiceSectionYamlConfig, YamlConfig}; +use crate::self_service::ResultResponse; +use crate::self_service::{ + check_json_payload_against_yaml_config_fields, execute_command, + find_self_service_section_by_slug, get_self_service_section_and_action, + ExecValidateScriptRequest, JobResponse, ResultsResponse, +}; +use crate::yaml_config::{ + SelfServiceSectionActionYamlConfig, SelfServiceSectionYamlConfig, YamlConfig, +}; #[debug_handler] pub async fn list_self_service_sections( Extension(yaml_config): Extension>, -) -> (StatusCode, Json>) { - (StatusCode::OK, Json(ResultsResponse { message: None, results: yaml_config.self_service.sections.clone() })) +) -> ( + StatusCode, + Json>, +) { + ( + StatusCode::OK, + Json(ResultsResponse { + message: None, + results: yaml_config.self_service.sections.clone(), + }), + ) +} + +#[debug_handler] +pub async fn get_self_service_runs_by_id( + Extension(pg_pool): Extension>, + Path(id): Path, +) -> (StatusCode, Json>) { + match database::get_self_service_runs(&pg_pool, &id).await { + Ok(self_service) => ( + StatusCode::OK, + Json(ResultResponse { + message: None, + result: Some(self_service.to_json()), + }), + ), + Err(err) => { + error!("Failed to get self service: {:?}", err); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ResultResponse { + message: Some(err.to_string()), + result: None, + }), + ) + } + } } #[debug_handler] pub async fn list_self_service_section_actions( Extension(yaml_config): Extension>, Path(section_slug): Path, -) -> (StatusCode, Json>) { - let section = match find_self_service_section_by_slug(&yaml_config.self_service.sections, section_slug.as_str()) { +) -> ( + StatusCode, + Json>, +) { + let section = match find_self_service_section_by_slug( + 
&yaml_config.self_service.sections, + section_slug.as_str(), + ) { Some(section) => section, - None => return (StatusCode::NOT_FOUND, Json(ResultsResponse { - message: Some(format!("Self service section '{}' not found", section_slug)), - results: vec![], - })) + None => { + return ( + StatusCode::NOT_FOUND, + Json(ResultsResponse { + message: Some(format!("Self service section '{}' not found", section_slug)), + results: vec![], + }), + ) + } }; - (StatusCode::OK, Json(ResultsResponse { message: None, results: section.actions.clone().unwrap_or(vec![]) })) + ( + StatusCode::OK, + Json(ResultsResponse { + message: None, + results: section.actions.clone().unwrap_or(vec![]), + }), + ) } #[debug_handler] @@ -41,13 +102,32 @@ pub async fn list_self_service_section_runs_by_section_and_action_slugs( Extension(pg_pool): Extension>, Path((section_slug, action_slug)): Path<(String, String)>, ) -> (StatusCode, Json>) { - match database::list_self_service_runs_by_section_and_action_slugs(&pg_pool, §ion_slug, &action_slug).await { - Ok(action_execution_statuses) => { - (StatusCode::OK, Json(ResultsResponse { message: None, results: action_execution_statuses.iter().map(|x| x.to_json()).collect() })) - } + match database::list_self_service_runs_by_section_and_action_slugs( + &pg_pool, + §ion_slug, + &action_slug, + ) + .await + { + Ok(action_execution_statuses) => ( + StatusCode::OK, + Json(ResultsResponse { + message: None, + results: action_execution_statuses + .iter() + .map(|x| x.to_json()) + .collect(), + }), + ), Err(err) => { error!("failed to list action execution statuses: {:?}", err); - (StatusCode::INTERNAL_SERVER_ERROR, Json(ResultsResponse { message: Some(err.to_string()), results: vec![] })) + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ResultsResponse { + message: Some(err.to_string()), + results: vec![], + }), + ) } } } @@ -58,12 +138,25 @@ pub async fn list_self_service_section_runs_by_section_slug( Path(section_slug): Path, ) -> (StatusCode, Json>) { match 
database::list_self_service_runs_by_section_slug(&pg_pool, §ion_slug).await { - Ok(action_execution_statuses) => { - (StatusCode::OK, Json(ResultsResponse { message: None, results: action_execution_statuses.iter().map(|x| x.to_json()).collect() })) - } + Ok(action_execution_statuses) => ( + StatusCode::OK, + Json(ResultsResponse { + message: None, + results: action_execution_statuses + .iter() + .map(|x| x.to_json()) + .collect(), + }), + ), Err(err) => { error!("failed to list action execution statuses: {:?}", err); - (StatusCode::INTERNAL_SERVER_ERROR, Json(ResultsResponse { message: Some(err.to_string()), results: vec![] })) + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ResultsResponse { + message: Some(err.to_string()), + results: vec![], + }), + ) } } } @@ -73,12 +166,22 @@ pub async fn list_self_service_section_runs( Extension(pg_pool): Extension>, ) -> (StatusCode, Json>) { match database::list_self_service_runs(&pg_pool).await { - Ok(self_service_runs) => { - (StatusCode::OK, Json(ResultsResponse { message: None, results: self_service_runs.iter().map(|x| x.to_json()).collect() })) - } + Ok(self_service_runs) => ( + StatusCode::OK, + Json(ResultsResponse { + message: None, + results: self_service_runs.iter().map(|x| x.to_json()).collect(), + }), + ), Err(err) => { error!("failed to list action execution statuses: {:?}", err); - (StatusCode::INTERNAL_SERVER_ERROR, Json(ResultsResponse { message: Some(err.to_string()), results: vec![] })) + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ResultsResponse { + message: Some(err.to_string()), + results: vec![], + }), + ) } } } @@ -86,33 +189,40 @@ pub async fn list_self_service_section_runs( #[debug_handler] pub async fn list_self_service_section_run_logs( Extension(pg_pool): Extension>, - Path(logs_slug): Path, + Path(task_id): Path, ) -> (StatusCode, Json>) { - - // mock data for now - let mut data = vec![]; - - for i in 1..1000 { - data.push(SelfServiceRunLogJson { - id: i.to_string(), - created_at: 
"2021-08-01T00:00:00Z".to_string(), - is_stderr: i % 2 == 0, - message: if i % 2 == 0 { format!("this is an error message {}", i) } else { format!("this is a log message {}", i) }, - }); + info!("List logs from self service run id={}", task_id); + + match list_logs_by_self_service_run_id(&pg_pool, &task_id).await { + Ok(logs) => ( + StatusCode::OK, + Json(ResultsResponse { + message: None, + results: logs.iter().map(|x| x.to_json()).collect(), + }), + ), + Err(err) => { + error!("failed to list action execution statuses: {:?}", err); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ResultsResponse { + message: Some(err.to_string()), + results: vec![], + }), + ) + } } - - (StatusCode::OK, Json(ResultsResponse { - message: None, - results: data, - })) } #[debug_handler] pub async fn exec_self_service_section_action_validate_scripts( Extension(yaml_config): Extension>, Path((section_slug, action_slug)): Path<(String, String)>, + Extension(pg_pool): Extension>, Json(req): Json, ) -> (StatusCode, Json) { + debug!("validate"); + let _ = match check_json_payload_against_yaml_config_fields( section_slug.as_str(), action_slug.as_str(), @@ -120,22 +230,39 @@ pub async fn exec_self_service_section_action_validate_scripts( &yaml_config, ) { Ok(x) => x, - Err(err) => return (StatusCode::BAD_REQUEST, Json(JobResponse { - message: Some(err), - })) + Err(err) => { + return ( + StatusCode::BAD_REQUEST, + Json(JobResponse { message: Some(err) }), + ) + } }; - let (_, action) = match get_self_service_section_and_action(&yaml_config, section_slug.as_str(), action_slug.as_str()) { + let (_, action) = match get_self_service_section_and_action( + &yaml_config, + section_slug.as_str(), + action_slug.as_str(), + ) { Ok((section, action)) => (section, action), - Err(err) => return err + Err(err) => return err, }; for cmd in action.validate.as_ref().unwrap_or(&vec![]) { - let _ = match execute_command(cmd, req.payload.to_string().as_str()).await { + let _ = match execute_command( + 
pg_pool.to_owned(), + cmd, + req.payload.to_string().as_str(), + "uuid", + ) + .await + { Ok(_) => (), - Err(err) => return (StatusCode::BAD_REQUEST, Json(JobResponse { - message: Some(err), - })) + Err(err) => { + return ( + StatusCode::BAD_REQUEST, + Json(JobResponse { message: Some(err) }), + ) + } }; } @@ -150,6 +277,8 @@ pub async fn exec_self_service_section_action_post_validate_scripts( Path((section_slug, action_slug)): Path<(String, String)>, Json(req): Json, ) -> (StatusCode, Json) { + info!("Self service execute"); + let _ = match check_json_payload_against_yaml_config_fields( section_slug.as_str(), action_slug.as_str(), @@ -157,117 +286,137 @@ pub async fn exec_self_service_section_action_post_validate_scripts( &yaml_config, ) { Ok(x) => x, - Err(err) => return (StatusCode::BAD_REQUEST, Json(JobResponse { - message: Some(err), - })) + Err(err) => { + return ( + StatusCode::BAD_REQUEST, + Json(JobResponse { message: Some(err) }), + ) + } }; - let service = match get_self_service_section_and_action(&yaml_config, section_slug.as_str(), action_slug.as_str()) { + let service = match get_self_service_section_and_action( + &yaml_config, + section_slug.as_str(), + action_slug.as_str(), + ) { Ok((_, service)) => service, - Err(err) => return err + Err(err) => return err, }; - let ces = match insert_self_service_run( + let self_service_run = match insert_self_service_run( &pg_pool, §ion_slug, &action_slug, Status::Queued, &req.payload, &serde_json::Value::Array(vec![]), - ).await { - Ok(ces) => ces, - Err(err) => return (StatusCode::INTERNAL_SERVER_ERROR, Json(JobResponse { - message: Some(err.to_string()), - })) + ) + .await + { + Ok(self_service_run) => self_service_run, + Err(err) => { + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(JobResponse { + message: Some(err.to_string()), + }), + ) + } }; + info!("Insertion OK - {}", self_service_run.id()); + // execute post validate scripts - let _ = tx.send(BackgroundWorkerTask::new( - ces.id(), - 
service.clone(), - req, - )).await.unwrap_or_else(|err| { - error!("failed to send task to background worker: {}", err); - // TODO change catalog execution status to Failure - }); - - (StatusCode::NO_CONTENT, Json(JobResponse { message: Some("workflow executed".to_string()) })) + let _ = tx + .send(BackgroundWorkerTask::new( + self_service_run.id(), + service.clone(), + req, + )) + .await + .unwrap_or_else(|err| { + error!("failed to send task to background worker: {}", err); + // TODO change catalog execution status to Failure + }); + + ( + StatusCode::CREATED, + Json(JobResponse { + message: Some("workflow executed".to_string()), + }), + ) } #[cfg(test)] mod tests { use std::sync::Arc; - use axum::{Extension, Json}; use axum::extract::Path; use axum::http::StatusCode; + use axum::{Extension, Json}; use crate::self_service::controllers::exec_self_service_section_action_validate_scripts; use crate::self_service::ExecValidateScriptRequest; - use crate::yaml_config::{SelfServiceSectionActionFieldYamlConfig, SelfServiceSectionActionPostValidateYamlConfig, SelfServiceSectionActionValidateYamlConfig, SelfServiceSectionActionYamlConfig, SelfServiceSectionYamlConfig, SelfServiceYamlConfig, YamlConfig}; use crate::yaml_config::ActionFieldType::Text; + use crate::yaml_config::{ + SelfServiceSectionActionFieldYamlConfig, SelfServiceSectionActionPostValidateYamlConfig, + SelfServiceSectionActionValidateYamlConfig, SelfServiceSectionActionYamlConfig, + SelfServiceSectionYamlConfig, SelfServiceYamlConfig, YamlConfig, + }; fn get_yaml_config() -> YamlConfig { YamlConfig { self_service: SelfServiceYamlConfig { - sections: vec![ - SelfServiceSectionYamlConfig { - slug: "section-1".to_string(), - name: "Section 1".to_string(), + sections: vec![SelfServiceSectionYamlConfig { + slug: "section-1".to_string(), + name: "Section 1".to_string(), + description: None, + actions: Some(vec![SelfServiceSectionActionYamlConfig { + slug: "action-1".to_string(), + name: "Action 1".to_string(), 
description: None, - actions: Some(vec![ - SelfServiceSectionActionYamlConfig { - slug: "action-1".to_string(), - name: "Action 1".to_string(), + icon: None, + icon_color: None, + fields: Some(vec![ + SelfServiceSectionActionFieldYamlConfig { + slug: "field-1".to_string(), + title: "Field 1".to_string(), + description: None, + placeholder: None, + type_: Text, + default: None, + required: Some(true), + autocomplete_fetcher: None, + }, + SelfServiceSectionActionFieldYamlConfig { + slug: "field-2".to_string(), + title: "Field 2".to_string(), description: None, - icon: None, - icon_color: None, - fields: Some(vec![ - SelfServiceSectionActionFieldYamlConfig { - slug: "field-1".to_string(), - title: "Field 1".to_string(), - description: None, - placeholder: None, - type_: Text, - default: None, - required: Some(true), - autocomplete_fetcher: None, - }, - SelfServiceSectionActionFieldYamlConfig { - slug: "field-2".to_string(), - title: "Field 2".to_string(), - description: None, - placeholder: None, - type_: Text, - default: None, - required: None, - autocomplete_fetcher: None, - }, - ]), - validate: Some(vec![ - SelfServiceSectionActionValidateYamlConfig { - timeout: None, - command: vec![ - "python3".to_string(), - "examples/validation_script_ok.py".to_string(), - ], - }, - ]), - post_validate: Some(vec![ - SelfServiceSectionActionPostValidateYamlConfig { - timeout: None, - command: vec![ - "python3".to_string(), - "examples/validation_script_ok.py".to_string(), - ], - output_model: None, - }, - ]), + placeholder: None, + type_: Text, + default: None, + required: None, + autocomplete_fetcher: None, }, ]), - }, - ], - } + validate: Some(vec![SelfServiceSectionActionValidateYamlConfig { + timeout: None, + command: vec![ + "python3".to_string(), + "examples/validation_script_ok.py".to_string(), + ], + }]), + post_validate: Some(vec![SelfServiceSectionActionPostValidateYamlConfig { + timeout: None, + command: vec![ + "python3".to_string(), + 
"examples/validation_script_ok.py".to_string(), + ], + output_model: None, + }]), + }]), + }], + }, } } @@ -280,11 +429,12 @@ mod tests { Path(("section-1".to_string(), "action-1".to_string())), Json(ExecValidateScriptRequest { payload: serde_json::json!({ - "field-1": "value-1", - "field-2": "value-2", - }) + "field-1": "value-1", + "field-2": "value-2", + }), }), - ).await; + ) + .await; assert_eq!(status_code, StatusCode::OK); assert_eq!(job_response.message, None); @@ -295,24 +445,32 @@ mod tests { let mut yaml_config = get_yaml_config(); // add a failing validation script - yaml_config.self_service.sections[0].actions.as_mut().unwrap()[0].validate.as_mut().unwrap().push(SelfServiceSectionActionValidateYamlConfig { - timeout: None, - command: vec![ - "python3".to_string(), - "examples/validation_script_ko.py".to_string(), - ], - }); + yaml_config.self_service.sections[0] + .actions + .as_mut() + .unwrap()[0] + .validate + .as_mut() + .unwrap() + .push(SelfServiceSectionActionValidateYamlConfig { + timeout: None, + command: vec![ + "python3".to_string(), + "examples/validation_script_ko.py".to_string(), + ], + }); let (status_code, job_response) = exec_self_service_section_action_validate_scripts( Extension(Arc::from(yaml_config)), Path(("section-1".to_string(), "action-1".to_string())), Json(ExecValidateScriptRequest { payload: serde_json::json!({ - "field-1": "value-1", - "field-2": "value-2", - }) + "field-1": "value-1", + "field-2": "value-2", + }), }), - ).await; + ) + .await; assert_eq!(status_code, StatusCode::BAD_REQUEST); assert_eq!(job_response.message.as_ref().unwrap().is_empty(), false); diff --git a/backend/src/self_service/mod.rs b/backend/src/self_service/mod.rs index 09baa86..3ac00d8 100644 --- a/backend/src/self_service/mod.rs +++ b/backend/src/self_service/mod.rs @@ -3,11 +3,18 @@ use std::time::Duration; use axum::http::StatusCode; use axum::Json; use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use tokio::io::AsyncBufReadExt; +use 
tokio::io::BufReader; use tokio::process; use tokio::time::timeout; use tracing::debug; -use crate::yaml_config::{ExternalCommand, SelfServiceSectionActionYamlConfig, SelfServiceSectionYamlConfig, YamlConfig}; +use crate::database::insert_self_service_run_log; +use crate::yaml_config::{ + ExternalCommand, SelfServiceSectionActionYamlConfig, SelfServiceSectionYamlConfig, YamlConfig, +}; +use std::process::Stdio; pub mod controllers; pub mod services; @@ -18,6 +25,12 @@ pub struct ResultsResponse { results: Vec, } +#[derive(Serialize, Deserialize)] +pub struct ResultResponse { + message: Option, + result: Option, +} + #[derive(Debug, Serialize, Deserialize, PartialEq)] pub struct JobResponse { message: Option, @@ -35,16 +48,30 @@ pub struct JobOutputResult { pub execution_time_in_millis: u128, } -fn find_self_service_section_by_slug<'a>(sections: &'a Vec, section_slug: &str) -> Option<&'a SelfServiceSectionYamlConfig> { +fn find_self_service_section_by_slug<'a>( + sections: &'a Vec, + section_slug: &str, +) -> Option<&'a SelfServiceSectionYamlConfig> { sections.iter().find(|section| section.slug == section_slug) } -fn find_self_service_action_by_slug<'a>(section: &'a SelfServiceSectionYamlConfig, action_slug: &str) -> Option<&'a SelfServiceSectionActionYamlConfig> { - section.actions.as_ref().unwrap().iter().find(|action| action.slug == action_slug) +fn find_self_service_action_by_slug<'a>( + section: &'a SelfServiceSectionYamlConfig, + action_slug: &str, +) -> Option<&'a SelfServiceSectionActionYamlConfig> { + section + .actions + .as_ref() + .unwrap() + .iter() + .find(|action| action.slug == action_slug) } /// Extract the job output from the environment variable TORII_JSON_OUTPUT and reset it to an empty JSON object -fn consume_job_output_result_from_json_output_env(action_slug: &str, execution_time: u128) -> JobOutputResult { +fn consume_job_output_result_from_json_output_env( + action_slug: &str, + execution_time: u128, +) -> JobOutputResult { let 
job_output_result = match std::env::var("TORII_JSON_OUTPUT") { Ok(json_output) => JobOutputResult { one_liner_command: action_slug.to_string(), @@ -55,7 +82,7 @@ fn consume_job_output_result_from_json_output_env(action_slug: &str, execution_t one_liner_command: action_slug.to_string(), output: serde_json::json!({}), execution_time_in_millis: execution_time, - } + }, }; // reset the environment variable @@ -70,25 +97,26 @@ fn check_json_payload_against_yaml_config_fields( json_payload: &serde_json::Value, yaml_config: &YamlConfig, ) -> Result<(), String> { - let section = match find_self_service_section_by_slug(&yaml_config.self_service.sections, section_slug) { - Some(section) => section, - None => return Err(format!("Self service section '{}' not found", section_slug)) - }; + let section = + match find_self_service_section_by_slug(&yaml_config.self_service.sections, section_slug) { + Some(section) => section, + None => return Err(format!("Self service section '{}' not found", section_slug)), + }; let action = match find_self_service_action_by_slug(section, action_slug) { Some(action) => action, - None => return Err(format!("Action '{}' not found", action_slug)) + None => return Err(format!("Action '{}' not found", action_slug)), }; let fields = match action.fields.as_ref() { Some(fields) => fields, - None => return Err(format!("Action '{}' has no fields", action_slug)) + None => return Err(format!("Action '{}' has no fields", action_slug)), }; for field in fields { let field_value = match json_payload.get(field.slug.as_str()) { Some(field_value) => field_value, - None => return Err(format!("Field '{}' not found in payload", field.slug)) + None => return Err(format!("Field '{}' not found in payload", field.slug)), }; if field.required.unwrap_or(false) && field_value.is_null() { @@ -100,20 +128,32 @@ fn check_json_payload_against_yaml_config_fields( } async fn execute_command( + pg_pool: Arc, external_command: &T, json_payload: &str, -) -> Result where T: 
ExternalCommand { + task_id: &str, +) -> Result +where + T: ExternalCommand, +{ let cmd_one_line = external_command.get_command().join(" "); - debug!("executing validate script '{}' with payload '{}'", &cmd_one_line, json_payload); + debug!( + "executing validate script '{}' with payload '{}'", + &cmd_one_line, json_payload + ); if external_command.get_command().len() == 1 { - return Err(format!("Validate script '{}' is invalid. \ + return Err(format!( + "Validate script '{}' is invalid. \ Be explicit on the command to execute, e.g. 'python examples/validation_script.py'", - external_command.get_command()[0])); + external_command.get_command()[0] + )); } let mut cmd = process::Command::new(&external_command.get_command()[0]); + cmd.stdout(Stdio::piped()); + cmd.stderr(Stdio::piped()); for arg in external_command.get_command()[1..].iter() { cmd.arg(arg); @@ -126,10 +166,63 @@ async fn execute_command( let mut child = match cmd.spawn() { Ok(child) => child, - Err(err) => return Err(format!("Validate script '{}' failed: {}", &cmd_one_line, err)) + Err(err) => { + return Err(format!( + "Validate script '{}' failed: {}", + &cmd_one_line, err + )) + } }; - // get stdout and stderr from child and forward it in real time to the upper function + let stdout = child + .stdout + .take() + .expect("child did not have a handle to stdout"); + let stderr = child + .stderr + .take() + .expect("child did not have a handle to stderr"); + + let mut stdout_reader = BufReader::new(stdout).lines(); + let mut stderr_reader = BufReader::new(stderr).lines(); + + loop { + tokio::select! 
{ + result = stdout_reader.next_line() => { + match result { + Ok(Some(line)) => { + match insert_self_service_run_log(&pg_pool, task_id, line.clone(), false).await { + Ok(x) => x, + Err(_err) => break, + }; + println!("Stdout: {}", line.clone()) + }, + Err(_) => break, + _ => (), + } + } + result = stderr_reader.next_line() => { + match result { + Ok(Some(line)) => { + match insert_self_service_run_log(&pg_pool, task_id, line.clone(), true).await { + Ok(x) => x, + Err(_err) => break, + }; + println!("Stderr: {}", line.clone()) + }, + Err(_) => break, + _ => (), + } + } + result = child.wait() => { + match result { + Ok(exit_code) => println!("33 Child process exited with {}", exit_code), + _ => (), + } + break + } + }; + } let exit_status = match timeout(Duration::from_secs(external_command.get_timeout()), child.wait()).await { Ok(exit_status) => exit_status, @@ -147,38 +240,62 @@ async fn execute_command( }.unwrap(); if !exit_status.success() { - return Err(format!("Validate script '{}' failed: {:?}", &cmd_one_line, exit_status)); + return Err(format!( + "Validate script '{}' failed: {:?}", + &cmd_one_line, exit_status + )); } - Ok(consume_job_output_result_from_json_output_env(cmd_one_line.as_str(), start.elapsed().as_millis())) + Ok(consume_job_output_result_from_json_output_env( + cmd_one_line.as_str(), + start.elapsed().as_millis(), + )) } fn get_self_service_section_and_action<'a>( yaml_config: &'a YamlConfig, section_slug: &str, action_slug: &str, -) -> Result<(&'a SelfServiceSectionYamlConfig, &'a SelfServiceSectionActionYamlConfig), (StatusCode, Json)> { - let section = match find_self_service_section_by_slug(&yaml_config.self_service.sections, section_slug) { - Some(section) => section, - None => return Err((StatusCode::NOT_FOUND, Json(JobResponse { - message: Some(format!("Self service section '{}' not found", section_slug)), - }))) - }; +) -> Result< + ( + &'a SelfServiceSectionYamlConfig, + &'a SelfServiceSectionActionYamlConfig, + ), + (StatusCode, 
Json), +> { + let section = + match find_self_service_section_by_slug(&yaml_config.self_service.sections, section_slug) { + Some(section) => section, + None => { + return Err(( + StatusCode::NOT_FOUND, + Json(JobResponse { + message: Some(format!("Self service section '{}' not found", section_slug)), + }), + )) + } + }; let action = match find_self_service_action_by_slug(section, action_slug) { Some(action) => action, - None => return Err((StatusCode::NOT_FOUND, Json(JobResponse { - message: Some(format!("Action '{}' not found", action_slug)), - }))) + None => { + return Err(( + StatusCode::NOT_FOUND, + Json(JobResponse { + message: Some(format!("Action '{}' not found", action_slug)), + }), + )) + } }; Ok((section, action)) } - #[cfg(test)] mod tests { - use crate::self_service::{find_self_service_action_by_slug, find_self_service_section_by_slug}; + use crate::self_service::{ + find_self_service_action_by_slug, find_self_service_section_by_slug, + }; use crate::yaml_config::{SelfServiceSectionActionYamlConfig, SelfServiceSectionYamlConfig}; #[test] @@ -198,9 +315,18 @@ mod tests { }, ]; - assert_eq!(find_self_service_section_by_slug(§ions, "section-1"), Some(§ions[0])); - assert_eq!(find_self_service_section_by_slug(§ions, "section-2"), Some(§ions[1])); - assert_eq!(find_self_service_section_by_slug(§ions, "section-3"), None); + assert_eq!( + find_self_service_section_by_slug(§ions, "section-1"), + Some(§ions[0]) + ); + assert_eq!( + find_self_service_section_by_slug(§ions, "section-2"), + Some(§ions[1]) + ); + assert_eq!( + find_self_service_section_by_slug(§ions, "section-3"), + None + ); } #[test] @@ -233,8 +359,14 @@ mod tests { ]), }; - assert_eq!(find_self_service_action_by_slug(§ion, "action-1"), Some(§ion.actions.as_ref().unwrap()[0])); - assert_eq!(find_self_service_action_by_slug(§ion, "action-2"), Some(§ion.actions.as_ref().unwrap()[1])); + assert_eq!( + find_self_service_action_by_slug(§ion, "action-1"), + Some(§ion.actions.as_ref().unwrap()[0]) + ); + 
assert_eq!( + find_self_service_action_by_slug(§ion, "action-2"), + Some(§ion.actions.as_ref().unwrap()[1]) + ); assert_eq!(find_self_service_action_by_slug(§ion, "action-3"), None); } } diff --git a/backend/src/self_service/services.rs b/backend/src/self_service/services.rs index 9303833..330f32f 100644 --- a/backend/src/self_service/services.rs +++ b/backend/src/self_service/services.rs @@ -3,11 +3,13 @@ use std::sync::Arc; use serde::{Deserialize, Serialize}; use sqlx::{Pool, Postgres}; use tokio::sync::mpsc::Receiver; -use tracing::error; +use tracing::{debug, error, info}; -use crate::database::{Status, update_self_service_run}; +use crate::database::{update_self_service_run, Status}; use crate::self_service::{execute_command, ExecValidateScriptRequest, JobOutputResult}; -use crate::yaml_config::{SelfServiceSectionActionPostValidateYamlConfig, SelfServiceSectionActionYamlConfig}; +use crate::yaml_config::{ + SelfServiceSectionActionPostValidateYamlConfig, SelfServiceSectionActionYamlConfig, +}; #[derive(Serialize, Deserialize)] pub struct BackgroundWorkerTask { @@ -22,6 +24,7 @@ impl BackgroundWorkerTask { self_service_section_action_yaml_config: SelfServiceSectionActionYamlConfig, req: ExecValidateScriptRequest, ) -> Self { + debug!("BackgroundWorkerTask instantied"); Self { execution_status_id, self_service_section_action_yaml_config, @@ -38,26 +41,50 @@ pub struct TaskPayload { post_validate_output: Option, } -pub async fn background_worker(mut rx: Receiver, pg_pool: Arc>) { +pub async fn background_worker( + mut rx: Receiver, + pg_pool: Arc>, +) { while let Some(task) = rx.recv().await { + info!("task id {}", task.execution_status_id.as_str()); + let r = update_self_service_run( &pg_pool, task.execution_status_id.as_str(), Status::Running, &serde_json::Value::Array(vec![]), - ).await; + ) + .await; + + info!("{}", "2"); if let Err(err) = r { error!("failed to update action execution status: {}", err); continue; } + info!("{}", "3"); + let mut tasks = 
Vec::::new(); + info!("{}", "4"); let mut last_task_value = serde_json::Value::Array(vec![]); - - for cmd in task.self_service_section_action_yaml_config.post_validate.as_ref().unwrap_or(&vec![]) { - let job_output_result = match execute_command(cmd, task.req.payload.to_string().as_str()).await { + info!("{}", "5"); + for cmd in task + .self_service_section_action_yaml_config + .post_validate + .as_ref() + .unwrap_or(&vec![]) + { + info!("{}", cmd); + let job_output_result = match execute_command( + pg_pool.clone(), + cmd, + task.req.payload.to_string().as_str(), + task.execution_status_id.as_str(), + ) + .await + { Ok(job_output_result) => job_output_result, Err(err) => { let task_payload = TaskPayload { @@ -74,7 +101,8 @@ pub async fn background_worker(mut rx: Receiver, pg_pool: task.execution_status_id.as_str(), Status::Failure, &serde_json::to_value(tasks.clone()).unwrap(), - ).await; + ) + .await; break; } @@ -98,7 +126,8 @@ pub async fn background_worker(mut rx: Receiver, pg_pool: task.execution_status_id.as_str(), Status::Running, &last_task_value, - ).await; + ) + .await; } let _ = update_self_service_run( @@ -106,6 +135,7 @@ pub async fn background_worker(mut rx: Receiver, pg_pool: task.execution_status_id.as_str(), Status::Success, &last_task_value, - ).await; + ) + .await; } } diff --git a/backend/src/yaml_config.rs b/backend/src/yaml_config.rs index b2262e8..b236d27 100644 --- a/backend/src/yaml_config.rs +++ b/backend/src/yaml_config.rs @@ -132,7 +132,6 @@ pub trait ExternalCommand { } } - #[derive(Debug, PartialEq, Serialize, Deserialize, Clone)] #[serde(rename_all = "snake_case")] pub struct SelfServiceSectionActionValidateYamlConfig { diff --git a/docker-compose-dev.yaml b/docker-compose-dev.yaml deleted file mode 100644 index 0080208..0000000 --- a/docker-compose-dev.yaml +++ /dev/null @@ -1,9 +0,0 @@ -services: - postgres: - image: postgres:16-alpine - environment: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: torii - 
ports: - - 5432:5432 diff --git a/docker-compose.yaml b/docker-compose.yaml index 4d39526..95fbb98 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,33 +1,36 @@ services: frontend: - build: - context: ./frontend - dockerfile: Dockerfile - ports: - - "5173:80" + build: + context: frontend + dockerfile: Dockerfile.dev + command: npx vite --host 0.0.0.0 --port 8080 + ports: + - 8080:8080 + volumes: + - $PWD/frontend:/app + - torii-frontend-node-modules:/app/node_modules depends_on: backend: condition: service_started + backend: - build: - context: ./backend - dockerfile: Dockerfile - restart: no - command: - -c /app/examples/config.yaml + build: + context: backend + dockerfile: Dockerfile.dev + command: cargo watch -x 'run -- --config examples/config.yaml' ports: - '9999:9999' volumes: - - type: bind - source: ./backend - target: /tmp + - $PWD/backend:/app healthcheck: test: [ "CMD", "curl", "-f", "http://localhost:9999" ] environment: - DB_CONNECTION_URL=postgresql://postgres:postgres@postgres:5432/torii + - RUST_LOG=4 depends_on: postgres: condition: service_healthy + postgres: image: postgres:16-alpine environment: @@ -41,3 +44,8 @@ services: interval: 10s timeout: 5s retries: 5 + start_interval: 1s + +volumes: + torii-frontend-node-modules: + \ No newline at end of file diff --git a/frontend/Dockerfile.dev b/frontend/Dockerfile.dev new file mode 100644 index 0000000..f021864 --- /dev/null +++ b/frontend/Dockerfile.dev @@ -0,0 +1,7 @@ +FROM node:20-alpine3.19 as builder + +WORKDIR /app + +COPY . . 
+ +RUN npm install diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 5229e77..4f6628e 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -17,6 +17,8 @@ "@tanstack/react-query": "^5.13.4", "@tanstack/react-query-devtools": "^5.29.0", "@tanstack/react-table": "^8.16.0", + "@xterm/addon-fit": "^0.10.0", + "@xterm/xterm": "^5.5.0", "class-variance-authority": "^0.7.0", "clsx": "^2.0.0", "dayjs": "^1.11.10", @@ -30,9 +32,11 @@ "react-error-boundary": "^4.0.13", "react-hook-form": "^7.51.2", "react-router-dom": "^6.20.1", + "react-xtermjs": "^1.0.8", "sort-by": "^1.2.0", "tailwind-merge": "^2.1.0", "tailwindcss-animate": "^1.0.7", + "ts-pattern": "^5.2.0", "wonka": "^6.3.4", "yup": "^1.4.0" }, @@ -2047,6 +2051,21 @@ "vite": "^4.2.0 || ^5.0.0" } }, + "node_modules/@xterm/addon-fit": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/@xterm/addon-fit/-/addon-fit-0.10.0.tgz", + "integrity": "sha512-UFYkDm4HUahf2lnEyHvio51TNGiLK66mqP2JoATy7hRZeXaGMRDr00JiSF7m63vR5WKATF605yEggJKsw0JpMQ==", + "license": "MIT", + "peerDependencies": { + "@xterm/xterm": "^5.0.0" + } + }, + "node_modules/@xterm/xterm": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@xterm/xterm/-/xterm-5.5.0.tgz", + "integrity": "sha512-hqJHYaQb5OptNunnyAnkHyM8aCjZ1MEIDTQu1iIbbTD/xops91NB5yq1ZK/dC2JDbVWtF23zUtl9JE2NqwT87A==", + "license": "MIT" + }, "node_modules/acorn": { "version": "8.11.2", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.2.tgz", @@ -2308,9 +2327,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001568", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001568.tgz", - "integrity": "sha512-vSUkH84HontZJ88MiNrOau1EBrCqEQYgkC5gIySiDlpsm8sGVrhU7Kx4V6h0tnqaHzIHZv08HlJIwPbL4XL9+A==", + "version": "1.0.30001643", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001643.tgz", + "integrity": 
"sha512-ERgWGNleEilSrHM6iUz/zJNSQTP8Mr21wDWpdgvRwcTXGAq6jMtOUPP4dqFPTdKqZ2wKTdtB+uucZ3MRpAUSmg==", "dev": true, "funding": [ { @@ -2325,7 +2344,8 @@ "type": "github", "url": "https://github.com/sponsors/ai" } - ] + ], + "license": "CC-BY-4.0" }, "node_modules/chalk": { "version": "2.4.2", @@ -4500,6 +4520,12 @@ "react": "^16.8.0 || ^17.0.0 || ^18.0.0" } }, + "node_modules/react-xtermjs": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/react-xtermjs/-/react-xtermjs-1.0.8.tgz", + "integrity": "sha512-UbgXkAUuZrvg4rUZwdfJSBU8U4iK80mMuO5Uo0cJTP4NPorF4QNqZJ+43kx8aFVMnlOR/B1Dkpnw/NckZ4hEGA==", + "license": "ISC" + }, "node_modules/read-cache": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", @@ -4967,6 +4993,12 @@ "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==" }, + "node_modules/ts-pattern": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ts-pattern/-/ts-pattern-5.2.0.tgz", + "integrity": "sha512-aGaSpOlDcns7ZoeG/OMftWyQG1KqPVhgplhJxNCvyIXqWrumM5uIoOSarw/hmmi/T1PnuQ/uD8NaFHvLpHicDg==", + "license": "MIT" + }, "node_modules/tslib": { "version": "2.6.2", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", diff --git a/frontend/package.json b/frontend/package.json index 7a75b61..2766df9 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -19,6 +19,8 @@ "@tanstack/react-query": "^5.13.4", "@tanstack/react-query-devtools": "^5.29.0", "@tanstack/react-table": "^8.16.0", + "@xterm/addon-fit": "^0.10.0", + "@xterm/xterm": "^5.5.0", "class-variance-authority": "^0.7.0", "clsx": "^2.0.0", "dayjs": "^1.11.10", @@ -32,9 +34,11 @@ "react-error-boundary": "^4.0.13", "react-hook-form": "^7.51.2", "react-router-dom": "^6.20.1", + "react-xtermjs": "^1.0.8", "sort-by": "^1.2.0", "tailwind-merge": "^2.1.0", 
"tailwindcss-animate": "^1.0.7", + "ts-pattern": "^5.2.0", "wonka": "^6.3.4", "yup": "^1.4.0" }, diff --git a/frontend/src/components/Nav.tsx b/frontend/src/components/Nav.tsx index a75661a..10cdbcc 100644 --- a/frontend/src/components/Nav.tsx +++ b/frontend/src/components/Nav.tsx @@ -39,9 +39,9 @@ export function MobileNav({ routes, userMenu }: MobileNavProps) {
-
+
@@ -56,11 +56,11 @@ export function MobileNav({ routes, userMenu }: MobileNavProps) {
diff --git a/frontend/src/components/Table.tsx b/frontend/src/components/Table.tsx index a8dfbb4..3ec8dab 100644 --- a/frontend/src/components/Table.tsx +++ b/frontend/src/components/Table.tsx @@ -1,4 +1,4 @@ -import { ArrowLongUpIcon } from "@heroicons/react/24/outline"; +import { ArrowLongUpIcon } from "@heroicons/react/24/outline" import { ColumnDef, OnChangeFn, @@ -10,63 +10,63 @@ import { getExpandedRowModel, getSortedRowModel, useReactTable, -} from "@tanstack/react-table"; -import clsx from "clsx"; -import { Fragment, useEffect } from "react"; +} from "@tanstack/react-table" +import clsx from "clsx" +import { Fragment, useEffect } from "react" // https://github.com/TanStack/table/issues/4382 // eslint-disable-next-line @typescript-eslint/no-explicit-any -export type TableColumns = ColumnDef[]; +export type TableColumns = ColumnDef[] -export type TableRowMode = "default" | "condensed"; +export type TableRowMode = "default" | "condensed" export interface TableProps { - data: T[]; - columns: TableColumns; - renderSubComponent?: (props: { row: Row }) => React.ReactElement; - sorting?: SortingState; - onSortingChange?: OnChangeFn; - manualSorting?: boolean; + data: T[] + columns: TableColumns + renderSubComponent?: (props: { row: Row }) => React.ReactElement + sorting?: SortingState + onSortingChange?: OnChangeFn + manualSorting?: boolean /** * If using expandable rows, this function will be called for each row to determine if it should be expanded or not. 
*/ - expandCondition?: (row: Row) => boolean; + expandCondition?: (row: Row) => boolean } /** Only used for client side sorting */ export function sortHelper(rows: T[], sorting: SortingState) { - const inputRows = [...rows]; + const inputRows = [...rows] if (sorting.length > 0) { inputRows.sort((a, b) => { - const sort = sorting[0]; - const aVal = a[sort.id as keyof T]; - const bVal = b[sort.id as keyof T]; + const sort = sorting[0] + const aVal = a[sort.id as keyof T] + const bVal = b[sort.id as keyof T] if (aVal === bVal) { - return 0; + return 0 } if (sort.desc) { - return aVal! > bVal! ? -1 : 1; + return aVal! > bVal! ? -1 : 1 } else { - return aVal! > bVal! ? 1 : -1; + return aVal! > bVal! ? 1 : -1 } - }); + }) } - return inputRows; + return inputRows } export function Table(props: TableProps) { - const { data, columns, renderSubComponent, expandCondition } = props; + const { data, columns, renderSubComponent, expandCondition } = props const sortingChanged: OnChangeFn = ( sortingState: Updater, ) => { - props.onSortingChange && props.onSortingChange(sortingState); - }; + props.onSortingChange && props.onSortingChange(sortingState) + } const table = useReactTable({ data, @@ -79,17 +79,17 @@ export function Table(props: TableProps) { getSortedRowModel: getSortedRowModel(), manualSorting: props.manualSorting ?? false, onSortingChange: sortingChanged, - }); + }) useEffect(() => { if (expandCondition) { - const rows = table.getRowModel().rows; + const rows = table.getRowModel().rows for (const row of rows) { - const val = expandCondition(row); - row.toggleExpanded(val); + const val = expandCondition(row) + row.toggleExpanded(val) } } - }, [table, expandCondition]); + }, [table, expandCondition]) return ( @@ -166,7 +166,7 @@ export function Table(props: TableProps) { cell.getContext(), )} - ); + ) })} {row.getIsExpanded() && renderSubComponent && ( @@ -178,11 +178,11 @@ export function Table(props: TableProps) { )} - ); + ) })}
- ); + ) } -export default Table; +export default Table diff --git a/frontend/src/lib/utils.ts b/frontend/src/lib/utils.ts index 3cf64a8..36a21bc 100644 --- a/frontend/src/lib/utils.ts +++ b/frontend/src/lib/utils.ts @@ -10,14 +10,14 @@ export function classNames(...classes: any[]): string { } export function millisToHumanTime(duration: number): string { - let milliseconds = Math.floor((duration % 1000) / 100); - let seconds = Math.floor((duration / 1000) % 60); - let minutes = Math.floor((duration / (1000 * 60)) % 60); - let hours = Math.floor((duration / (1000 * 60 * 60)) % 24); + const milliseconds = Math.floor((duration % 1000) / 100); + const seconds = Math.floor((duration / 1000) % 60); + const minutes = Math.floor((duration / (1000 * 60)) % 60); + const hours = Math.floor((duration / (1000 * 60 * 60)) % 24); - let s_hours = hours < 10 ? "0" + hours : hours; - let s_minutes = minutes < 10 ? "0" + minutes : minutes; - let s_seconds = seconds < 10 ? "0" + seconds : seconds; + const s_hours = hours < 10 ? "0" + hours : hours; + const s_minutes = minutes < 10 ? "0" + minutes : minutes; + const s_seconds = seconds < 10 ? "0" + seconds : seconds; return s_hours + ":" + s_minutes + ":" + s_seconds + "." + milliseconds; } diff --git a/frontend/src/main.tsx b/frontend/src/main.tsx index 21e09dc..044c1b5 100644 --- a/frontend/src/main.tsx +++ b/frontend/src/main.tsx @@ -8,6 +8,7 @@ import { Providers } from "./providers"; import App from "./pages/app"; import { RunHistory } from "./pages/self-service/run-history/run-history"; import CatalogList from "./pages/self-service/catalog-list/catalog-list"; +import { Details } from "./pages/self-service/details/details"; const router = createBrowserRouter([ { @@ -27,6 +28,10 @@ const router = createBrowserRouter([ path: "/self-service/run-history", element: , }, + { + path: "/self-service/:taskId/details", + element:
, + }, ], }, ], diff --git a/frontend/src/pages/self-service/catalog-list/service-card.tsx b/frontend/src/pages/self-service/catalog-list/service-card.tsx index 72e92d9..9a6988f 100644 --- a/frontend/src/pages/self-service/catalog-list/service-card.tsx +++ b/frontend/src/pages/self-service/catalog-list/service-card.tsx @@ -4,6 +4,7 @@ import { Menu, Transition } from "@headlessui/react"; import { EllipsisHorizontalIcon, PlusIcon, + SunIcon, TrashIcon, } from "@heroicons/react/24/outline"; import clsx from "clsx"; @@ -24,18 +25,20 @@ export default function ServiceCard({ const getIcon = useCallback((icon: string) => { switch (icon?.toLowerCase()) { case "target": - return