diff --git a/.circleci/config.yml b/.circleci/config.yml index cd604ac85..9802f4855 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -4,18 +4,61 @@ orbs: docker-buildx: sensu/docker-buildx@1.1.1 aws-ecr: circleci/aws-ecr@8.1.2 -jobs: - build-test-and-push: - resource_class: xlarge +executors: + docker-rust: + docker: + - image: cimg/rust:1.62.1 + image-ubuntu: machine: image: ubuntu-2204:2022.04.1 docker_layer_caching: true + +# sccache steps are from this guide + # https://medium.com/@edouard.oger/rust-caching-on-circleci-using-sccache-c996344f0115 +commands: + restore-cargo-cache: + steps: + # Restore cargo cache before installing anything with the cargo command (ie cargo install ...) + - restore_cache: + name: Restore cargo cache + keys: + - cargo-{{ checksum "Cargo.lock" }} + - run: + name: Install sccache + command: | + ls ~/.cargo/bin/sccache || cargo install sccache + # This configures Rust to use sccache. + echo 'export "RUSTC_WRAPPER"="sccache"' >> $BASH_ENV + sccache --version + - restore_cache: + name: Restore sccache cache + key: sccache-cache-{{ .Environment.CIRCLE_JOB }} + save-cargo-cache: + steps: + - run: + name: Sccache stats + command: sccache --show-stats + - save_cache: + name: Save sccache cache + # We use {{ epoch }} to always upload a fresh cache: + # Of course, restore_cache will not find this exact key, + # but it will fall back to the closest key (aka the most recent). 
+ # See https://discuss.circleci.com/t/add-mechanism-to-update-existing-cache-key/9014/13 + key: sccache-cache-{{ .Environment.CIRCLE_JOB }}-{{ epoch }} + paths: + - "~/.cache/sccache" + - save_cache: + name: Save cargo cache + key: cargo-{{ checksum "Cargo.lock" }}-{{ epoch }} + paths: + - ~/.cargo + restore-buildx-cache: steps: - - checkout - docker-buildx/install: version: 0.8.2 qemu-user-static-version: 7.0.0-7 - restore_cache: + name: Restore buildx cache keys: # Try lock cache first - docker-buildx-{{ checksum "./Cargo.lock" }} @@ -24,54 +67,207 @@ jobs: # Fallback to main cache - docker-buildx-main - run: - command: | - export BUILDX_CACHE=/tmp/cache/buildx - sudo mkdir -p $BUILDX_CACHE && sudo chown -R circleci:circleci $BUILDX_CACHE - make images + name: Configure buildx cache + command: export BUILDX_CACHE=/tmp/cache/buildx + save-buildx-cache: + steps: - save_cache: + name: Save buildx cache paths: - "/tmp/cache/buildx" - key: docker-buildx-{{ checksum "./Cargo.lock" }} + key: docker-buildx-{{ checksum "./Cargo.lock" }}-{{ epoch }} when: always + apply-patches: + steps: - run: + name: Patch service command: | - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y - - run: - name: Run the E2E tests - command: | - mkdir -p ~/.cargo cat\<< EOF > ~/.cargo/config.toml [patch.crates-io] shuttle-service = { path = "$PWD/service" } EOF + install-rust: + steps: + - run: + name: Install Rust + command: | + curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + sudo apt update && sudo apt install -y libssl1.1 +jobs: + workspace-fmt: + executor: docker-rust + steps: + - checkout + - restore-cargo-cache + - run: cargo fmt --all --check + - run: cargo install cargo-sort + - run: cargo sort --check --workspace + - run: cargo check --workspace --all-targets + - save-cargo-cache + workspace-clippy: + parameters: + framework: + description: "Framework to activate" + type: string + executor: docker-rust + steps: + - checkout + - 
restore-cargo-cache + - run: | + cargo clippy --tests \ + --all-targets \ + --features="codegen,loader,sqlx-integration,sqlx-postgres,mongodb-integration,secrets,<< parameters.framework >>" \ + --no-deps -- \ + --D warnings \ + -A clippy::let-unit-value \ + -A clippy::format-push-string + - save-cargo-cache + check-standalone: + parameters: + path: + description: "Path to crate external from workspace" + type: string + executor: docker-rust + steps: + - checkout + - restore-cargo-cache + - apply-patches + - run: cargo fmt --all --check --manifest-path << parameters.path >>/Cargo.toml + - run: cargo install cargo-sort + - run: cargo sort --check << parameters.path >> + - run: | + cargo clippy --tests \ + --all-targets \ + --manifest-path << parameters.path >>/Cargo.toml \ + --no-deps -- \ + --D warnings \ + -A clippy::let-unit-value \ + -A clippy::format-push-string + - save-cargo-cache + service-test: + # Using an image since tests will start a docker container + executor: image-ubuntu + steps: + - install-rust + - checkout + - restore-cargo-cache + - run: + name: Run unit tests + command: cargo test --package shuttle-service --features="codegen,loader,secrets" --lib -- --nocapture + - run: + name: Run integration tests + command: cargo test --package shuttle-service --features="codegen,loader,secrets" --test '*' -- --nocapture + - save-cargo-cache + platform-test: + parameters: + crate: + description: "Crate to test" + type: string + # Using an image since tests will start a docker container + executor: image-ubuntu + steps: + - install-rust + - checkout + - restore-cargo-cache + - apply-patches + - run: + name: Run unit tests + command: cargo test --package << parameters.crate >> --all-features --lib -- --nocapture + - run: + name: Run integration tests + # Only run integration tests if there are any + command: | + set +o pipefail + (cargo test --package << parameters.crate >> --all-features --test '*' -- --list 2>&1 | grep -q "no test target matches pattern") && 
echo "nothing to test" || cargo test --package << parameters.crate >> --all-features --test '*' -- --nocapture + - save-cargo-cache + e2e-test: + resource_class: xlarge + executor: image-ubuntu + steps: + - install-rust + - checkout + - restore-buildx-cache + - run: + name: Make images for tests + command: | + sudo mkdir -p $BUILDX_CACHE && sudo chown -R circleci:circleci $BUILDX_CACHE + make images + - save-buildx-cache + - apply-patches + - run: + name: Run the E2E tests + command: | make down docker volume create shuttle-backend-vol cd e2e; BUILDX_CACHE=/tmp/cache/buildx SHUTTLE_API_KEY=test-key cargo test -- --nocapture - - when: - condition: - equal: [ main, << pipeline.git.branch >> ] - steps: - - aws-ecr/ecr-login: - aws-access-key-id: AWS_ACCESS_KEY_ID - aws-secret-access-key: AWS_SECRET_ACCESS_KEY - public-registry: true - - run: - command: | - make clean - PUSH=true BUILDX_CACHE=/tmp/cache/buildx PLATFORMS=linux/amd64,linux/arm64 make images - - save_cache: - paths: - - "/tmp/cache/buildx" - key: docker-buildx-{{ checksum "./Cargo.lock" }} - when: always - save_cache: paths: - "/tmp/cache/buildx" key: docker-buildx-{{ .Branch }} when: always + build-and-push: + resource_class: xlarge + executor: image-ubuntu + steps: + - checkout + - restore-buildx-cache + - aws-ecr/ecr-login: + aws-access-key-id: AWS_ACCESS_KEY_ID + aws-secret-access-key: AWS_SECRET_ACCESS_KEY + public-registry: true + - run: + name: Make and push images + command: | + make clean + PUSH=true PLATFORMS=linux/amd64,linux/arm64 make images + - save-buildx-cache + workflows: version: 2 - build-test-and-push: + ci: jobs: - - build-test-and-push + - workspace-fmt + - workspace-clippy: + name: workspace-clippy-<< matrix.framework >> + requires: + - workspace-fmt + matrix: + parameters: + framework: ["web-axum", "web-rocket", "web-poem", "web-tide", "web-tower"] + - check-standalone: + matrix: + parameters: + path: + - examples/axum/hello-world + - examples/axum/websocket + - 
examples/poem/hello-world + - examples/poem/mongodb + - examples/poem/postgres + - examples/rocket/authentication + - examples/rocket/hello-world + - examples/rocket/postgres + - examples/rocket/url-shortener + - examples/tide/hello-world + - examples/tide/postgres + - examples/tower/hello-world + - service-test: + requires: + - workspace-clippy + - platform-test: + requires: + - workspace-clippy + matrix: + parameters: + crate: ["shuttle-api", "cargo-shuttle", "shuttle-codegen", "shuttle-common", "shuttle-proto", "shuttle-provisioner"] + - e2e-test: + requires: + - service-test + - platform-test + - check-standalone + - build-and-push: + requires: + - e2e-test + filters: + branches: + only: main diff --git a/.github/workflows/fmt.yml b/.github/workflows/fmt.yml deleted file mode 100644 index 490f3bda9..000000000 --- a/.github/workflows/fmt.yml +++ /dev/null @@ -1,62 +0,0 @@ -name: cargo-fmt - -on: - pull_request: - branches: [main] - paths: ["**/*.rs"] - - workflow_dispatch: - -jobs: - fmt_test: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - override: true - components: rustfmt - - run: cargo fmt --all -- --check - cargo_sort_test: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - override: true - components: rustfmt - - run: cargo install cargo-sort - - run: cargo sort --check --workspace - clippy_test: - strategy: - fail-fast: true - matrix: - features: ["web-axum", "web-rocket", "web-tide", "web-tower", web-poem] - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - override: true - components: clippy - - uses: actions/cache@v3 - with: - path: | - ~/.cargo/bin/ - ~/.cargo/registry/index/ - ~/.cargo/registry/cache/ - ~/.cargo/git/db/ - target/ - key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} - - name: Run test with ${{ 
matrix.features}} - run: | - cargo clippy --tests \ - --all-targets \ - --features="codegen,loader,sqlx-integration,sqlx-postgres,mongodb-integration,secrets,${{ matrix.features }}" \ - --no-deps -- \ - --D warnings \ - -A clippy::let-unit-value \ - -A clippy::format-push-string diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml deleted file mode 100644 index 54a517261..000000000 --- a/.github/workflows/test.yml +++ /dev/null @@ -1,62 +0,0 @@ -name: cargo-test - -on: - push: - branches: [main] - paths: ["**/*.rs", "**/Cargo.toml", "**/Cargo.lock"] - pull_request: - branches: [main] - paths: ["**/*.rs", "**/Cargo.toml", "**/Cargo.lock"] - workflow_dispatch: - -jobs: - service_tests: - runs-on: ubuntu-latest - strategy: - matrix: - features: ["web-axum", "web-rocket", "web-tide", "web-tower", "web-poem"] - steps: - - uses: actions/checkout@v2 - - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - override: true - - uses: actions/cache@v3 - with: - path: | - ~/.cargo/bin/ - ~/.cargo/registry/index/ - ~/.cargo/registry/cache/ - ~/.cargo/git/db/ - **/target/ - key: ${{ runner.os }}-cargo-${{ hashFiles('./Cargo.lock') }} - - name: cargo test --package shuttle-service - run: cargo test --package shuttle-service --features="codegen,loader,secrets,${{ matrix.features }}" - platform_tests: - strategy: - matrix: - crate: ["shuttle-provisioner", "shuttle-api", "cargo-shuttle", "shuttle-common"] - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - override: true - - uses: actions/cache@v3 - with: - path: | - ~/.cargo/bin/ - ~/.cargo/registry/index/ - ~/.cargo/registry/cache/ - ~/.cargo/git/db/ - **/target/ - key: ${{ runner.os }}-cargo-${{ hashFiles('./Cargo.lock') }} - - name: Patch shuttle-service - run: | - cat< ~/.cargo/config.toml - [patch.crates-io] - shuttle-service = { path = "$PWD/service" } - EOF - - name: cargo test ${{ matrix.crate }} --all-features - run: cargo 
test --package ${{ matrix.crate }} --all-features -- --nocapture diff --git a/Cargo.lock b/Cargo.lock index 81e7b9ba2..8680d84e5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -240,15 +240,6 @@ dependencies = [ "event-listener", ] -[[package]] -name = "async-mutex" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479db852db25d9dbf6204e6cb6253698f175c15726470f78af0d918e99d6156e" -dependencies = [ - "event-listener", -] - [[package]] name = "async-process" version = "1.4.0" @@ -4603,7 +4594,6 @@ name = "shuttle-api" version = "0.4.2" dependencies = [ "anyhow", - "async-mutex", "async-trait", "base64 0.13.0", "cargo", diff --git a/README.md b/README.md index 66215c65c..de65ce8fc 100644 --- a/README.md +++ b/README.md @@ -9,8 +9,8 @@ language - - build status + + build status discord diff --git a/api/Cargo.toml b/api/Cargo.toml index a82c81fff..c3e0eb39b 100644 --- a/api/Cargo.toml +++ b/api/Cargo.toml @@ -6,7 +6,6 @@ publish = false [dependencies] anyhow = "1.0.57" -async-mutex = "1.4.0" async-trait = "0.1.56" base64 = "0.13.0" cargo = "0.62.0" diff --git a/api/src/lib.rs b/api/src/lib.rs new file mode 100644 index 000000000..2eb89c91e --- /dev/null +++ b/api/src/lib.rs @@ -0,0 +1,229 @@ +#[macro_use] +extern crate rocket; + +#[macro_use] +extern crate log; + +mod args; +mod auth; +mod auth_admin; +mod build; +mod deployment; +mod factory; +mod proxy; +mod router; + +use std::collections::HashMap; +use std::net::IpAddr; +use std::sync::Arc; + +use auth_admin::Admin; +use clap::Parser; +pub use deployment::MAX_DEPLOYS; +use factory::ShuttleFactory; +use rocket::serde::json::Json; +use rocket::{tokio, Build, Data, Rocket, State}; +use shuttle_common::project::ProjectName; +use shuttle_common::{DeploymentApiError, DeploymentMeta, Port}; +use shuttle_service::SecretStore; +use uuid::Uuid; + +use crate::args::Args; +use crate::auth::{ApiKey, AuthorizationError, ScopedUser, User, UserDirectory}; +use 
crate::build::{BuildSystem, FsBuildSystem}; +use crate::deployment::DeploymentSystem; + +type ApiResult = Result, E>; + +/// Find user by username and return its API Key. +/// If the user does not exist create it and update `users` state to `users.toml`. +/// Finally return user's API Key. +#[post("/users/")] +async fn get_or_create_user( + user_directory: &State, + username: String, + _admin: Admin, +) -> Result { + user_directory.get_or_create(username) +} + +/// Status API to be used to check if the service is alive +#[get("/status")] +async fn status() -> String { + String::from("Ok") +} + +#[get("/version")] +async fn version() -> String { + String::from(shuttle_service::VERSION) +} + +#[get("/<_>/deployments/")] +async fn get_deployment( + state: &State, + id: Uuid, + _user: ScopedUser, +) -> ApiResult { + info!("[GET_DEPLOYMENT, {}, {}]", _user.name(), _user.scope()); + let deployment = state.deployment_manager.get_deployment(&id).await?; + Ok(Json(deployment)) +} + +#[delete("/<_>/deployments/")] +async fn delete_deployment( + state: &State, + id: Uuid, + _user: ScopedUser, +) -> ApiResult { + info!("[DELETE_DEPLOYMENT, {}, {}]", _user.name(), _user.scope()); + // TODO why twice? 
+ let _deployment = state.deployment_manager.get_deployment(&id).await?; + let deployment = state.deployment_manager.kill_deployment(&id).await?; + Ok(Json(deployment)) +} + +#[get("/<_>")] +async fn get_project( + state: &State, + user: ScopedUser, +) -> ApiResult { + info!("[GET_PROJECT, {}, {}]", user.name(), user.scope()); + + let deployment = state + .deployment_manager + .get_deployment_for_project(user.scope()) + .await?; + + Ok(Json(deployment)) +} + +#[delete("/<_>")] +async fn delete_project( + state: &State, + user: ScopedUser, +) -> ApiResult { + info!("[DELETE_PROJECT, {}, {}]", user.name(), user.scope()); + + let deployment = state + .deployment_manager + .kill_deployment_for_project(user.scope()) + .await?; + Ok(Json(deployment)) +} + +#[post("/", data = "")] +async fn create_project( + state: &State, + user_directory: &State, + crate_file: Data<'_>, + project_name: ProjectName, + user: User, +) -> ApiResult { + info!("[CREATE_PROJECT, {}, {}]", &user.name, &project_name); + + if !user + .projects + .iter() + .any(|my_project| *my_project == project_name) + { + user_directory.create_project_if_not_exists(&user.name, &project_name)?; + } + let deployment = state + .deployment_manager + .deploy(crate_file, project_name) + .await?; + Ok(Json(deployment)) +} + +#[post("//secrets", data = "")] +async fn project_secrets( + state: &State, + secrets: Json>, + project_name: ProjectName, + user: ScopedUser, +) -> ApiResult { + info!("[PROJECT_SECRETS, {}, {}]", user.name(), &project_name); + + let deployment = state + .deployment_manager + .get_deployment_for_project(user.scope()) + .await?; + + if let Some(database_deployment) = &deployment.database_deployment { + let conn_str = database_deployment.connection_string_private(); + let conn = sqlx::PgPool::connect(&conn_str) + .await + .map_err(|e| DeploymentApiError::Internal(e.to_string()))?; + + let map = secrets.into_inner(); + for (key, value) in map.iter() { + conn.set_secret(key, value) + .await + 
.map_err(|e| DeploymentApiError::BadRequest(e.to_string()))?; + } + } + + Ok(Json(deployment)) +} + +struct ApiState { + deployment_manager: Arc, +} + +//noinspection ALL +pub async fn rocket() -> Rocket { + env_logger::Builder::new() + .filter_module("rocket", log::LevelFilter::Warn) + .filter_module("_", log::LevelFilter::Warn) + .filter_module("shuttle_api", log::LevelFilter::Debug) + .filter_module("shuttle_service", log::LevelFilter::Debug) + .init(); + + let args: Args = Args::parse(); + let build_system = FsBuildSystem::initialise(args.path).unwrap(); + let deployment_manager = Arc::new( + DeploymentSystem::new( + Box::new(build_system), + args.proxy_fqdn.to_string(), + args.provisioner_address, + args.provisioner_port, + ) + .await, + ); + + start_proxy(args.bind_addr, args.proxy_port, deployment_manager.clone()).await; + + let state = ApiState { deployment_manager }; + + let user_directory = + UserDirectory::from_user_file().expect("could not initialise user directory"); + + let config = rocket::Config { + address: args.bind_addr, + port: args.api_port, + ..Default::default() + }; + rocket::custom(config) + .mount( + "/projects", + routes![ + delete_deployment, + get_deployment, + delete_project, + create_project, + get_project, + project_secrets + ], + ) + .mount("/", routes![get_or_create_user, status, version]) + .manage(state) + .manage(user_directory) +} + +async fn start_proxy( + bind_addr: IpAddr, + proxy_port: Port, + deployment_manager: Arc, +) { + tokio::spawn(async move { proxy::start(bind_addr, proxy_port, deployment_manager).await }); +} diff --git a/api/src/main.rs b/api/src/main.rs index 8c6ca2b8c..3599575a0 100644 --- a/api/src/main.rs +++ b/api/src/main.rs @@ -1,174 +1,4 @@ -#[macro_use] -extern crate rocket; - -#[macro_use] -extern crate log; - -mod args; -mod auth; -mod auth_admin; -mod build; -mod deployment; -mod factory; -mod proxy; -mod router; - -use std::collections::HashMap; -use std::net::IpAddr; -use std::sync::Arc; - -use 
auth_admin::Admin; -use clap::Parser; -use deployment::MAX_DEPLOYS; -use factory::ShuttleFactory; -use rocket::serde::json::Json; -use rocket::{tokio, Build, Data, Rocket, State}; -use shuttle_common::project::ProjectName; -use shuttle_common::{DeploymentApiError, DeploymentMeta, Port}; -use shuttle_service::SecretStore; -use uuid::Uuid; - -use crate::args::Args; -use crate::auth::{ApiKey, AuthorizationError, ScopedUser, User, UserDirectory}; -use crate::build::{BuildSystem, FsBuildSystem}; -use crate::deployment::DeploymentSystem; - -type ApiResult = Result, E>; - -/// Find user by username and return it's API Key. -/// if user does not exist create it and update `users` state to `users.toml`. -/// Finally return user's API Key. -#[post("/users/")] -async fn get_or_create_user( - user_directory: &State, - username: String, - _admin: Admin, -) -> Result { - user_directory.get_or_create(username) -} - -/// Status API to be used to check if the service is alive -#[get("/status")] -async fn status() -> String { - String::from("Ok") -} - -#[get("/version")] -async fn version() -> String { - String::from(shuttle_service::VERSION) -} - -#[get("/<_>/deployments/")] -async fn get_deployment( - state: &State, - id: Uuid, - _user: ScopedUser, -) -> ApiResult { - info!("[GET_DEPLOYMENT, {}, {}]", _user.name(), _user.scope()); - let deployment = state.deployment_manager.get_deployment(&id).await?; - Ok(Json(deployment)) -} - -#[delete("/<_>/deployments/")] -async fn delete_deployment( - state: &State, - id: Uuid, - _user: ScopedUser, -) -> ApiResult { - info!("[DELETE_DEPLOYMENT, {}, {}]", _user.name(), _user.scope()); - // TODO why twice? 
- let _deployment = state.deployment_manager.get_deployment(&id).await?; - let deployment = state.deployment_manager.kill_deployment(&id).await?; - Ok(Json(deployment)) -} - -#[get("/<_>")] -async fn get_project( - state: &State, - user: ScopedUser, -) -> ApiResult { - info!("[GET_PROJECT, {}, {}]", user.name(), user.scope()); - - let deployment = state - .deployment_manager - .get_deployment_for_project(user.scope()) - .await?; - - Ok(Json(deployment)) -} - -#[delete("/<_>")] -async fn delete_project( - state: &State, - user: ScopedUser, -) -> ApiResult { - info!("[DELETE_PROJECT, {}, {}]", user.name(), user.scope()); - - let deployment = state - .deployment_manager - .kill_deployment_for_project(user.scope()) - .await?; - Ok(Json(deployment)) -} - -#[post("/", data = "")] -async fn create_project( - state: &State, - user_directory: &State, - crate_file: Data<'_>, - project_name: ProjectName, - user: User, -) -> ApiResult { - info!("[CREATE_PROJECT, {}, {}]", &user.name, &project_name); - - if !user - .projects - .iter() - .any(|my_project| *my_project == project_name) - { - user_directory.create_project_if_not_exists(&user.name, &project_name)?; - } - let deployment = state - .deployment_manager - .deploy(crate_file, project_name) - .await?; - Ok(Json(deployment)) -} - -#[post("//secrets", data = "")] -async fn project_secrets( - state: &State, - secrets: Json>, - project_name: ProjectName, - user: ScopedUser, -) -> ApiResult { - info!("[PROJECT_SECRETS, {}, {}]", user.name(), &project_name); - - let deployment = state - .deployment_manager - .get_deployment_for_project(user.scope()) - .await?; - - if let Some(database_deployment) = &deployment.database_deployment { - let conn_str = database_deployment.connection_string_private(); - let conn = sqlx::PgPool::connect(&conn_str) - .await - .map_err(|e| DeploymentApiError::Internal(e.to_string()))?; - - let map = secrets.into_inner(); - for (key, value) in map.iter() { - conn.set_secret(key, value) - .await - 
.map_err(|e| DeploymentApiError::BadRequest(e.to_string()))?; - } - } - - Ok(Json(deployment)) -} - -struct ApiState { - deployment_manager: Arc, -} +use shuttle_api::{rocket, MAX_DEPLOYS}; fn main() -> Result<(), Box> { tokio::runtime::Builder::new_multi_thread() @@ -182,61 +12,3 @@ fn main() -> Result<(), Box> { Ok(()) }) } - -//noinspection ALL -async fn rocket() -> Rocket { - env_logger::Builder::new() - .filter_module("rocket", log::LevelFilter::Warn) - .filter_module("_", log::LevelFilter::Warn) - .filter_module("shuttle_api", log::LevelFilter::Debug) - .filter_module("shuttle_service", log::LevelFilter::Debug) - .init(); - - let args: Args = Args::parse(); - let build_system = FsBuildSystem::initialise(args.path).unwrap(); - let deployment_manager = Arc::new( - DeploymentSystem::new( - Box::new(build_system), - args.proxy_fqdn.to_string(), - args.provisioner_address, - args.provisioner_port, - ) - .await, - ); - - start_proxy(args.bind_addr, args.proxy_port, deployment_manager.clone()).await; - - let state = ApiState { deployment_manager }; - - let user_directory = - UserDirectory::from_user_file().expect("could not initialise user directory"); - - let config = rocket::Config { - address: args.bind_addr, - port: args.api_port, - ..Default::default() - }; - rocket::custom(config) - .mount( - "/projects", - routes![ - delete_deployment, - get_deployment, - delete_project, - create_project, - get_project, - project_secrets - ], - ) - .mount("/", routes![get_or_create_user, status, version]) - .manage(state) - .manage(user_directory) -} - -async fn start_proxy( - bind_addr: IpAddr, - proxy_port: Port, - deployment_manager: Arc, -) { - tokio::spawn(async move { proxy::start(bind_addr, proxy_port, deployment_manager).await }); -} diff --git a/cargo-shuttle/tests/integration/run.rs b/cargo-shuttle/tests/integration/run.rs index 8a8592dba..08f67727c 100644 --- a/cargo-shuttle/tests/integration/run.rs +++ b/cargo-shuttle/tests/integration/run.rs @@ -23,12 +23,14 @@ 
async fn cargo_shuttle_run(working_directory: &str) -> u16 { cmd: Command::Run(run_args), }); + let working_directory_clone = working_directory.clone(); + tokio::spawn(async move { sleep(Duration::from_secs(600)).await; println!( "run test for '{}' took too long. Did it fail to shutdown?", - working_directory.display() + working_directory_clone.display() ); exit(1); }); @@ -42,6 +44,10 @@ async fn cargo_shuttle_run(working_directory: &str) -> u16 { .await) .is_err() { + println!( + "waiting for '{}' to start up...", + working_directory.display() + ); sleep(Duration::from_millis(350)).await; } diff --git a/examples/poem/postgres/Cargo.toml b/examples/poem/postgres/Cargo.toml index 90bf4b7c5..e38b6c608 100644 --- a/examples/poem/postgres/Cargo.toml +++ b/examples/poem/postgres/Cargo.toml @@ -8,5 +8,5 @@ edition = "2021" [dependencies] poem = "1.3.35" serde = "1.0" -sqlx = { version = "0.5", features = ["runtime-tokio-native-tls", "postgres"] } shuttle-service = { version = "0.4.2", features = ["sqlx-postgres", "secrets", "web-poem"] } +sqlx = { version = "0.5", features = ["runtime-tokio-native-tls", "postgres"] } diff --git a/examples/rocket/authentication/Cargo.toml b/examples/rocket/authentication/Cargo.toml index ba776d767..b20dda60a 100644 --- a/examples/rocket/authentication/Cargo.toml +++ b/examples/rocket/authentication/Cargo.toml @@ -7,8 +7,8 @@ edition = "2021" [dependencies] chrono = "0.4" -jsonwebtoken = {version = "8", default-features = false } +jsonwebtoken = { version = "8", default-features = false } lazy_static = "1.4" -rocket = {version = "0.5.0-rc.1", features = ["json"] } -serde = {version = "1.0", features = ["derive"] } +rocket = { version = "0.5.0-rc.1", features = ["json"] } +serde = { version = "1.0", features = ["derive"] } shuttle-service = { version = "0.4.2", features = ["web-rocket"] } diff --git a/examples/rocket/authentication/src/claims.rs b/examples/rocket/authentication/src/claims.rs index 90a5b14d9..878d7d40d 100644 --- 
a/examples/rocket/authentication/src/claims.rs +++ b/examples/rocket/authentication/src/claims.rs @@ -91,7 +91,7 @@ impl Claims { } /// Converts this claims into a token string - pub(crate) fn to_token(mut self) -> Result> { + pub(crate) fn into_token(mut self) -> Result> { let expiration = Utc::now() .checked_add_signed(*TOKEN_EXPIRATION) .expect("failed to create an expiration time") @@ -128,7 +128,7 @@ mod tests { #[test] fn to_token_and_back() { let claim = Claims::from_name("test runner"); - let token = claim.to_token().unwrap(); + let token = claim.into_token().unwrap(); let token = format!("Bearer {token}"); let claim = Claims::from_authorization(&token).unwrap(); diff --git a/examples/rocket/authentication/src/lib.rs b/examples/rocket/authentication/src/lib.rs index ec144bd7e..5fb1b103e 100644 --- a/examples/rocket/authentication/src/lib.rs +++ b/examples/rocket/authentication/src/lib.rs @@ -63,7 +63,7 @@ fn login(login: Json) -> Result, Custom