
Commit

fix: clippy and fmt
varex83 committed Dec 12, 2024
1 parent f630b05 commit 431907d
Showing 8 changed files with 77 additions and 61 deletions.
56 changes: 33 additions & 23 deletions api/src/handlers/compile.rs
@@ -2,15 +2,14 @@ use crate::errors::{ApiError, Result};
use crate::handlers::process::{do_process_command, fetch_process_result};
use crate::handlers::types::{ApiCommand, ApiCommandResult};
use crate::handlers::types::{FileContentMap, ScarbCompileResponse};
use crate::handlers::utils::{do_metered_action, get_files_recursive, init_directories, AutoCleanUp};
use crate::handlers::utils::{get_files_recursive, init_directories, AutoCleanUp};
use crate::handlers::{STATUS_COMPILATION_FAILED, STATUS_SUCCESS, STATUS_UNKNOWN_ERROR};
use crate::metrics::{Metrics, COMPILATION_LABEL_VALUE};
use crate::metrics::Metrics;
use crate::rate_limiter::RateLimited;
use crate::worker::WorkerEngine;
use rocket::serde::json;
use rocket::serde::json::Json;
use rocket::{tokio, State};
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use std::process::{Command, Stdio};
use tracing::instrument;
@@ -37,7 +36,12 @@ pub async fn compile_async(
engine: &State<WorkerEngine>,
) -> String {
tracing::info!("/compile/{:?}", request_json.0.file_names());
do_process_command(ApiCommand::Compile { compilation_request: request_json.0 }, engine)
do_process_command(
ApiCommand::Compile {
compilation_request: request_json.0,
},
engine,
)
}

#[instrument(skip(engine))]
@@ -54,12 +58,19 @@ pub async fn get_compile_result(process_id: &str, engine: &State<WorkerEngine>)
})
}

async fn ensure_scarb_toml(mut compilation_request: CompilationRequest) -> Result<CompilationRequest> {

async fn ensure_scarb_toml(
mut compilation_request: CompilationRequest,
) -> Result<CompilationRequest> {
// Check if Scarb.toml exists in the root
if !compilation_request.has_scarb_toml() {
// number of Cairo files in the request
if compilation_request.files.iter().filter(|f| f.file_name.ends_with(".cairo")).count() != 1 {
if compilation_request
.files
.iter()
.filter(|f| f.file_name.ends_with(".cairo"))
.count()
!= 1
{
return Err(ApiError::InvalidRequest);
}

@@ -70,7 +81,11 @@ async fn ensure_scarb_toml(mut compilation_request: CompilationRequest) -> Resul
});

// rename the first Cairo file to src/lib.cairo
let first_cairo_file = compilation_request.files.iter_mut().find(|f| f.file_name.ends_with(".cairo")).unwrap();
let first_cairo_file = compilation_request
.files
.iter_mut()
.find(|f| f.file_name.ends_with(".cairo"))
.unwrap();
first_cairo_file.file_name = "src/lib.cairo".to_string();
}

@@ -99,7 +114,6 @@ pub async fn do_compile(
dirs: vec![&temp_dir],
};


let mut compile = Command::new("scarb");
compile
.current_dir(&temp_dir)
@@ -109,18 +123,15 @@

tracing::debug!("Executing scarb command: {:?}", compile);

let result = tokio::time::timeout(
std::time::Duration::from_secs(300),
async {
compile
.spawn()
.map_err(ApiError::FailedToExecuteCommand)?
.wait_with_output()
.map_err(ApiError::FailedToReadOutput)
},
)
.await
.map_err(|_| ApiError::CompilationTimeout)??;
let result = tokio::time::timeout(std::time::Duration::from_secs(300), async {
compile
.spawn()
.map_err(ApiError::FailedToExecuteCommand)?
.wait_with_output()
.map_err(ApiError::FailedToReadOutput)
})
.await
.map_err(|_| ApiError::CompilationTimeout)??;

let file_content_map_array = get_files_recursive(&PathBuf::from(&temp_dir).join("target/dev"))?;

Expand All @@ -136,7 +147,7 @@ pub async fn do_compile(
Some(_) => STATUS_COMPILATION_FAILED,
None => STATUS_UNKNOWN_ERROR,
}
.to_string();
.to_string();

auto_clean_up.clean_up().await;

@@ -146,4 +157,3 @@ pub async fn do_compile(
status,
}))
}
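
As a side note on the block just reformatted: do_compile wraps a blocking std::process::Command wait in tokio::time::timeout with a 300-second cap. Below is a minimal, self-contained sketch of the same timeout idea; it assumes tokio::process::Command so the wait itself is async, which is an illustrative substitution rather than what the handler above does.

    use std::time::Duration;
    use tokio::process::Command;

    #[tokio::main]
    async fn main() -> Result<(), Box<dyn std::error::Error>> {
        // Run `scarb build` and cap the whole wait at 300 seconds,
        // mirroring the timeout used in do_compile.
        let output = tokio::time::timeout(
            Duration::from_secs(300),
            Command::new("scarb").arg("build").output(),
        )
        .await
        .map_err(|_| "compilation timed out")??;

        println!("scarb exited with status {:?}", output.status.code());
        Ok(())
    }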

4 changes: 2 additions & 2 deletions api/src/handlers/mod.rs
@@ -1,9 +1,9 @@
pub mod scarb_version;
pub mod compile;
pub mod process;
pub mod scarb_test;
pub mod scarb_version;
pub mod types;
pub mod utils;
pub mod compile;

use tracing::info;
use tracing::instrument;
8 changes: 2 additions & 6 deletions api/src/handlers/process.rs
@@ -47,16 +47,12 @@ pub fn do_process_command(command: ApiCommand, engine: &State<WorkerEngine>) ->
}
}

pub fn fetch_process_result<F>(
process_id: &str,
engine: &State<WorkerEngine>,
do_work: F,
) -> String
pub fn fetch_process_result<F>(process_id: &str, engine: &State<WorkerEngine>, do_work: F) -> String
where
F: FnOnce(Result<&ApiCommandResult, &ApiError>) -> String,
{
// get status of process by ID
match Uuid::parse_str(&process_id) {
match Uuid::parse_str(process_id) {
Ok(process_uuid) => {
if engine.arc_process_states.contains_key(&process_uuid) {
match engine
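
The one-line fetch_process_result signature above takes a closure so callers can turn a borrowed process result into an owned response string without cloning stored state (get_compile_result earlier in this commit uses exactly that shape). A simplified sketch of the pattern follows; the map, key type, and names are hypothetical stand-ins, not the crate's API.

    use std::collections::HashMap;

    // Hypothetical, simplified stand-in for the engine's process-state store.
    enum ProcessState {
        Running,
        Completed(String),
        Error(String),
    }

    // Same shape as fetch_process_result: the closure maps a borrowed result
    // into an owned String, so nothing is cloned out of the map.
    fn fetch_result<F>(states: &HashMap<u64, ProcessState>, id: u64, do_work: F) -> String
    where
        F: FnOnce(Result<&str, &str>) -> String,
    {
        match states.get(&id) {
            Some(ProcessState::Completed(out)) => do_work(Ok(out.as_str())),
            Some(ProcessState::Error(err)) => do_work(Err(err.as_str())),
            Some(ProcessState::Running) => "Process is still running".to_string(),
            None => "Process id not found".to_string(),
        }
    }

    fn main() {
        let mut states = HashMap::new();
        states.insert(1, ProcessState::Completed("sierra artifacts".to_string()));
        let message = fetch_result(&states, 1, |result| match result {
            Ok(out) => format!("Completed: {out}"),
            Err(err) => format!("Failed: {err}"),
        });
        println!("{message}");
    }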
18 changes: 9 additions & 9 deletions api/src/handlers/scarb_test.rs
@@ -71,21 +71,21 @@ pub async fn do_scarb_test(remix_file_path: PathBuf) -> Result<Json<ScarbTestRes
&remix_file_path,
)
+ &String::from_utf8(output.stderr)
.map_err(ApiError::UTF8Error)?
.replace(
&file_path
.to_str()
.ok_or(ApiError::FailedToParseString)?
.to_string(),
&remix_file_path,
);
.map_err(ApiError::UTF8Error)?
.replace(
&file_path
.to_str()
.ok_or(ApiError::FailedToParseString)?
.to_string(),
&remix_file_path,
);

let status = match output.status.code() {
Some(0) => "Success",
Some(_) => "SierraCompilationFailed",
None => "UnknownError",
}
.to_string();
.to_string();

Ok(Json(ScarbTestResponse { message, status }))
}
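
The reindented block above decodes the child's output and swaps the server-side file path for the Remix-facing one before returning the message. A small isolated sketch of that step, with illustrative (not the crate's) function and path names:

    use std::string::FromUtf8Error;

    // Decode raw process output and hide the server's temp directory from the
    // user by substituting the path they originally submitted.
    fn sanitize_output(
        raw: Vec<u8>,
        server_path: &str,
        remix_path: &str,
    ) -> Result<String, FromUtf8Error> {
        Ok(String::from_utf8(raw)?.replace(server_path, remix_path))
    }

    fn main() {
        let raw = b"error at /tmp/job-42/src/lib.cairo:3".to_vec();
        let msg = sanitize_output(raw, "/tmp/job-42/src/lib.cairo", "contracts/lib.cairo").unwrap();
        assert_eq!(msg, "error at contracts/lib.cairo:3");
    }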
9 changes: 4 additions & 5 deletions api/src/handlers/scarb_version.rs
@@ -1,14 +1,11 @@
use rocket::tokio::fs::read_dir;
use rocket::State;
use std::path::Path;
use std::process::{Command, Stdio};
use tracing::{error, info, instrument};

use crate::errors::{ApiError, Result};
use crate::handlers::process::{do_process_command, fetch_process_result};
use crate::handlers::types::{ApiCommand, ApiCommandResult};
use crate::rate_limiter::RateLimited;
use crate::utils::lib::{CAIRO_COMPILERS_DIR, DEFAULT_CAIRO_DIR};
use crate::worker::WorkerEngine;

#[instrument(skip(engine, _rate_limited))]
@@ -36,14 +33,16 @@ pub async fn get_scarb_version_result(process_id: &str, engine: &State<WorkerEng
/// ## Note
/// (default Cairo version will be used)
pub fn do_cairo_version() -> Result<String> {
let mut version_caller = Command::new("scarb")
let version_caller = Command::new("scarb")
.arg("--version")
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
.map_err(ApiError::FailedToExecuteCommand)?;

let output = version_caller.wait_with_output().map_err(ApiError::FailedToReadOutput)?;
let output = version_caller
.wait_with_output()
.map_err(ApiError::FailedToReadOutput)?;

if output.status.success() {
let result = String::from_utf8_lossy(&output.stdout).to_string();
32 changes: 23 additions & 9 deletions api/src/handlers/utils.rs
@@ -9,7 +9,12 @@ use crate::errors::{ApiError, Result};
use crate::metrics::{Metrics, COMPILATION_LABEL_VALUE};

use super::types::{CompilationRequest, FileContentMap, Successful};
use super::{compile::do_compile, scarb_test::do_scarb_test, scarb_version::do_cairo_version, types::{ApiCommand, ApiCommandResult}};
use super::{
compile::do_compile,
scarb_test::do_scarb_test,
scarb_version::do_cairo_version,
types::{ApiCommand, ApiCommandResult},
};

#[instrument]
#[post("/on-plugin-launched")]
@@ -18,7 +23,7 @@ pub async fn on_plugin_launched() {
}

pub(crate) async fn do_metered_action<T: Successful>(
action: impl Future<Output=Result<Json<T>>>,
action: impl Future<Output = Result<Json<T>>>,
action_label_value: &str,
metrics: &Metrics,
) -> Result<Json<T>> {
@@ -101,7 +106,6 @@ impl AutoCleanUp<'_> {
}
}


pub async fn dispatch_command(command: ApiCommand, metrics: &Metrics) -> Result<ApiCommandResult> {
match command {
ApiCommand::ScarbVersion => match do_cairo_version() {
@@ -113,12 +117,14 @@ pub async fn dispatch_command(command: ApiCommand, metrics: &Metrics) -> Result<
Ok(result) => Ok(ApiCommandResult::ScarbTest(result.into_inner())),
Err(e) => Err(e),
},
ApiCommand::Compile { compilation_request } => match do_metered_action(
do_compile(compilation_request, &metrics),
ApiCommand::Compile {
compilation_request,
} => match do_metered_action(
do_compile(compilation_request, metrics),
COMPILATION_LABEL_VALUE,
metrics,
)
.await
.await
{
Ok(result) => Ok(ApiCommandResult::Compile(result.into_inner())),
Err(e) => Err(e),
@@ -139,7 +145,10 @@ pub async fn create_temp_dir() -> Result<PathBuf> {
}

pub async fn init_directories(compilation_request: CompilationRequest) -> Result<String> {
println!("init_directories, compilation_request: {:?}", compilation_request);
println!(
"init_directories, compilation_request: {:?}",
compilation_request
);

let temp_dir = create_temp_dir().await?;

@@ -160,11 +169,16 @@ pub async fn init_directories(compilation_request: CompilationRequest) -> Result
println!("init_directories, temp_dir: {:?}", temp_dir);

// check the path content
println!("init_directories, temp_dir content: {:?}", tokio::fs::read_dir(&temp_dir).await);
println!(
"init_directories, temp_dir content: {:?}",
tokio::fs::read_dir(&temp_dir).await
);

temp_dir
.to_str()
.ok_or_else(|| ApiError::FailedToInitializeDirectories("Failed to convert path to string".to_string()))
.ok_or_else(|| {
ApiError::FailedToInitializeDirectories("Failed to convert path to string".to_string())
})
.map(|s| s.to_string())
}

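
The do_metered_action signature reformatted above accepts any impl Future<Output = Result<Json<T>>>, awaits it, and reports the outcome to the metrics registry. A stripped-down sketch of that wrapper pattern follows, using a hypothetical Metrics stand-in rather than the crate's Prometheus-backed type.

    use std::future::Future;
    use std::time::Instant;

    // Hypothetical, simplified stand-in for the crate's Metrics type.
    struct Metrics;

    impl Metrics {
        fn observe(&self, label: &str, seconds: f64, ok: bool) {
            println!("{label}: {seconds:.3}s, success={ok}");
        }
    }

    // Await the action, time it, and record the outcome under the given label.
    async fn metered_action<T, E>(
        action: impl Future<Output = Result<T, E>>,
        label: &str,
        metrics: &Metrics,
    ) -> Result<T, E> {
        let started = Instant::now();
        let result = action.await;
        metrics.observe(label, started.elapsed().as_secs_f64(), result.is_ok());
        result
    }

    #[tokio::main]
    async fn main() {
        let metrics = Metrics;
        let value = metered_action(async { Ok::<_, ()>(42) }, "compilation", &metrics).await;
        assert_eq!(value, Ok(42));
    }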
4 changes: 1 addition & 3 deletions api/src/main.rs
@@ -14,9 +14,7 @@ use anyhow::Context;
use handlers::compile::{compile_async, get_compile_result};
use handlers::process::get_process_status;
use handlers::scarb_test::{get_scarb_test_result, scarb_test_async};
use handlers::scarb_version::{
get_scarb_version_result, scarb_version_async,
};
use handlers::scarb_version::{get_scarb_version_result, scarb_version_async};
use handlers::utils::on_plugin_launched;
use handlers::{health, who_is_this};
use prometheus::Registry;
7 changes: 3 additions & 4 deletions api/src/worker.rs
@@ -108,7 +108,7 @@ impl WorkerEngine {
arc_process_states,
process_timestamps_to_purge,
)
.await;
.await;
})));
}
}
@@ -125,7 +125,7 @@ impl WorkerEngine {
arc_process_states,
process_timestamps_to_purge,
)
.await;
.await;
})));
}

@@ -213,7 +213,7 @@ impl WorkerEngine {
match dispatch_command(command, &metrics).await {
Ok(result) => {
println!("Command completed: {:?}", result);

arc_process_states
.insert(process_id, ProcessState::Completed(result));

@@ -229,7 +229,6 @@

arc_process_states.insert(process_id, ProcessState::Error(e));


arc_timestamps_to_purge
.push((
process_id,
