Skip to content

Commit

Permalink
refactor: make all API endpoints return a consistent response format
Browse files Browse the repository at this point in the history
  • Loading branch information
varex83 committed Dec 16, 2024
1 parent 895ea92 commit 9789c3c
Show file tree
Hide file tree
Showing 8 changed files with 254 additions and 146 deletions.
8 changes: 8 additions & 0 deletions api/src/errors.rs
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,14 @@ pub enum ApiError {
InvalidRequest,
#[error("Version not allowed")]
VersionNotAllowed,
#[error("Failed to get Scarb version: {0}")]
ScarbVersionNotFound(String),
#[error("Not found")]
NotFound(String),
#[error("Failed to fetch releases")]
FailedToFetchReleases(reqwest::Error),
#[error("Failed to parse releases")]
FailedToParseReleases(reqwest::Error),
}

pub type Result<T, E = ApiError> = std::result::Result<T, E>;
36 changes: 19 additions & 17 deletions api/src/handlers/allowed_versions.rs
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
use crate::errors::Result;
use crate::errors::{ApiError, Result};
use lazy_static::lazy_static;
use rocket::http::Status;
use rocket::serde::json::Json;
use semver::Version;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::RwLock;
use std::time::SystemTime;
use tracing::instrument;

use super::types::ApiResponse;

#[derive(Debug, Deserialize)]
struct GitHubRelease {
tag_name: String,
Expand All @@ -27,15 +27,17 @@ lazy_static! {
});
}

async fn fetch_github_releases() -> Result<Vec<String>, Box<dyn std::error::Error>> {
async fn fetch_github_releases() -> Result<Vec<String>> {
let client = reqwest::Client::new();
let releases: Vec<GitHubRelease> = client
.get("https://api.github.com/repos/starkware-libs/cairo/releases")
.header("User-Agent", "starknet-remix-plugin")
.send()
.await?
.await
.map_err(ApiError::FailedToFetchReleases)?
.json()
.await?;
.await
.map_err(ApiError::FailedToParseReleases)?;

let mut version_map: HashMap<(u64, u64), Version> = HashMap::new();

Expand Down Expand Up @@ -90,19 +92,19 @@ pub async fn start_version_updater() {

#[instrument]
#[get("/allowed-versions")]
pub async fn get_allowed_versions() -> (Status, Json<Vec<String>>) {
match do_get_allowed_versions().await {
Ok(versions) => (Status::Ok, versions),
Err(e) => (Status::InternalServerError, Json(vec![e.to_string()])),
}
pub async fn get_allowed_versions() -> ApiResponse<Vec<String>> {
do_get_allowed_versions().await
}

pub async fn is_version_allowed(version: &str) -> bool {
let allowed_versions = do_get_allowed_versions().await.unwrap_or(Json(vec![]));
allowed_versions.contains(&version.to_string())
let allowed_versions = do_get_allowed_versions().await;
allowed_versions
.data
.unwrap_or_default()
.contains(&version.to_string())
}

pub async fn do_get_allowed_versions() -> Result<Json<Vec<String>>> {
pub async fn do_get_allowed_versions() -> ApiResponse<Vec<String>> {
let should_fetch = {
let cache = CACHED_VERSIONS.read().unwrap();
cache.versions.is_empty()
Expand All @@ -113,11 +115,11 @@ pub async fn do_get_allowed_versions() -> Result<Json<Vec<String>>> {
let mut cache = CACHED_VERSIONS.write().unwrap();
cache.versions = versions;
cache.last_updated = SystemTime::now();
return Ok(Json(cache.versions.clone()));
return ApiResponse::ok(cache.versions.clone());
}
return Ok(Json(vec![]));
return ApiResponse::ok(vec![]);
}

let cache = CACHED_VERSIONS.read().unwrap();
Ok(Json(cache.versions.clone()))
ApiResponse::ok(cache.versions.clone())
}
41 changes: 15 additions & 26 deletions api/src/handlers/compile.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,21 +2,19 @@ use crate::errors::{ApiError, Result};
use crate::handlers::allowed_versions::is_version_allowed;
use crate::handlers::process::{do_process_command, fetch_process_result};
use crate::handlers::types::{ApiCommand, ApiCommandResult};
use crate::handlers::types::{FileContentMap, ScarbCompileResponse};
use crate::handlers::types::{CompileResponse, FileContentMap};
use crate::handlers::utils::{get_files_recursive, init_directories, AutoCleanUp};
use crate::handlers::{STATUS_COMPILATION_FAILED, STATUS_SUCCESS, STATUS_UNKNOWN_ERROR};
use crate::metrics::Metrics;
use crate::rate_limiter::RateLimited;
use crate::worker::WorkerEngine;
use rocket::http::Status;
use rocket::serde::json;
use rocket::serde::json::Json;
use rocket::{tokio, State};
use std::path::PathBuf;
use std::process::{Command, Stdio};
use tracing::instrument;

use super::types::CompilationRequest;
use super::types::{ApiResponse, CompilationRequest};

pub fn scarb_toml_with_version(version: &str) -> String {
format!(
Expand Down Expand Up @@ -45,7 +43,7 @@ pub async fn compile_async(
request_json: Json<CompilationRequest>,
_rate_limited: RateLimited,
engine: &State<WorkerEngine>,
) -> String {
) -> ApiResponse<String> {
tracing::info!("/compile/{:?}", request_json.0.file_names());
do_process_command(
ApiCommand::Compile {
Expand All @@ -56,26 +54,18 @@ pub async fn compile_async(
}

#[instrument(skip(engine))]
#[get("/compile-result/<process_id>")]
#[get("/compile-async/<process_id>")]
pub async fn get_compile_result(
process_id: &str,
engine: &State<WorkerEngine>,
) -> (Status, String) {
) -> ApiResponse<CompileResponse> {
tracing::info!("/compile-result/{:?}", process_id);
fetch_process_result(process_id, engine, |result| match result {
Ok(ApiCommandResult::Compile(compile_result)) => (
Status::Ok,
json::to_string(&compile_result)
.unwrap_or_else(|e| format!("Failed to fetch result: {:?}", e)),
),
Err(err) => (
Status::InternalServerError,
format!("Failed to fetch result: {:?}", err),
),
_ => (
Status::InternalServerError,
"Result is not available".to_string(),
),
Ok(ApiCommandResult::Compile(compile_result)) => ApiResponse::ok(compile_result.clone()),
Err(err) => {
ApiResponse::internal_server_error(format!("Failed to fetch result: {:?}", err))
}
_ => ApiResponse::internal_server_error("Result is not available".to_string()),
})
}

Expand Down Expand Up @@ -127,7 +117,7 @@ async fn ensure_scarb_toml(
pub async fn do_compile(
compilation_request: CompilationRequest,
_metrics: &Metrics,
) -> Result<Json<ScarbCompileResponse>> {
) -> Result<CompileResponse> {
// Verify version is in the allowed versions
if !is_version_allowed(compilation_request.version.as_deref().unwrap_or("")).await {
return Err(ApiError::VersionNotAllowed);
Expand Down Expand Up @@ -180,9 +170,8 @@ pub async fn do_compile(

auto_clean_up.clean_up().await;

Ok(Json(ScarbCompileResponse {
artifacts: file_content_map_array,
message,
status,
}))
Ok(ApiResponse::ok(file_content_map_array)
.with_status(status)
.with_code(200)
.with_message(message))
}
54 changes: 26 additions & 28 deletions api/src/handlers/process.rs
Original file line number Diff line number Diff line change
@@ -1,60 +1,59 @@
use crate::errors::ApiError;
use crate::handlers::types::{ApiCommand, ApiCommandResult};
use crate::worker::{ProcessState, WorkerEngine};
use rocket::http::Status;
use rocket::State;
use serde::Serialize;
use tracing::{info, instrument};
use uuid::Uuid;

use super::types::ApiResponse;

#[instrument(skip(engine))]
#[get("/process_status/<process_id>")]
pub async fn get_process_status(process_id: String, engine: &State<WorkerEngine>) -> String {
pub async fn get_process_status(
process_id: String,
engine: &State<WorkerEngine>,
) -> ApiResponse<String> {
info!("/process_status/{:?}", process_id);
// get status of process by ID
match Uuid::parse_str(&process_id) {
Ok(process_uuid) => {
if engine.arc_process_states.contains_key(&process_uuid) {
format!(
"{:}",
ApiResponse::ok(
engine
.arc_process_states
.get(&process_uuid)
.unwrap()
.value()
.to_string(),
)
} else {
// TODO can we return HTTP status code here?
format!("Process with id={} not found", process_id)
ApiResponse::not_found(format!("Process with id={} not found", process_id))
}
}
Err(e) => {
// TODO can we return HTTP status code here?
e.to_string()
}
Err(e) => ApiResponse::bad_request(e.to_string()),
}
}

pub fn do_process_command(command: ApiCommand, engine: &State<WorkerEngine>) -> String {
pub fn do_process_command(
command: ApiCommand,
engine: &State<WorkerEngine>,
) -> ApiResponse<String> {
// queue the new Scarb command
match engine.enqueue_command(command) {
Ok(uuid) => {
// return the process ID
format!("{}", uuid)
}
Err(e) => {
// TODO can we return HTTP status code here?
e
}
Ok(uuid) => ApiResponse::ok(uuid.to_string()),
Err(e) => ApiResponse::internal_server_error(e.to_string()),
}
}

pub fn fetch_process_result<F>(
pub fn fetch_process_result<F, T>(
process_id: &str,
engine: &State<WorkerEngine>,
handle_result: F,
) -> (Status, String)
) -> ApiResponse<T>
where
F: FnOnce(Result<&ApiCommandResult, &ApiError>) -> (Status, String),
F: FnOnce(Result<&ApiCommandResult, &ApiError>) -> ApiResponse<T>,
T: Serialize,
{
// get status of process by ID
match Uuid::parse_str(process_id) {
Expand All @@ -68,15 +67,14 @@ where
{
ProcessState::Completed(result) => handle_result(Ok(result)),
ProcessState::Error(e) => handle_result(Err(e)),
_ => (
Status::InternalServerError,
"Result not available".to_string(),
),
_ => {
handle_result(Err(&ApiError::NotFound("Result not available".to_string())))
}
}
} else {
(Status::NotFound, "Process id not found".to_string())
handle_result(Err(&ApiError::NotFound("Process id not found".to_string())))
}
}
Err(e) => (Status::BadRequest, e.to_string()),
Err(e) => handle_result(Err(&ApiError::NotFound(e.to_string()))),
}
}
41 changes: 17 additions & 24 deletions api/src/handlers/scarb_test.rs
Original file line number Diff line number Diff line change
@@ -1,56 +1,45 @@
use crate::errors::ApiError;
use crate::errors::Result;
use crate::handlers::process::{do_process_command, fetch_process_result};
use crate::handlers::types::{ApiCommand, ApiCommandResult, ScarbTestResponse};
use crate::handlers::types::{ApiCommand, ApiCommandResult, TestResponse};
use crate::rate_limiter::RateLimited;
use crate::utils::lib::get_file_path;
use crate::worker::WorkerEngine;
use rocket::http::Status;
use rocket::serde::json;
use rocket::serde::json::Json;
use rocket::State;
use std::path::PathBuf;
use std::process::{Command, Stdio};
use tracing::{debug, info, instrument};

use super::types::ApiResponse;

#[instrument(skip(engine, _rate_limited))]
#[get("/scarb-test-async/<remix_file_path..>")]
#[post("/scarb-test-async/<remix_file_path..>")]
pub async fn scarb_test_async(
remix_file_path: PathBuf,
engine: &State<WorkerEngine>,
_rate_limited: RateLimited,
) -> String {
) -> ApiResponse<String> {
info!("/scarb-test-async/{:?}", remix_file_path);
do_process_command(ApiCommand::ScarbTest { remix_file_path }, engine)
}

#[instrument(skip(engine))]
#[get("/scarb-test-result/<process_id>")]
#[get("/scarb-test-async/<process_id>")]
pub async fn get_scarb_test_result(
process_id: &str,
engine: &State<WorkerEngine>,
) -> (Status, String) {
info!("/scarb-test-result/{:?}", process_id);
) -> TestResponse {
info!("/scarb-test-async/{:?}", process_id);
fetch_process_result(process_id, engine, |result| match result {
Ok(ApiCommandResult::ScarbTest(scarb_result)) => (
Status::Ok,
json::to_string(&scarb_result)
.unwrap_or_else(|e| format!("Failed to fetch result: {:?}", e)),
),
Err(err) => (
Status::InternalServerError,
format!("Failed to fetch result: {:?}", err),
),
_ => (
Status::InternalServerError,
"Result not available".to_string(),
),
Ok(ApiCommandResult::Test(test_result)) => test_result.clone(),
Err(e) => ApiResponse::not_found(e.to_string()),
_ => ApiResponse::internal_server_error("Result not available".to_string()),
})
}

/// Run Scarb to test a project
///
pub async fn do_scarb_test(remix_file_path: PathBuf) -> Result<Json<ScarbTestResponse>> {
pub async fn do_scarb_test(remix_file_path: PathBuf) -> Result<TestResponse> {
let remix_file_path = remix_file_path
.to_str()
.ok_or(ApiError::FailedToParseString)?
Expand Down Expand Up @@ -100,5 +89,9 @@ pub async fn do_scarb_test(remix_file_path: PathBuf) -> Result<Json<ScarbTestRes
}
.to_string();

Ok(Json(ScarbTestResponse { message, status }))
Ok(ApiResponse::ok(())
.with_status(status)
.with_code(200)
.with_message(message)
.with_timestamp(chrono::Utc::now().to_rfc3339()))
}
Loading

0 comments on commit 9789c3c

Please sign in to comment.