diff --git a/.env b/.env index 2eaf909..a1cac5e 100644 --- a/.env +++ b/.env @@ -3,4 +3,6 @@ FILE_MODE=public CONTRACT_ADDRESS=0x045f59310ADD88E6d23ca58A0Fa7A55BEE6d2a611 CHAIN=sepolia API_DOMAIN=40YcwfNEsn -VITE_API_ENDPOINT=http://localhost:7000 \ No newline at end of file +VITE_API_ENDPOINT=http://localhost:3600 +BACKEND_VERSION=1.2.0 +FRONTEND_VERSION=1.2.0 \ No newline at end of file diff --git a/Cargo.toml b/Cargo.toml index b8c9aab..f7ba20b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "aqua-container" -version = "0.1.0" +version = "1.2.0" edition = "2021" description = "Aquachain File Container" repository = "https://github.com/inblockio/aqua-container" diff --git a/src/controllers/api_controller.rs b/src/controllers/api_controller.rs index 61f5a0a..d7c1cdb 100644 --- a/src/controllers/api_controller.rs +++ b/src/controllers/api_controller.rs @@ -1,6 +1,8 @@ -use crate::models::input::{DeleteInput, RevisionInput, UpdateConfigurationInput, WitnessInput}; +use crate::models::input::{ + DeleteInput, MergeInput, RevisionInput, UpdateConfigurationInput, WitnessInput, +}; use crate::models::page_data::{ApiResponse, PageDataContainer}; -use crate::models::PagesDataTable; +use crate::models::NewPagesTable; use crate::models::{file::FileInfo, page_data}; use crate::util::{ check_if_page_data_revision_are_okay, check_or_generate_domain, compute_content_hash, @@ -40,8 +42,8 @@ use futures::{Stream, TryStreamExt}; use serde::{Deserialize, Serialize}; extern crate serde_json_path_to_error as serde_json; use crate::db::pages_db::{ - delete_all_data, delete_all_user_files, delete_page_data, - fetch_all_pages_data_per_user, fetch_page_data, insert_page_data, update_page_data, + delete_all_data, delete_all_user_files, delete_page_data, fetch_all_pages_data_per_user, + fetch_page_data, insert_page_data, update_page_data, }; use dotenv::{dotenv, vars}; use sha3::{Digest, Sha3_512}; @@ -149,7 +151,7 @@ pub async fn fetch_explorer_files( for row in page_data { pages.push(FileInfo { - id: row.id.unwrap().try_into().unwrap(), + id: row.id.try_into().unwrap(), name: row.name, extension: row.extension, page_data: row.page_data, @@ -317,6 +319,294 @@ pub async fn explorer_file_verify_hash_upload( (StatusCode::BAD_REQUEST, Json(res)) } +pub async fn explorer_import_aqua_chain( + State(server_database): State, + headers: HeaderMap, + mut multipart: Multipart, +) -> (StatusCode, Json) { + tracing::debug!("explorer_import_aqua_chain fn"); + let mut log_data: Vec = Vec::new(); + let mut res: ApiResponse = ApiResponse { + logs: log_data, + file: None, + files: Vec::new(), + }; + + // Extract the 'metamask_address' header + let metamask_address = match headers.get("metamask_address") { + Some(value) => match value.to_str() { + Ok(key) => key, + Err(err) => { + tracing::error!("headers get error {} ", err); + // return (StatusCode::BAD_REQUEST, Json(json!({"error": "Invalid metamask_address header"}))) + + res.logs + .push(format!("Error: Meta mask public key error: {:?}", err)); + return (StatusCode::BAD_REQUEST, Json(res)); + } + }, + None => { + tracing::debug!("metamask_address header missing "); + // return (StatusCode::BAD_REQUEST, Json(json!({"error": "metamask_address header missing"}))) + res.logs + .push("Error: Meta mask public key missing".to_string()); + return (StatusCode::BAD_REQUEST, Json(res)); + } + }; + + let mut account: Option = None; + let mut aqua_json: Option> = None; + + // Process only two fields: account and file + for _ in 0..2 { + let field = match 
multipart.next_field().await { + Ok(Some(field)) => field, + Ok(None) => break, + Err(e) => { + tracing::error!("Multipart error: {}", e); + res.logs.push(format!("Multipart error: {}", e)); + return (StatusCode::BAD_REQUEST, Json(res)); + } + }; + + let name = match field.name() { + Some(name) => name.to_string(), + None => { + tracing::error!("Field name missing"); + res.logs.push("Field name missing".to_string()); + return (StatusCode::BAD_REQUEST, Json(res)); + } + }; + + tracing::debug!("Processing field: {}", name); + match name.as_str() { + "account" => { + account = match field.text().await { + Ok(text) => Some(text), + Err(e) => { + tracing::error!("Failed to read account field: {}", e); + res.logs + .push(format!("Failed to read account field: {}", e)); + return (StatusCode::BAD_REQUEST, Json(res)); + } + }; + } + "file" => { + // Read the file content + let file_content = match field.bytes().await { + Ok(content) => content, + Err(e) => { + tracing::error!("Failed to read file content: {}", e); + res.logs.push(format!("Failed to read file content: {}", e)); + return (StatusCode::BAD_REQUEST, Json(res)); + } + }; + + // Parse JSON content into AquaData struct + aqua_json = + match serde_json::from_slice::>(&file_content) { + Ok(data) => Some(data), + Err(e) => { + tracing::error!("Failed to parse JSON: {}", e); + res.logs.push(format!("Failed to parse JSON: {}", e)); + return (StatusCode::BAD_REQUEST, Json(res)); + } + }; + } + _ => { + tracing::warn!("Unexpected field: {}", name); + } + } + } + + // Verify we have both account and file + let account = match account { + Some(acc) => acc, + None => { + tracing::error!("Account information missing"); + res.logs.push("Account information missing".to_string()); + return (StatusCode::BAD_REQUEST, Json(res)); + } + }; + + // Verify we have the aqua chain JSON as well + let aqua_json = match aqua_json { + Some(acc) => acc, + None => { + tracing::error!("Aqua JSON data error"); + res.logs.push("Aqua JSON data error".to_string()); + return (StatusCode::BAD_REQUEST, Json(res)); + } + }; + + // tracing::debug!( + // "Processing aqua upload Account {} - File data {:#?} ", + // account, + // aqua_json, + // ); + + let mut mode = "private".to_string(); + let file_mode = env::var("FILE_MODE").unwrap_or_default(); + + if !file_mode.is_empty() { + mode = file_mode; + } + + let start = SystemTime::now(); + let timestamp = start + .duration_since(UNIX_EPOCH) + .expect("Time went backwards") + .as_secs(); + println!("Current Unix timestamp: {}", timestamp); + + let mut file_name = format!("{}", timestamp); + + let chain: Option<&HashChain> = aqua_json.pages.get(0); + + if chain.is_none() { + // tracing::error!("Aqua JSON data error: first chain not found"); + res.logs + .push("Aqua data error: first chain not found".to_string()); + return (StatusCode::BAD_REQUEST, Json(res)); + } + + let genesis_revision = chain + .unwrap() + .revisions + .iter() + .find_map(|(hash, revision)| { + if hash.to_string() == chain.unwrap().genesis_hash { + tracing::info!("Found genesis revision"); + Some(revision) + } else { + tracing::error!("genesis revision not found"); + None + } + }); + if genesis_revision.is_none() { + // tracing::error!("Aqua JSON data error: genesis revision not found"); + res.logs + .push("Aqua data error: genesis revision not found".to_string()); + return (StatusCode::BAD_REQUEST, Json(res)); + } + if genesis_revision.unwrap().content.file.is_none() { + tracing::error!("Aqua JSON data error: genesis revision does not contain file info");
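+ // Reviewer note: the find_map above logs "genesis revision not found" once for
+ // every non-matching revision while it scans. A minimal, quieter sketch of the
+ // same lookup (assuming the same revisions map and genesis_hash field on HashChain):
+ //
+ //     let genesis_revision = chain.and_then(|c| {
+ //         c.revisions
+ //             .iter()
+ //             .find(|(hash, _)| hash.to_string() == c.genesis_hash)
+ //             .map(|(_, revision)| revision)
+ //     });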
+ res.logs + .push("Aqua data error: genesis revision does not contain file info".to_string()); + return (StatusCode::BAD_REQUEST, Json(res)); + } + let file_name = genesis_revision + .unwrap() + .content + .file + .clone() + .unwrap() + .filename; + + let path = std::path::Path::new(&file_name); + let mut content_type: String = String::from(""); + + // Check if the file has an extension + if path.extension().is_some() { + tracing::error!("Aqua JSON generating file type from extension"); + res.logs + .push("Aqua JSON generating file type from extension".to_string()); + + match get_content_type(&file_name) { + Some(data) => { + content_type = data; + } + None => { + content_type = "unknown".to_string(); + } + } + } else { + tracing::error!("Aqua JSON generating file type from content bytes"); + res.logs + .push("Aqua JSON generating file type from content bytes".to_string()); + + let file_data_info = get_file_info( + genesis_revision + .unwrap() + .content + .file + .clone() + .unwrap() + .data + .to_string(), + ); + + content_type = match file_data_info { + Ok(data) => { + tracing::error!( + "file type found {}; the general result is {:#?}", + data.file_type, + data + ); + data.file_type + } + Err(err) => { + tracing::error!("Failed to infer file type {}", err); + "unknown".to_string() + } + }; + } + + // Convert struct to JSON string + let json_string = match serde_json::to_string(&aqua_json) { + Ok(json) => json, + Err(e) => { + tracing::error!("Failed to serialize page data: {}", e); + res.logs + .push(format!("Failed to serialize page data: {}", e)); + return (StatusCode::INTERNAL_SERVER_ERROR, Json(res)); + } + }; + + let naive_datetime: NaiveDateTime = Utc::now().naive_utc(); + let datetime_string = naive_datetime.format("%Y-%m-%d %H:%M:%S").to_string(); + + let db_data_model = NewPagesTable { + name: file_name, + extension: content_type, + page_data: json_string, + mode, + owner: metamask_address.to_string(), + is_shared: false, + created_at: datetime_string, + }; + + let mut conn = match server_database.pool.get() { + Ok(connection) => connection, + Err(e) => { + res.logs + .push("Failed to get database connection".to_string()); + + println!("Error Fetching connection {:#?}", res); + return (StatusCode::INTERNAL_SERVER_ERROR, Json(res)); + } + }; + + let insert_result = insert_page_data(db_data_model.clone(), &mut conn); + if insert_result.is_err() { + let err = insert_result.err().unwrap(); + tracing::error!("Failed to insert page: {}", err); + res.logs.push(format!("Failed to insert page: {}", err)); + return (StatusCode::INTERNAL_SERVER_ERROR, Json(res)); + } + + let file_info = FileInfo { + id: insert_result.unwrap(), //record.id, + name: db_data_model.name, + extension: db_data_model.extension, + page_data: db_data_model.page_data, + mode: db_data_model.mode, + owner: db_data_model.owner.to_string(), + }; + res.file = Some(file_info); + return (StatusCode::CREATED, Json(res)); +} + pub async fn explorer_aqua_file_upload( State(server_database): State, headers: HeaderMap, @@ -561,14 +851,17 @@ pub async fn explorer_aqua_file_upload( } }; - let db_data_model = PagesDataTable { - id: 0, + let naive_datetime: NaiveDateTime = Utc::now().naive_utc(); + let datetime_string = naive_datetime.format("%Y-%m-%d %H:%M:%S").to_string(); + + let db_data_model = NewPagesTable { name: file_name, extension: content_type, page_data: json_string, - mode: mode, + mode, owner: metamask_address.to_string(), is_shared: false, + created_at: datetime_string, }; let mut conn = match 
server_database.pool.get() { @@ -837,14 +1130,17 @@ pub async fn explorer_file_upload( mode = file_mode; } - let db_data_model = PagesDataTable { - id: 0, + let naive_datetime: NaiveDateTime = Utc::now().naive_utc(); + let datetime_string = naive_datetime.format("%Y-%m-%d %H:%M:%S").to_string(); + + let db_data_model = NewPagesTable { name: file_name, extension: content_type, page_data: json_string, - mode: mode, + mode, owner: metamask_address.to_string(), is_shared: false, + created_at: datetime_string, }; let mut conn = match server_database.pool.get() { @@ -889,7 +1185,7 @@ pub async fn explorer_sign_revision( tracing::debug!("explorer_sign_revision"); // Get the name parameter from the input - if input.file_id == 0 { + if input.file_id == 0 { log_data.push("Error : file name is empty".to_string()); let res: ApiResponse = ApiResponse { logs: log_data, @@ -1079,6 +1375,69 @@ pub async fn explorer_sign_revision( let mut new_data = page_data.clone(); new_data.page_data = page_data_new.clone(); + // let mut conn = match server_database.pool.get() { + // Ok(connection) => connection, + // Err(e) => { + // log_data.push("Failed data not found in database".to_string()); + + // log_data.push("Failed to get database connection".to_string()); + // let res: ApiResponse = ApiResponse { + // logs: log_data, + // file: None, + // files: Vec::new(), + // }; + + // println!("Error Fetching connection {:#?}", res); + // return (StatusCode::INTERNAL_SERVER_ERROR, Json(res)); + // } + // }; + + // let insert_result = insert_page_data(db_data_model.clone(), & mut conn); + // let page_data_result = fetch_page_data(input.filename, & mut conn); + + let update_result = update_page_data(new_data.clone(), &mut conn); + if update_result.is_err() { + let e = update_result.err().unwrap(); + tracing::error!("Failed to update page data: {:?}", e); + log_data.push(format!("Failed to update page data : {:?}", e)); + + let res: ApiResponse = ApiResponse { + logs: log_data, + file: None, + files: Vec::new(), + }; + return (StatusCode::INTERNAL_SERVER_ERROR, Json(res)); + } + + let file_info = FileInfo { + id: new_data.id as i64, + name: new_data.name, + extension: new_data.extension, + page_data: page_data_new.clone(), + owner: new_data.owner, + mode: new_data.mode, + }; + + let res: ApiResponse = ApiResponse { + logs: log_data, + file: Some(file_info), + files: Vec::new(), + }; + return (StatusCode::OK, Json(res)); +} + +pub async fn explorer_merge_chain( + State(server_database): State, + Json(input): Json, +) -> (StatusCode, Json) { + let mut log_data: Vec = Vec::new(); + + tracing::debug!("explorer_merge_chain"); + + let file_id = input.file_id; + let last_identical_revision_hash = input.last_identical_revision_hash; + let revisions_to_import = input.revisions_to_import; + let mut conn = match server_database.pool.get() { Ok(connection) => connection, Err(e) => { @@ -1097,7 +1456,98 @@ pub async fn explorer_sign_revision( }; // let insert_result = insert_page_data(db_data_model.clone(), & mut conn); - // let page_data_result = fetch_page_data(input.filename, & mut conn); + let page_data_result = fetch_page_data(file_id, &mut conn); + + if page_data_result.is_err() { + tracing::error!("Failed not found ",); + + log_data.push("Failed data not found in database".to_string()); + + let res: ApiResponse = ApiResponse { + logs: log_data, + file: None, + files: Vec::new(), + }; + return (StatusCode::NOT_FOUND, Json(res)); + } + let page_data = page_data_result.unwrap(); + + let deserialized: PageDataContainer = + match 
serde_json::from_str(&page_data.page_data) { + Ok(data) => { + log_data.push("Success: parsed page data".to_string()); + data + } + Err(e) => { + tracing::error!("Failed to parse page data record: {:?}", e); + log_data.push(format!("error: Failed to parse page data record: {:?}", e)); + if let Some(source) = e.source() { + tracing::info!("Source: {}", source); + } else { + tracing::info!("Source NOT FOUND "); + } + let res: ApiResponse = ApiResponse { + logs: log_data, + file: None, + files: Vec::new(), + }; + return (StatusCode::INTERNAL_SERVER_ERROR, Json(res)); + } + }; + + let mut doc = deserialized; + let len = doc.pages[0].revisions.len(); + + let page_revisions = &doc.pages[0].revisions; + + // Find the index of the last identical revision hash + let last_index = page_revisions + .iter() + .position(|(hash, _)| format!("{:?}", hash) == last_identical_revision_hash) + .ok_or_else(|| { + format!( + "Hash {} not found in existing revisions", + last_identical_revision_hash + ) + }).unwrap(); + + // Create a new vector with revisions up to the last identical hash + let mut new_revisions = page_revisions[..=last_index].to_vec(); + + // Append the new revisions, mapping them to (verification_hash, revision) tuples + new_revisions.extend(revisions_to_import.into_iter().map(|revision| { + let hash = revision.metadata.verification_hash.clone(); + (hash, revision) + })); + + // Replace the original revisions with the new combined vector + doc.pages[0].revisions = new_revisions; + + // Serialize the updated document + let page_data_new = match serde_json::to_string(&doc) { + Ok(data) => { + log_data.push("revision serialized successfully".to_string()); + + data + } + Err(e) => { + tracing::error!("Failed to serialize updated page data: {:?}", e); + + log_data.push(format!("Failed to serialize updated page data: {:?}", e)); + + let res: ApiResponse = ApiResponse { + logs: log_data, + file: None, + files: Vec::new(), + }; + + return (StatusCode::INTERNAL_SERVER_ERROR, Json(res)); + } + }; + + let mut new_data = page_data.clone(); + new_data.page_data = page_data_new.clone(); + let update_result = update_page_data(new_data.clone(), &mut conn); if update_result.is_err() { @@ -1114,14 +1564,14 @@ } let file_info = FileInfo { - id: new_data.id, + id: new_data.id as i64, name: new_data.name, extension: new_data.extension, page_data: page_data_new.clone(), owner: new_data.owner, mode: new_data.mode, }; + let res: ApiResponse = ApiResponse { logs: log_data, file: Some(file_info), files: Vec::new(), }; return (StatusCode::OK, Json(res)); } @@ -1569,13 +2019,14 @@ pub async fn explorer_witness_file( } let file_info = FileInfo { - id: new_data.id, + id: new_data.id as i64, name: new_data.name, extension: new_data.extension, page_data: page_data_new, owner: new_data.owner, mode: new_data.mode, }; + let res: ApiResponse = ApiResponse { logs: log_data, file: Some(file_info), diff --git a/src/controllers/mod.rs b/src/controllers/mod.rs index 424e074..f82c654 100644 --- a/src/controllers/mod.rs +++ b/src/controllers/mod.rs @@ -1,4 +1,5 @@ pub mod api_controller; pub mod user_profile_controller; pub mod auth_controller; -pub mod share_controller; \ No newline at end of file +pub mod share_controller; +pub mod versions_controller; \ No newline at end of file diff --git a/src/controllers/share_controller.rs b/src/controllers/share_controller.rs index c0b1bf4..1ffd539 100644 --- a/src/controllers/share_controller.rs +++ b/src/controllers/share_controller.rs @@ -52,7 +52,7 @@ pub async fn get_share_data( println!("Error Fetching 
connection {:#?}", res); return (StatusCode::INTERNAL_SERVER_ERROR, Json(res)); } - }; + }; let share_payload = fetch_share_data_by_address(&share_identifier.as_str(), &mut conn); @@ -76,7 +76,7 @@ pub async fn get_share_data( return (StatusCode::NOT_FOUND, Json(res)); } - let page_data_result = fetch_page_data(firs_share_payload_data.id.unwrap(), &mut conn); + let page_data_result = fetch_page_data(firs_share_payload_data.file_id, &mut conn); if page_data_result.is_err() { tracing::error!("Failed not found ",); diff --git a/src/controllers/versions_controller.rs b/src/controllers/versions_controller.rs new file mode 100644 index 0000000..52a3baa --- /dev/null +++ b/src/controllers/versions_controller.rs @@ -0,0 +1,29 @@ + +use std::env; + +use axum::{ + body::Bytes, + extract::{DefaultBodyLimit, Multipart, Path, Request, State}, + handler::HandlerWithoutStateExt, + http::StatusCode, + response::{Html, Redirect}, + routing::{get, post}, + BoxError, Form, Json, Router, +}; +use serde_json::json; + +// Handler function that returns a JSON response +pub async fn version_details() -> Json { + let mut frontend = env::var("FRONTEND_VERSION").unwrap_or_default(); + let mut backend = env::var("BACKEND_VERSION").unwrap_or_default(); + + if frontend.is_empty() { + frontend = "1.2.0".to_string() + } + + if backend.is_empty() { + backend = "1.2.0".to_string() + } + + Json(json!({ "backend": backend, "frontend" : frontend })) +} \ No newline at end of file diff --git a/src/db/pages_db.rs b/src/db/pages_db.rs index b124d93..d50bcf6 100644 --- a/src/db/pages_db.rs +++ b/src/db/pages_db.rs @@ -1,4 +1,4 @@ -use crate::models::{PagesDataTable, PagesTable, DB_POOL}; +use crate::models::{NewPagesTable, PagesTable}; use chrono::{NaiveDateTime, Utc}; use diesel::r2d2::{self, ConnectionManager, PooledConnection}; use diesel::SqliteConnection; @@ -11,15 +11,15 @@ use diesel::result::Error as DieselError; pub fn insert_page_data( - data: PagesDataTable, + data: NewPagesTable, db_connection: &mut PooledConnection>, ) -> Result { // Use Utc to get the current time as a NaiveDateTime let naive_datetime: NaiveDateTime = Utc::now().naive_utc(); let datetime_string = naive_datetime.format("%Y-%m-%d %H:%M:%S").to_string(); - let record = PagesTable { - id: None, + let record = NewPagesTable { + // id: None, name: data.name, extension: data.extension, mode: data.mode, @@ -30,11 +30,11 @@ pub fn insert_page_data( }; let inserted_id: i32 = diesel::insert_into(crate::schema::pages::table) - .values(&record) + .values(record) .returning(crate::schema::pages::dsl::id) - .get_result::>(db_connection) - .map_err(|e| format!("Error saving new siwe data: {}", e))? 
- .unwrap_or(-1); // Provide a default value if None + .get_result::(db_connection) + .map_err(|e| format!("Error saving new siwe data: {}", e))?; + // .unwrap(); // Provide a default value if None Ok(inserted_id as i64) } @@ -44,7 +44,7 @@ pub fn insert_page_data( pub fn fetch_page_data( id_par: i32, db_connection: &mut PooledConnection>, -) -> Result { +) -> Result { use crate::schema::pages::dsl::*; let result = pages @@ -55,15 +55,16 @@ pub fn fetch_page_data( _ => format!("Error fetching page data: {}", e), })?; - Ok(PagesDataTable { - id: result.id.unwrap_or(0) as i64, - name: result.name, - extension: result.extension, - page_data: result.page_data, - mode: result.mode, - owner: result.owner, - is_shared: result.is_shared - }) + // Ok(PagesTable { + // id: result.id, + // name: result.name, + // extension: result.extension, + // page_data: result.page_data, + // mode: result.mode, + // owner: result.owner, + // is_shared: result.is_shared + // }) + Ok(result) } pub fn fetch_all_pages_data_per_user( @@ -89,12 +90,12 @@ pub fn fetch_all_pages_data_per_user( } pub fn update_page_data( - data: PagesDataTable, + data: PagesTable, db_connection: &mut PooledConnection>, ) -> Result<(), String> { use crate::schema::pages::dsl::*; println!("Updating"); - let res = diesel::update(pages.filter(name.eq(&data.name))) + let res = diesel::update(pages.find(data.id)) .set(( extension.eq(&data.extension), page_data.eq(&data.page_data), diff --git a/src/main.rs b/src/main.rs index a5d94ca..4d1c344 100644 --- a/src/main.rs +++ b/src/main.rs @@ -55,14 +55,13 @@ use std::sync::{mpsc, Mutex, MutexGuard}; use crate::controllers::api_controller::explorer_file_verify_hash_upload; use crate::util::{check_or_generate_domain, establish_connection}; use controllers::{api_controller::{ - explorer_aqua_file_upload, explorer_delete_all_files, explorer_delete_file, - explorer_file_upload, explorer_sign_revision, - explorer_witness_file, fetch_explorer_files, + explorer_aqua_file_upload, explorer_delete_all_files, explorer_delete_file, explorer_file_upload, explorer_import_aqua_chain, explorer_merge_chain, explorer_sign_revision, explorer_witness_file, fetch_explorer_files }, auth_controller::session_logout_by_nonce, share_controller::{get_share_data, save_share_data}}; use controllers::auth_controller::{ siwe_sign_in, verify_siwe_message, fetch_nonce_session }; +use controllers::versions_controller::version_details; use controllers::user_profile_controller::{explorer_fetch_user_profile, explorer_update_user_profile}; const UPLOADS_DIRECTORY: &str = "uploads"; @@ -116,6 +115,10 @@ async fn main() { .route("/explorer_files", get(fetch_explorer_files)) .route("/explorer_file_upload", post(explorer_file_upload)) .route( + "/explorer_aqua_chain_import", + post(explorer_import_aqua_chain), + ) + .route( "/explorer_aqua_file_upload", post(explorer_aqua_file_upload), ) @@ -125,6 +128,7 @@ async fn main() { ) .route("/explorer_sign_revision", post(explorer_sign_revision)) .route("/explorer_witness_file", post(explorer_witness_file)) + .route("/explorer_merge_chain", post(explorer_merge_chain)) .route("/explorer_delete_file", post(explorer_delete_file)) .route("/explorer_delete_all_files", get(explorer_delete_all_files)) .route( @@ -144,6 +148,7 @@ async fn main() { .route("/siwe_logout", post(session_logout_by_nonce)) .route("/share_data/{share_identifier}", get(get_share_data)) .route("/share_data", post(save_share_data)) + .route("/version", get(version_details)) //.route("/list", get(show_files_list).post(show_files)) 
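// Reviewer note: a quick smoke test for the new /version route wired up above,
// assuming the backend listens on the port used in VITE_API_ENDPOINT (3600):
//
//     curl http://localhost:3600/version
//     => {"backend":"1.2.0","frontend":"1.2.0"}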
.with_state(server_database) .layer(CorsLayer::permissive()) diff --git a/src/models/input.rs b/src/models/input.rs index c2ae9bd..0f6ecbf 100644 --- a/src/models/input.rs +++ b/src/models/input.rs @@ -1,3 +1,4 @@ +use aqua_verifier_rs_types::models::revision::Revision; use serde::{Deserialize, Serialize}; #[derive(Deserialize, Serialize, Debug)] @@ -35,3 +36,11 @@ pub struct UpdateConfigurationInput { pub mode: String, pub contract: String, } + + +#[derive(Deserialize, Serialize, Debug)] +pub struct MergeInput { + pub file_id: i32, + pub last_identical_revision_hash: String, + pub revisions_to_import: Vec, +} \ No newline at end of file diff --git a/src/models/mod.rs b/src/models/mod.rs index 680942f..bca3aea 100644 --- a/src/models/mod.rs +++ b/src/models/mod.rs @@ -1,46 +1,70 @@ - +use diesel::prelude::*; use diesel::r2d2::{self, ConnectionManager}; use diesel::SqliteConnection; use serde::{Deserialize, Serialize}; -use diesel::prelude::*; pub mod file; -pub mod page_data; pub mod input; -pub mod user_profiles; +pub mod page_data; pub mod share_data; +pub mod user_profiles; use chrono::{DateTime, Utc}; pub type DB_POOL = r2d2::Pool>; -use diesel::prelude::*; use chrono::NaiveDateTime; -use siwe::TimeStamp; -use diesel::prelude::*; use diesel::expression::AsExpression; +use diesel::prelude::*; +use diesel::prelude::*; use diesel::sql_types::Nullable; +use siwe::TimeStamp; -#[derive(Queryable, Selectable, Serialize, Deserialize, Debug, Clone, Insertable)] +#[derive( + Queryable, + Selectable, + Serialize, + Deserialize, + Debug, + Clone, + Insertable, + Identifiable, + AsChangeset, +)] #[diesel(table_name = crate::schema::pages)] pub struct PagesTable { - pub id: Option, + pub id: i32, pub name: String, pub extension: String, pub page_data: String, pub owner: String, pub mode: String, pub created_at: String, - pub is_shared : bool + pub is_shared: bool, } -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct PagesDataTable { - pub id: i64, +#[derive(Debug, Serialize, Deserialize, Clone, Insertable)] +#[diesel(table_name = crate::schema::pages)] +pub struct NewPagesTable { pub name: String, pub extension: String, pub page_data: String, - pub mode: String, pub owner: String, - pub is_shared: bool + pub mode: String, + pub created_at: String, + pub is_shared: bool, +} + +impl From for NewPagesTable { + fn from(page: PagesTable) -> Self { + NewPagesTable { + name: page.name, + extension: page.extension, + page_data: page.page_data, + owner: page.owner, + mode: page.mode, + created_at: page.created_at, + is_shared: page.is_shared, + } + } } #[derive(Queryable, Selectable, Deserialize, Serialize, Debug, Clone, Insertable)] @@ -54,7 +78,6 @@ pub struct SiweSessionsTable { pub expiration_time: Option, } - #[derive(Queryable, Selectable, Deserialize, Serialize, Debug, Clone, Insertable)] #[diesel(table_name = crate::schema::share_data)] #[diesel(check_for_backend(diesel::sqlite::Sqlite))] @@ -65,7 +88,6 @@ pub struct ShareDataTable { pub created_time: String, } - #[derive(Queryable, Selectable, Deserialize, Serialize, Debug, Clone, Insertable)] #[diesel(table_name = crate::schema::user_profiles)] #[diesel(check_for_backend(diesel::sqlite::Sqlite))] @@ -77,4 +99,4 @@ pub struct UserProfilesTable { pub contract_address: String, pub file_mode: String, pub domain_name: String, -} \ No newline at end of file +} diff --git a/src/models/share_data.rs b/src/models/share_data.rs index c131aff..f363c56 100644 --- a/src/models/share_data.rs +++ b/src/models/share_data.rs @@ -1,14 +1,14 @@ use 
serde::{Deserialize, Serialize}; -use super::{file::FileInfo, PagesDataTable, ShareDataTable}; +use super::{file::FileInfo, PagesTable, ShareDataTable}; #[derive(Debug, Serialize, Deserialize, Clone)] pub struct ShareDataResponse{ pub logs : Vec, pub share_data : Option, - pub file_data : Option, + pub file_data : Option, } diff --git a/src/schema.rs b/src/schema.rs index a779183..50016c5 100644 --- a/src/schema.rs +++ b/src/schema.rs @@ -2,7 +2,7 @@ diesel::table! { pages (id) { - id -> Nullable, + id -> Integer, name -> Text, extension -> Text, page_data -> Text, diff --git a/web/src/components/FilePreview.tsx b/web/src/components/FilePreview.tsx index 76a94d4..329c588 100644 --- a/web/src/components/FilePreview.tsx +++ b/web/src/components/FilePreview.tsx @@ -70,7 +70,7 @@ const FilePreview = ({ fileInfo }: IFilePreview) => { borderRadius: '12px', width: '100%', border: 'none', - height: '500px' + height: '800px' }} > diff --git a/web/src/components/aqua_chain_actions.tsx b/web/src/components/aqua_chain_actions.tsx index c2248c3..e72e520 100644 --- a/web/src/components/aqua_chain_actions.tsx +++ b/web/src/components/aqua_chain_actions.tsx @@ -18,7 +18,7 @@ import { Box, Center, Text, VStack } from "@chakra-ui/react" import { ClipboardButton, ClipboardIconButton, ClipboardInput, ClipboardLabel, ClipboardRoot } from "./ui/clipboard" import { InputGroup } from "./ui/input-group" -async function storeWitnessTx(file_id : number,filename: string, txhash: string, ownerAddress: string, network: string, files: ApiFileInfo[], setFiles: any, backend_url: string) { +async function storeWitnessTx(file_id: number, filename: string, txhash: string, ownerAddress: string, network: string, files: ApiFileInfo[], setFiles: any, backend_url: string) { const formData = new URLSearchParams(); @@ -48,7 +48,7 @@ async function storeWitnessTx(file_id : number,filename: string, txhash: string const array: ApiFileInfo[] = []; for (let index = 0; index < files.length; index++) { const element = files[index]; - if (element.name === resp.name) { + if (element.id === file_id) { array.push(resp) } else { array.push(element) @@ -65,7 +65,7 @@ async function storeWitnessTx(file_id : number,filename: string, txhash: string interface ISigningAndWitnessing { - file_id : number, + file_id: number, filename: string, lastRevisionVerificationHash?: string, backend_url: string @@ -118,7 +118,7 @@ export const WitnessAquaChain = ({ file_id, filename, lastRevisionVerificationHa params: params, }) .then(txhash => { - storeWitnessTx( file_id,filename, txhash, ethers.getAddress(walletAddress), network, files, setFiles, backend_url).then(() => { + storeWitnessTx(file_id, filename, txhash, ethers.getAddress(walletAddress), network, files, setFiles, backend_url).then(() => { }).catch(() => { @@ -135,7 +135,7 @@ export const WitnessAquaChain = ({ file_id, filename, lastRevisionVerificationHa setWitnessing(false) }) - } catch (error : any ) { + } catch (error: any) { console.log("Error ", error) setWitnessing(false) toaster.create({ @@ -213,10 +213,10 @@ export const SignAquaChain = ({ file_id, filename, lastRevisionVerificationHash messageHash, signature, ) - console.log("File id not none ==> ",file_id ) + console.log("File id not none ==> ", file_id) const formData = new URLSearchParams(); - formData.append('file_id', file_id.toString() ); + formData.append('file_id', file_id.toString()); formData.append('filename', filename); formData.append('signature', signature); /* Recovered public key if needed */ @@ -245,11 +245,11 @@ export 
const SignAquaChain = ({ file_id, filename, lastRevisionVerificationHash const array: ApiFileInfo[] = []; for (let index = 0; index < files.length; index++) { - const element = files[index]; - if (element.name === resp.name) { + const file = files[index]; + if (file.id === file_id) { array.push(resp) } else { - array.push(element) + array.push(file) } } setFiles(array) @@ -259,8 +259,8 @@ export const SignAquaChain = ({ file_id, filename, lastRevisionVerificationHash }) } - } catch (error : any) { - console.error("Network Error" , error) + } catch (error: any) { + console.error("Network Error", error) toaster.create({ description: `Error during signature submission`, type: "error" @@ -269,7 +269,7 @@ export const SignAquaChain = ({ file_id, filename, lastRevisionVerificationHash } setSigning(false) } catch (error) { - console.error("An Error" , error) + console.error("An Error", error) setSigning(false) toaster.create({ description: `Error during signing`, @@ -314,9 +314,9 @@ export const DeleteAquaChain = ({ file_id, filename, backend_url }: ISigningAndW if (response.status === 200) { let filesNew: Array = []; for (let index = 0; index < files.length; index++) { - const element = files[index]; - if (element.name != filename) { - filesNew.push(element) + const file = files[index]; + if (file.id != file_id) { + filesNew.push(file) } } setFiles(filesNew); @@ -409,12 +409,12 @@ export const DownloadAquaChain = ({ file }: { file: ApiFileInfo }) => { interface IShareButton { id: number | null - file_id: number + file_id: number filename: string | null } export const ShareButton = ({ filename, file_id }: IShareButton) => { - const {backend_url} = useStore(appStore) + const { backend_url } = useStore(appStore) const [isOpen, setIsOpen] = useState(false) const [sharing, setSharing] = useState(false) const [shared, setShared] = useState(null) @@ -438,12 +438,12 @@ export const ShareButton = ({ filename, file_id }: IShareButton) => { console.log(response) - if(response.status === 200){ + if (response.status === 200) { setSharing(false) const domain = window.location.origin; setShared(`${domain}/share/${unique_identifier}`) } - else{ + else { toaster.create({ description: "Error sharing", type: "error" diff --git a/web/src/components/config.tsx b/web/src/components/config.tsx index 60c36a9..fc24894 100644 --- a/web/src/components/config.tsx +++ b/web/src/components/config.tsx @@ -83,16 +83,18 @@ const LoadConfiguration = () => { } useEffect(() => { - const nonce = getCookie("pkc_nonce"); - if (nonce) { - fetchAddressGivenANonce(nonce) - } else { - setMetamaskAddress(null) - setAvatar(undefined) - toaster.create({ - description: "You are not logged in! Please login", - type: "info", - }) + if (!backend_url.includes("0.0.0.0")) { + const nonce = getCookie("pkc_nonce"); + if (nonce) { + fetchAddressGivenANonce(nonce) + } else { + setMetamaskAddress(null) + setAvatar(undefined) + toaster.create({ + description: "You are not logged in! 
Please login", + type: "info", + }) + } } }, [backend_url]); diff --git a/web/src/components/dropzone_file_actions.tsx b/web/src/components/dropzone_file_actions.tsx index 862076b..93ce58b 100644 --- a/web/src/components/dropzone_file_actions.tsx +++ b/web/src/components/dropzone_file_actions.tsx @@ -1,4 +1,4 @@ -import { LuImport, LuScan, LuUpload } from "react-icons/lu"; +import { LuCheck, LuChevronRight, LuImport, LuMinus, LuScan, LuUpload, LuX } from "react-icons/lu"; import { Button } from "./ui/button"; import axios from "axios"; import { useStore } from "zustand"; @@ -6,11 +6,16 @@ import appStore from "../store"; import { useEffect, useState } from "react"; import { ApiFileInfo } from "../models/FileInfo"; import { toaster } from "./ui/toaster"; -import { readJsonFile } from "../utils/functions"; +import { formatCryptoAddress, readJsonFile } from "../utils/functions"; import { ChainDetailsBtn } from "./ui/navigation/CustomDrawer"; -import { Container, Group, Text } from "@chakra-ui/react"; +import { Container, DialogCloseTrigger, Group, List, Text } from "@chakra-ui/react"; import { Alert } from "./ui/alert"; import { useNavigate } from "react-router-dom"; +import { analyzeAndMergeRevisions } from "../utils/aqua_funcs"; +import { DialogActionTrigger, DialogBody, DialogContent, DialogFooter, DialogHeader, DialogRoot, DialogTitle } from "./ui/dialog"; +import { TimelineConnector, TimelineContent, TimelineDescription, TimelineItem, TimelineRoot, TimelineTitle } from "./ui/timeline"; +import { RevisionsComparisonResult } from "../models/revision_merge"; +import { HashChain, Revision } from "aqua-verifier"; interface IDropzoneAction { @@ -246,19 +251,55 @@ export const ImportAquaChainFromChain = ({ fileInfo, isVerificationSuccessful }: const [uploading, setUploading] = useState(false) const [_uploaded, setUploaded] = useState(false) const [dbFiles, setDbFiles] = useState([]) + const [comparisonResult, setComparisonResult] = useState(null) + const [modalOpen, setModalOpen] = useState(false) + + const [existingFileId, setExistingFileId] = useState(null) + const [lastIdenticalRevisionHash, setLastIdenticalRevisionHash] = useState(null) + const [revisionsToImport, setRevisionsToImport] = useState([]) + + console.log(revisionsToImport) const { metamaskAddress, setFiles, files, user_profile, backend_url } = useStore(appStore) - let navigate = useNavigate(); + let navigate = useNavigate(); + const importAquaChain = async () => { - const existingChainFile = dbFiles.find(file => file.name === fileInfo.name) + // const existingChainFile = dbFiles.find(file => file.name === fileInfo.name) + const chainToImport: HashChain = JSON.parse(fileInfo.page_data).pages[0] + const existingChainFile = dbFiles.find(file => JSON.parse(file.page_data).pages[0].genesis_hash === chainToImport.genesis_hash) if (existingChainFile) { - toaster.create({ - description: `You already have the file called "${fileInfo.name}". 
Delete before importing this `, - type: "error" - }) + + const existingFileRevisions = Object.keys(JSON.parse(existingChainFile.page_data).pages[0].revisions) + const fileToImportRevisions = Object.keys(chainToImport.revisions) + + // console.log(existingFileRevisions, fileToImportRevisions) + const mergeResult = analyzeAndMergeRevisions(existingFileRevisions, fileToImportRevisions) + let _revisionsToImport: Revision[] = [] + + if (mergeResult.divergences.length > 0) { + for (let i = 0; i < mergeResult.divergences.length; i++) { + const div = mergeResult.divergences[i]; + if (div.upcomingRevisionHash) { + _revisionsToImport.push(chainToImport.revisions[div.upcomingRevisionHash]) + } + } + } + + // console.log(mergeResult) + setComparisonResult(mergeResult) + setLastIdenticalRevisionHash(mergeResult.lastIdenticalRevisionHash) + setRevisionsToImport(_revisionsToImport) + setModalOpen(true) + setExistingFileId(existingChainFile.id) + + + // toaster.create({ + // description: `You already have the file called "${fileInfo.name}". Delete before importing this `, + // type: "error" + // }) return } @@ -329,6 +370,54 @@ export const ImportAquaChainFromChain = ({ fileInfo, isVerificationSuccessful }: } }; + const handleMergeRevisions = async () => { + try { + setUploading(true) + const response = await axios.post(`${backend_url}/explorer_merge_chain`, { + file_id: existingFileId, + last_identical_revision_hash: lastIdenticalRevisionHash, + revisions_to_import: revisionsToImport + }) + if (response.status === 200) { + toaster.create({ + title: "Aqua chain import", + description: "Chain merged successfully", + type: "success" + }) + setUploading(false) + setUploaded(true) + + const res: ApiFileInfo = response.data + console.log(res) + + const newFiles: ApiFileInfo[] = []; + setFiles(newFiles) + + for (let index = 0; index < files.length; index++) { + const file = files[index]; + if (file.id === res.id) { + newFiles.push(res) + } else { + newFiles.push(file) + } + } + + navigate("/") + } + } catch (e: any) { + setUploading(false) + if (e.message) { + toaster.create({ + title: "Error occured", + description: e.message, + type: "error" + }) + } + } + } + + console.log(comparisonResult) + useEffect(() => { setDbFiles(files) }, [files]) @@ -353,6 +442,166 @@ export const ImportAquaChainFromChain = ({ fileInfo, isVerificationSuccessful }: Import */} - + + setModalOpen(e.open)}> + {/* + + */} + + + Aqua Chain Import + + + + + + + + + Verification status + Verification successful + + + + { + comparisonResult?.identical ? ( + <> + + + + + + Chains Identical + Chains are identical + + + + ) : null + } + + { + (comparisonResult?.existingRevisionsLength ?? 0) > (comparisonResult?.upcomingRevisionsLength ?? 0) ? ( + <> + + + + + + Chain Difference + Existing Chain is Longer than Upcoming Chain + + + + ) : null + } + + { + comparisonResult?.sameLength ? ( + <> + + + + + + Chains Length + Chains are of same Length + + + + ) : null + } + + + { + ( + (comparisonResult?.divergences?.length ?? 0) > 0 + && (comparisonResult?.existingRevisionsLength ?? 0) <= (comparisonResult?.upcomingRevisionsLength ?? 0) + // && isVerificationSuccessful // We won't reach here since by then the import button will be disabled + ) ? ( + <> + + + + + + Chains are Different + Chains have divergencies + + { + comparisonResult?.divergences.map((diff, i: number) => ( + + { + diff.existingRevisionHash ? ( + + + {formatCryptoAddress(diff.existingRevisionHash ?? "", 15, 4)} + + + + {formatCryptoAddress(diff.upcomingRevisionHash ?? 
"", 15, 4)} + + + ) : ( + <> + {formatCryptoAddress(diff.upcomingRevisionHash ?? "", 20, 4)} + + ) + } + + )) + } + + + + + + + + + + Actions + Merge Chains + + + + + + + ) : null + } + + { + ( + (comparisonResult?.identical && (comparisonResult?.sameLength && comparisonResult?.divergences.length === 0)) + || (comparisonResult?.existingRevisionsLength ?? 0) > (comparisonResult?.upcomingRevisionsLength ?? 0) + // || !isVerificationSuccessful // Import button will be disabled, no reaching this point + ) ? ( + + + + + + Action + No Action + + + ) : null + } + + + + + + + + {/* */} + + + + + + ) -} \ No newline at end of file +} diff --git a/web/src/components/ui/navigation/CustomDrawer.tsx b/web/src/components/ui/navigation/CustomDrawer.tsx index 1f524ad..58f093f 100644 --- a/web/src/components/ui/navigation/CustomDrawer.tsx +++ b/web/src/components/ui/navigation/CustomDrawer.tsx @@ -9,11 +9,11 @@ import { DrawerHeader, DrawerRoot, DrawerTitle, - + } from "../drawer" import { Button } from "../button" -import { LuCheck, LuExternalLink, LuEye, LuX } from "react-icons/lu" -import { Card, For, Group, Icon, Link, Spacer, Span, Text } from "@chakra-ui/react" +import { LuCheck, LuChevronDown, LuChevronUp, LuExternalLink, LuEye, LuX } from "react-icons/lu" +import { Box, Card, Collapsible, For, Group, Icon, Link, Spacer, Span, Text, VStack } from "@chakra-ui/react" import { TimelineConnector, TimelineContent, TimelineDescription, TimelineItem, TimelineRoot, TimelineTitle } from "../timeline" import { PageData, Revision } from "../../../models/PageData" import { formatCryptoAddress, getLastRevisionVerificationHash, timeToHumanFriendly } from "../../../utils/functions" @@ -126,7 +126,7 @@ const RevisionDisplay = ({ revision, verificationResult }: IRevisionDisplay) => {verificationResult?.metadata_verification.successful ? ' valid' : ' invalid'} - {timeToHumanFriendly(revision.metadata.time_stamp)} + {timeToHumanFriendly(revision.metadata.time_stamp, true)} { } export const ChainDetailsBtn = ({ fileInfo }: IPageDataDetails) => { + + const [showMoreDetails, setShowMoreDetails] = useState(false) + const { backend_url } = useStore(appStore) const [isOpen, setIsOpen] = useState(false) const pageData: PageData = JSON.parse(fileInfo.page_data) @@ -339,7 +342,7 @@ export const ChainDetailsBtn = ({ fileInfo }: IPageDataDetails) => { useEffect(() => { const hash = getLastRevisionVerificationHash(pageData) setLastVerificationHash(hash) - console.log("ChainDetailsBtn == > "+ JSON.stringify(fileInfo)) + console.log("ChainDetailsBtn == > " + JSON.stringify(fileInfo)) }, [fileInfo]) @@ -369,7 +372,26 @@ export const ChainDetailsBtn = ({ fileInfo }: IPageDataDetails) => { - + + + + + + + + setShowMoreDetails(open => !open)} cursor={'pointer'}> + : } /> + + + + + + + + + + + diff --git a/web/src/components/ui/navigation/Navbar.tsx b/web/src/components/ui/navigation/Navbar.tsx index a921e43..fad9cc8 100644 --- a/web/src/components/ui/navigation/Navbar.tsx +++ b/web/src/components/ui/navigation/Navbar.tsx @@ -4,6 +4,8 @@ import ConnectWallet from "./ConnectWallet" import { useColorMode } from "../color-mode" import appStore from "../../../store" import { useStore } from "zustand" +import VersionAndDisclaimer from "./VersionAndDisclaimer" +import { Link } from "react-router-dom" const Navbar = () => { @@ -14,8 +16,11 @@ const Navbar = () => {
- + + + + { metamaskAddress ? ( diff --git a/web/src/components/ui/navigation/VersionAndDisclaimer.tsx b/web/src/components/ui/navigation/VersionAndDisclaimer.tsx new file mode 100644 index 0000000..6ee78ac --- /dev/null +++ b/web/src/components/ui/navigation/VersionAndDisclaimer.tsx @@ -0,0 +1,105 @@ +import { useEffect, useState } from "react"; +import { Button } from "../button"; +import { DialogBody, DialogCloseTrigger, DialogContent, DialogHeader, DialogTitle, DialogTrigger } from "../dialog"; +import { Center, Dialog, Spacer, Text, VStack } from "@chakra-ui/react"; +import { LuMessageCircleWarning } from "react-icons/lu"; +import { useStore } from 'zustand' +import appStore from '../../../store' +import { Alert } from "../alert"; +import axios from "axios"; +import { toaster } from "../toaster"; +import VersionDetails from "../../../models/VersionDetails"; + +export default function VersionAndDisclaimer() { + // const { es, avatar, setAvatar, setUserProfile, backend_url } = useStore(appStore); + + const { backend_url } = useStore(appStore) + + const [isOpen, setIsOpen] = useState(false); + const [versionDetails, setVersionDetails] = useState({ + backend: "1.2.X", + frontend: "1.2.X" + }); + + const fetchVersionDetails = async () => { + try { + const url = `${backend_url}/version` + + const response = await axios.get(url) + + const res: VersionDetails = await response.data; + + if (response.status === 200) { + setVersionDetails(res) + } + } catch (e: unknown) { + console.log("Error fetching version ", e) + toaster.create({ + description: "Error fetching version details", + type: "error" + }) + } + } + + useEffect(() => { + if (!backend_url.includes("0.0.0.0")) { + fetchVersionDetails() + } + }, [backend_url]) + + return ( + setIsOpen(details.open)}> + + + + + + + Product Information + + + + + + +
+ Product Version Details 
+ Container API Version: {versionDetails.backend} Container Web Version: {versionDetails.frontend} + + + + This is prototype software, use it with caution. + + 
+ +
+ +
+
+ ); +} diff --git a/web/src/components/ui/table.tsx b/web/src/components/ui/table.tsx index 009e28c..84c12db 100644 --- a/web/src/components/ui/table.tsx +++ b/web/src/components/ui/table.tsx @@ -58,7 +58,7 @@ const FilesTable = () => { {item.name} {getFileCategory(item.extension)} - {timeToHumanFriendly(getTimestampSafe(JSON.parse(item.page_data)))} + {timeToHumanFriendly(getTimestampSafe(JSON.parse(item.page_data)), true)} diff --git a/web/src/models/VersionDetails.ts b/web/src/models/VersionDetails.ts new file mode 100644 index 0000000..2296bee --- /dev/null +++ b/web/src/models/VersionDetails.ts @@ -0,0 +1,5 @@ +export default interface VersionDetails{ + backend: string, + frontend: string, + +} \ No newline at end of file diff --git a/web/src/models/revision_merge.ts b/web/src/models/revision_merge.ts new file mode 100644 index 0000000..c3865c2 --- /dev/null +++ b/web/src/models/revision_merge.ts @@ -0,0 +1,39 @@ +export interface Divergence { + index: number + existingRevisionHash: string | null + upcomingRevisionHash: string | null +} + +export interface RevisionsComparisonResult { + divergences: Divergence[] + mergedArray: string[] + identical: boolean + sameLength: boolean + existingRevisionsLength: number + upcomingRevisionsLength: number + lastIdenticalRevisionHash: string | null +} + + +// const comparisonResult: RevisionsComparisonResult = { +// divergences: [ +// { +// index: 6, +// existingRevisionHash: null, +// upcomingRevisionHash: 'e7719544c0ff396e3edc1dda2b784d44f03ceb73410a471f5b37091f5e43be19d0cb654045906ef6b361b499a771d88f56067acb4afa6c0949c384c421f8e51e' +// } +// ], +// mergedArray: [ +// 'e3839fff23f468300b65a9be15a452aa160c1ccbe91d657d2d73767100711bb0e97f29fb7949de6aced5a73d1278e6227b8b20225050025fd6af6b8cb6ebb25f', +// 'd32a796eb95848ffc2efbc83466e94d838218d4733d41b4f15ce134d443cea4b05b7395674a6d9926b60056cf776e34ea292302a5885606fb1064d5ff5014ad1', +// '2a3d96625db20c3a64b884a41a90af24716e177365647b25163fefd71b85cb285543b0450b1faf8edf93dff69de5d71adc4ee8adf999eb4c4ad3cec7d61973f0', +// '20a2a9ba0178a4a8d8d4f251440e71ac1d8f958c518b5eb6d7be020d58d04ef9640cb69c2e65d33efc736df976024b3663a3684f0bd4b85674fa80c5944b65c4', +// '911c4d27936f6641213a6368541662fdb57ef600c09a7e574be3beb00b56449dcf698e5385af80ac6abbf4b2a5ff0f38c75bdf3ceda6556dbadf3fe4a0341cef', +// '4ebd035c34329a227e4962c4b16ff77db0c380c6e8a1de232df5b1fd956165c826e36180d799039274bac3db8c5be5f583ac04acb38abc29e9f51c2efb4bf8a0', +// 'e7719544c0ff396e3edc1dda2b784d44f03ceb73410a471f5b37091f5e43be19d0cb654045906ef6b361b499a771d88f56067acb4afa6c0949c384c421f8e51e' +// ], +// identical: false, +// sameLength: false, +// existingRevisionsLength: 7, +// upcomingRevisionsLength: 6 +// }; \ No newline at end of file diff --git a/web/src/pages/SharePage.tsx b/web/src/pages/SharePage.tsx index 2f32228..f3c7e84 100644 --- a/web/src/pages/SharePage.tsx +++ b/web/src/pages/SharePage.tsx @@ -6,12 +6,12 @@ import axios from 'axios' import { ApiFileInfo } from '../models/FileInfo' import { toaster } from '../components/ui/toaster' import Loading from 'react-loading' -import { Box, Card, Center, Container, Group, VStack } from '@chakra-ui/react' +import { Box, Card, Center, Collapsible, Container, Group, VStack } from '@chakra-ui/react' import ChainDetails from '../components/ui/navigation/CustomDrawer' import FilePreview from '../components/FilePreview' -import ConnectWallet from '../components/ui/navigation/ConnectWallet' import { ImportAquaChainFromChain } from '../components/dropzone_file_actions' import { Alert } from 
'../components/ui/alert' +import { LuChevronUp, LuChevronDown } from 'react-icons/lu' const SharePage = () => { const { backend_url, metamaskAddress } = useStore(appStore) @@ -19,6 +19,7 @@ const SharePage = () => { const [loading, setLoading] = useState(false) const [hasError, setHasError] = useState(null); const [isVerificationSuccesful, setIsVerificationSuccessful] = useState(false) + const [showMoreDetails, setShowMoreDetails] = useState(false) const params = useParams() @@ -42,15 +43,15 @@ const SharePage = () => { setLoading(false) } catch (error: any) { - if (error.response.status == 404){ + if (error.response.status == 404) { setHasError(`File could not be found (probably it was deleted)`); - }else{ + } else { setHasError(`Error : ${error}`); } - console.error(error) ; + console.error(error); + + - - toaster.create({ description: `Error fetching data`, type: 'error' @@ -71,10 +72,10 @@ const SharePage = () => { // description: "Sign In is required", // type: "info" // }) - // } + // } }, []) - const showProperWidget = () => { + const showProperWidget = () => { if (hasError) { return
@@ -96,12 +97,13 @@ const SharePage = () => { } { fileInfo ? ( - + { !metamaskAddress ? ( - + // + ) : ( ) @@ -115,7 +117,25 @@ const SharePage = () => { - setIsVerificationSuccessful(res)} /> + {/* setIsVerificationSuccessful(res)} /> */} + + + + + + + setShowMoreDetails(open => !open)} cursor={'pointer'}> + : } /> + + + setIsVerificationSuccessful(res)} /> + + + + + + + diff --git a/web/src/utils/aqua_funcs.ts b/web/src/utils/aqua_funcs.ts new file mode 100644 index 0000000..14743f3 --- /dev/null +++ b/web/src/utils/aqua_funcs.ts @@ -0,0 +1,41 @@ +import { Divergence, RevisionsComparisonResult } from "../models/revision_merge"; + +export function analyzeAndMergeRevisions(existingRevisions: string[], upcomingRevisions: string[]): RevisionsComparisonResult { + // Sort the arrays + const sortedExistingRevisions = [...existingRevisions] //.sort(); + const sortedUpcomingRevisions = [...upcomingRevisions] //.sort(); + + // Check for divergence + const divergences: Divergence[] = []; + const maxLength = Math.max(sortedExistingRevisions.length, sortedUpcomingRevisions.length); + + let lastIdenticalRevision: string | null = null; + + for (let i = 0; i < maxLength; i++) { + const existingRevisionHash = sortedExistingRevisions[i] || null; + const upcomingRevisionHash = sortedUpcomingRevisions[i] || null; + + if (existingRevisionHash !== upcomingRevisionHash) { + divergences.push({ index: i, existingRevisionHash, upcomingRevisionHash }); + } else if (existingRevisionHash && upcomingRevisionHash) { + lastIdenticalRevision = existingRevisionHash; // Update if they match + } + } + // Merge arrays without duplicates + const mergedArray = Array.from(new Set([...sortedExistingRevisions, ...sortedUpcomingRevisions])); + + // Properties to check + const identical = divergences.length === 0; // True if no divergences + const sameLength = sortedExistingRevisions.length === sortedUpcomingRevisions.length; + + // Return results + return { + divergences, + mergedArray, + identical, + sameLength, + existingRevisionsLength: sortedExistingRevisions.length, + upcomingRevisionsLength: sortedUpcomingRevisions.length, + lastIdenticalRevisionHash: lastIdenticalRevision, + }; +} \ No newline at end of file diff --git a/web/src/utils/functions.ts b/web/src/utils/functions.ts index aed8bc9..d3f4b91 100644 --- a/web/src/utils/functions.ts +++ b/web/src/utils/functions.ts @@ -207,10 +207,11 @@ export function getTimestampSafe(pageData: PageData): string | null { return pageData.pages[0]?.revisions[Object.keys(pageData.pages[0]?.revisions || {})[0]]?.metadata.time_stamp; } -export function timeToHumanFriendly(timestamp: string | undefined): string { +export function timeToHumanFriendly(timestamp: string | undefined, showFull: boolean = false): string { if (!timestamp) { - return '-' + return '-'; } + // Extract the date components const year = timestamp.substring(0, 4); const month = Number(timestamp.substring(4, 6)) - 1; // Months are zero-indexed in JS @@ -222,12 +223,23 @@ export function timeToHumanFriendly(timestamp: string | undefined): string { // Create a new Date object const date = new Date(Date.UTC(Number(year), month, Number(day), Number(hours), Number(minutes), Number(seconds))); - // Format the date in a human-friendly way - const options: Intl.DateTimeFormatOptions = { year: 'numeric', month: 'short', day: 'numeric' }; - return date.toLocaleDateString('en-US', options) + // Format options + const dateOptions: Intl.DateTimeFormatOptions = { year: 'numeric', month: 'short', day: 'numeric' }; + const fullOptions: 
Intl.DateTimeFormatOptions = { + ...dateOptions, + hour: '2-digit', + minute: '2-digit', + second: '2-digit', + hour12: false, + timeZone: 'UTC', + }; + + // Return formatted string based on showFull + return date.toLocaleDateString('en-US', showFull ? fullOptions : dateOptions); } + export const getLastRevisionVerificationHash = (pageData: PageData) => { const revisionHashes = Object.keys(pageData.pages[0].revisions) return pageData.pages[0].revisions[revisionHashes[revisionHashes.length - 1]].metadata.verification_hash
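
Reviewer note: a worked example of the two helpers this patch introduces/extends, using short placeholder hashes in place of real 128-character revision digests (import paths relative to web/src):

    import { analyzeAndMergeRevisions } from "./utils/aqua_funcs";
    import { timeToHumanFriendly } from "./utils/functions";

    // The upcoming chain extends the existing one by a single revision "d".
    const result = analyzeAndMergeRevisions(["a", "b", "c"], ["a", "b", "c", "d"]);
    // result.divergences               -> [{ index: 3, existingRevisionHash: null, upcomingRevisionHash: "d" }]
    // result.lastIdenticalRevisionHash -> "c" (posted to /explorer_merge_chain as last_identical_revision_hash)
    // result.mergedArray               -> ["a", "b", "c", "d"]

    // showFull = true now appends a UTC time component to the date.
    timeToHumanFriendly("20240131120000");       // "Jan 31, 2024"
    timeToHumanFriendly("20240131120000", true); // roughly "Jan 31, 2024, 12:00:00"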