Skip to content

Commit

Permalink
Merge pull request #14 from inblockio/db_fixes
Browse files Browse the repository at this point in the history
Db fixes
  • Loading branch information
Arthur-Kamau authored Dec 16, 2024
2 parents bd8e68c + 275e029 commit 606e753
Show file tree
Hide file tree
Showing 13 changed files with 100 additions and 82 deletions.
2 changes: 1 addition & 1 deletion migrations/2024-12-11-113924_share/up.sql
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
-- Your SQL goes here
-- Table backing share links: one row per shared page/file.
-- NOTE: this commit migrates the share reference from a file name to the
-- integer primary key of the `pages` row (`file_id`), matching the Diesel
-- schema `share_data (id, file_id, identifier, created_time)`.
CREATE TABLE IF NOT EXISTS share_data (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    -- primary-key id of the shared row in `pages` (replaces old file_name TEXT)
    file_id INTEGER NOT NULL,
    -- client-supplied share identifier (used to look the share up later)
    identifier TEXT NOT NULL,
    -- creation timestamp, stored as formatted UTC text
    created_time TEXT NOT NULL
);
27 changes: 14 additions & 13 deletions src/controllers/api_controller.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
use crate::models::input::{DeleteInput, RevisionInput, UpdateConfigurationInput, WitnessInput};
use crate::models::page_data::{ApiResponse, PageDataContainer};
use crate::models::PagesDataTable;
use crate::models::{file::FileInfo, page_data};
use crate::util::{
check_if_page_data_revision_are_okay, check_or_generate_domain, compute_content_hash,
Expand Down Expand Up @@ -39,7 +40,7 @@ use futures::{Stream, TryStreamExt};
use serde::{Deserialize, Serialize};
extern crate serde_json_path_to_error as serde_json;
use crate::db::pages_db::{
db_data, delete_all_data, delete_all_user_files, delete_page_data,
delete_all_data, delete_all_user_files, delete_page_data,
fetch_all_pages_data_per_user, fetch_page_data, insert_page_data, update_page_data,
};
use dotenv::{dotenv, vars};
Expand Down Expand Up @@ -560,7 +561,7 @@ pub async fn explorer_aqua_file_upload(
}
};

let db_data_model = db_data {
let db_data_model = PagesDataTable {
id: 0,
name: file_name,
extension: content_type,
Expand Down Expand Up @@ -836,7 +837,7 @@ pub async fn explorer_file_upload(
mode = file_mode;
}

let db_data_model = db_data {
let db_data_model = PagesDataTable {
id: 0,
name: file_name,
extension: content_type,
Expand Down Expand Up @@ -888,7 +889,7 @@ pub async fn explorer_sign_revision(
tracing::debug!("explorer_sign_revision");

// Get the name parameter from the input
if input.filename.is_empty() {
if input.file_id == 0 {
log_data.push("Error : file name is empty".to_string());
let res: ApiResponse = ApiResponse {
logs: log_data,
Expand Down Expand Up @@ -916,7 +917,7 @@ pub async fn explorer_sign_revision(
};

// let insert_result = insert_page_data(db_data_model.clone(), & mut conn);
let page_data_result = fetch_page_data(input.filename, &mut conn);
let page_data_result = fetch_page_data(input.file_id, &mut conn);

if page_data_result.is_err() {
tracing::error!("Failed not found ",);
Expand Down Expand Up @@ -1219,7 +1220,7 @@ pub async fn explorer_delete_file(
let mut log_data: Vec<String> = Vec::new();

// Get the name parameter from the input
if input.filename.is_empty() {
if input.file_id == 0 {
log_data.push("Error : file name is empty".to_string());
let res: ApiResponse = ApiResponse {
logs: log_data,
Expand Down Expand Up @@ -1250,13 +1251,13 @@ pub async fn explorer_delete_file(
}
};

let result = delete_page_data(input.filename.clone(), &mut conn);
let result = delete_page_data(input.file_id, &mut conn);

match result {
Ok(result_data) => {
// Check the number of affected rows
if result_data > 0 {
tracing::error!("Successfully deleted the row with name: {}", input.filename);
tracing::error!("Successfully deleted the row with name: {}", input.file_id);
log_data.push("Error : file data is deleted ".to_string());
let res: ApiResponse = ApiResponse {
logs: log_data,
Expand All @@ -1265,9 +1266,9 @@ pub async fn explorer_delete_file(
};
return (StatusCode::OK, Json(res));
} else {
tracing::error!("No row found with ID: {}", input.filename);
tracing::error!("No row found with ID: {}", input.file_id);

log_data.push(format!("Error : No row found with name {}", input.filename));
log_data.push(format!("Error : No row found with name {}", input.file_id));
let res: ApiResponse = ApiResponse {
logs: log_data,
file: None,
Expand Down Expand Up @@ -1297,7 +1298,7 @@ pub async fn explorer_witness_file(
let mut log_data: Vec<String> = Vec::new();

// Get the name parameter from the input
if input.filename.is_empty() {
if input.file_id == 0 {
log_data.push("Error : file name is empty".to_string());
let res: ApiResponse = ApiResponse {
logs: log_data,
Expand Down Expand Up @@ -1336,7 +1337,7 @@ pub async fn explorer_witness_file(
}
};

let page_data_result = fetch_page_data(input.filename.clone(), &mut conn);
let page_data_result = fetch_page_data(input.file_id, &mut conn);

if page_data_result.is_err() {
tracing::error!("Failed not found {}", page_data_result.err().unwrap());
Expand All @@ -1355,7 +1356,7 @@ pub async fn explorer_witness_file(

log_data.push(format!(
"Success : Page data for {} not found in database",
input.filename
input.file_id
));

// Deserialize page data
Expand Down
17 changes: 12 additions & 5 deletions src/controllers/share_controller.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ use crate::models::page_data::ApiResponse;
use crate::{db::share::fetch_share_data_by_address, models::ShareDataTable};

use crate::db::pages_db::{
db_data, delete_all_data, delete_all_user_files, delete_page_data,
delete_all_data, delete_all_user_files, delete_page_data,
fetch_all_pages_data_per_user, fetch_page_data, insert_page_data, update_page_data,
};
use crate::models::share_data::{CreateShareData, ShareDataResponse};
Expand Down Expand Up @@ -70,7 +70,13 @@ pub async fn get_share_data(

let firs_share_payload_data = share_payload_data.first().unwrap();

let page_data_result = fetch_page_data(firs_share_payload_data.file_name.clone(), &mut conn);

if firs_share_payload_data.id.is_none(){
log_data.push("Error id not found in system".to_string());
return (StatusCode::NOT_FOUND, Json(res));
}

let page_data_result = fetch_page_data(firs_share_payload_data.id.unwrap(), &mut conn);

if page_data_result.is_err() {
tracing::error!("Failed not found ",);
Expand Down Expand Up @@ -102,7 +108,7 @@ pub async fn save_share_data(
};

// Get the name parameter from the input
if input.filename.is_empty() {
if input.file_id == 0 {
res.logs.push("Error : file name is empty".to_string());

return (StatusCode::BAD_REQUEST, Json(res));
Expand Down Expand Up @@ -132,7 +138,7 @@ pub async fn save_share_data(

// confirm if page data exist

let page_data_result = fetch_page_data(input.filename.clone(), &mut conn);
let page_data_result = fetch_page_data(input.file_id.clone(), &mut conn);

if page_data_result.is_err() {
tracing::error!("Failed not found ",);
Expand All @@ -148,10 +154,11 @@ pub async fn save_share_data(
let time_data = current_utc.format("%Y-%m-%d %H:%M:%S UTC");
let time_data_str = format!("{:?}", time_data);
println!("Custom format 1: {} str {}", time_data, time_data_str);

// insert share data to db
let share_payload = ShareDataTable {
id: None,
file_name: input.filename.clone(),
file_id: input.file_id.clone(),
identifier: input.identifier.clone(),
created_time: time_data_str,
};
Expand Down
2 changes: 1 addition & 1 deletion src/controllers/user_profile_controller.rs
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ use aqua_verifier::util::{
};
extern crate serde_json_path_to_error as serde_json;
use crate::db::pages_db::{
db_data, delete_all_data, delete_page_data, fetch_page_data, insert_page_data, update_page_data,
delete_all_data, delete_page_data, fetch_page_data, insert_page_data, update_page_data,
};
use dotenv::{dotenv, vars};
use sha3::{Digest, Sha3_512};
Expand Down
38 changes: 15 additions & 23 deletions src/db/pages_db.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use crate::models::{PagesTable, DB_POOL};
use crate::models::{PagesDataTable, PagesTable, DB_POOL};
use chrono::{NaiveDateTime, Utc};
use diesel::r2d2::{self, ConnectionManager, PooledConnection};
use diesel::SqliteConnection;
Expand All @@ -8,19 +8,10 @@ use diesel::prelude::*;
use diesel::prelude::*;
use diesel::result::Error as DieselError;

#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct db_data {
pub id: i64,
pub name: String,
pub extension: String,
pub page_data: String,
pub mode: String,
pub owner: String,
pub is_shared: bool
}


pub fn insert_page_data(
data: db_data,
data: PagesDataTable,
db_connection: &mut PooledConnection<ConnectionManager<SqliteConnection>>,
) -> Result<i64, String> {
// Use Utc to get the current time as a NaiveDateTime
Expand Down Expand Up @@ -48,23 +39,23 @@ pub fn insert_page_data(
Ok(inserted_id as i64)
}

// The existing db_data and PagesTable structs remain the same
// The existing PagesDataTable and PagesTable structs remain the same

pub fn fetch_page_data(
filename: String,
id_par: i32,
db_connection: &mut PooledConnection<ConnectionManager<SqliteConnection>>,
) -> Result<db_data, String> {
) -> Result<PagesDataTable, String> {
use crate::schema::pages::dsl::*;

let result = pages
.filter(name.eq(&filename))
.filter(id.eq(&id_par))
.first::<PagesTable>(db_connection)
.map_err(|e| match e {
DieselError::NotFound => format!("No page found with filename: {:#?}", filename),
DieselError::NotFound => format!("No page found with id: {:#?}", id_par),
_ => format!("Error fetching page data: {}", e),
})?;

Ok(db_data {
Ok(PagesDataTable {
id: result.id.unwrap_or(0) as i64,
name: result.name,
extension: result.extension,
Expand All @@ -83,10 +74,10 @@ pub fn fetch_all_pages_data_per_user(

let results = pages
.filter(owner.eq(&user))
.load::<PagesTable>(db_connection) // Replace `db_data` with the correct Diesel model type
.load::<PagesTable>(db_connection) // Replace `PagesDataTable` with the correct Diesel model type
.map_err(|e| format!("Error fetching pages for user {}: {}", user, e))?;

// Ok(db_data {
// Ok(PagesDataTable {
// id: result.id.unwrap_or(0) as i64,
// name: result.name,
// extension: result.extension,
Expand All @@ -98,7 +89,7 @@ pub fn fetch_all_pages_data_per_user(
}

pub fn update_page_data(
data: db_data,
data: PagesDataTable,
db_connection: &mut PooledConnection<ConnectionManager<SqliteConnection>>,
) -> Result<(), String> {
use crate::schema::pages::dsl::*;
Expand All @@ -109,6 +100,7 @@ pub fn update_page_data(
page_data.eq(&data.page_data),
mode.eq(&data.mode),
owner.eq(&data.owner),
name.eq(&data.name),
is_shared.eq(&data.is_shared)
))
.execute(db_connection)
Expand All @@ -118,12 +110,12 @@ pub fn update_page_data(
}

pub fn delete_page_data(
page_name: String,
file_id: i32,
db_connection: &mut PooledConnection<ConnectionManager<SqliteConnection>>,
) -> Result<i8, String> {
use crate::schema::pages::dsl::*;

let deleted_count = diesel::delete(pages.filter(name.eq(&page_name)))
let deleted_count = diesel::delete(pages.filter(id.eq(&file_id)))
.execute(db_connection)
.map_err(|e| format!("Error deleting page data: {}", e))?;

Expand Down
1 change: 0 additions & 1 deletion src/db/share.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@ pub fn insert_share_data(
}

pub fn fetch_all_share_data(

db_connection: &mut PooledConnection<ConnectionManager<SqliteConnection>>
) -> Result<Vec<ShareDataTable>, String> {
use crate::schema::share_data::dsl::*;
Expand Down
6 changes: 3 additions & 3 deletions src/models/input.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ use serde::{Deserialize, Serialize};
#[derive(Deserialize, Serialize, Debug)]
#[allow(dead_code)]
pub struct RevisionInput {
pub filename: String,
pub file_id: i32,
pub signature: String,
pub publickey: String,
pub wallet_address: String,
Expand All @@ -12,7 +12,7 @@ pub struct RevisionInput {
#[derive(Deserialize, Serialize, Debug)]
#[allow(dead_code)]
pub struct WitnessInput {
pub filename: String,
pub file_id: i32,
pub tx_hash: String,
pub wallet_address: String,
pub network: String,
Expand All @@ -21,7 +21,7 @@ pub struct WitnessInput {
/// Request payload for deleting a stored page/file.
///
/// This commit migrated the delete API from filename-based lookup to the
/// integer primary key of the `pages` row; the handler treats `file_id == 0`
/// as a missing/invalid id. The stale `filename: String` field from the
/// pre-change version is removed here (diff residue — the handlers only
/// read `input.file_id`).
#[derive(Deserialize, Serialize, Debug, Clone)]
#[allow(dead_code)]
pub struct DeleteInput {
    /// Primary-key id of the `pages` row to delete; 0 means "not provided".
    pub file_id: i32,
}

Expand Down
13 changes: 12 additions & 1 deletion src/models/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,17 @@ pub struct PagesTable {
pub is_shared : bool
}

/// Plain (non-Diesel) data-transfer struct for one row of the `pages` table.
///
/// Introduced by this commit to replace the old `db_data` struct that lived
/// in `db/pages_db.rs`; it is the payload type for `insert_page_data`,
/// `update_page_data` and the return type of `fetch_page_data`, and is
/// serialized into API responses (e.g. `ShareDataResponse.file_data`).
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct PagesDataTable {
// Row id; helpers pass 0 for inserts and the DB assigns the real id.
pub id: i64,
// File name (without path).
pub name: String,
// File extension / content type as stored at upload time.
pub extension: String,
// The serialized page/revision data blob.
pub page_data: String,
// Upload mode string — semantics set by the upload handlers; TODO confirm values.
pub mode: String,
// Wallet address of the owning user.
pub owner: String,
// Whether this page has an active share entry.
pub is_shared: bool
}

#[derive(Queryable, Selectable, Deserialize, Serialize, Debug, Clone, Insertable)]
#[diesel(table_name = crate::schema::siwe_sessions)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
Expand All @@ -49,7 +60,7 @@ pub struct SiweSessionsTable {
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct ShareDataTable {
pub id: Option<i32>,
pub file_name: String,
pub file_id: i32,
pub identifier: String,
pub created_time: String,
}
Expand Down
7 changes: 3 additions & 4 deletions src/models/share_data.rs
Original file line number Diff line number Diff line change
@@ -1,21 +1,20 @@
use serde::{Deserialize, Serialize};

use crate::db::pages_db::db_data;

use super::{file::FileInfo, ShareDataTable};
use super::{file::FileInfo, PagesDataTable, ShareDataTable};

#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ShareDataResponse{

pub logs : Vec<String>,
pub share_data : Option<ShareDataTable>,
pub file_data : Option<db_data>,
pub file_data : Option<PagesDataTable>,

}

/// Request payload for creating a share entry.
///
/// Migrated in this commit from a filename reference to the integer primary
/// key of the `pages` row; the handler validates `file_id == 0` as missing
/// and copies `file_id`/`identifier` into a new `ShareDataTable` row. The
/// stale `filename: String` field (pre-change diff residue) is removed.
#[derive(Deserialize, Serialize, Debug, Clone)]
#[allow(dead_code)]
pub struct CreateShareData {
    /// Primary-key id of the page/file row being shared.
    pub file_id : i32,
    /// Client-supplied identifier under which the share is looked up.
    pub identifier : String
}
2 changes: 1 addition & 1 deletion src/schema.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ diesel::table! {
diesel::table! {
share_data (id) {
id -> Nullable<Integer>,
file_name -> Text,
file_id -> Integer,
identifier -> Text,
created_time -> Text,
}
Expand Down
Loading

0 comments on commit 606e753

Please sign in to comment.