diff --git a/README.md b/README.md
index e05598f483..508cb7546f 100644
--- a/README.md
+++ b/README.md
@@ -118,37 +118,35 @@ cargo run --bin safe --features local-discovery -- files download
 ```
 
 ### Folders
 
-The folders storage capability can be demonstrated by uploading folders to the local network, making changes and syncing them with the stored version on the network, as well as downloading the entire folders hierarchy onto a local directory.
+The folders storage capability can be demonstrated by storing folders on the network, making changes and syncing them with the stored version on the network, as well as downloading the entire folders hierarchy onto a local directory.
 
 All the following commands act on the current directory by default, but since we are building the CLI binary to run it, we always have to provide the directory we want them to act on as a path argument. When running an already built CLI binary directly, we can instead simply make sure we are located at the directory we want to act on, without needing to provide the path as an argument.
 
-Upload a directory:
+Initialise a directory to then be able to track changes made on it, and sync them up with the network:
 
 ```bash
-cargo run --bin safe --features local-discovery -- folders upload
+cargo run --bin safe --features local-discovery -- folders init
 ```
 
 Make sure you made a backup copy of the "recovery secret" generated by the above command, or the one you have provided when prompted.
 
-After it finishes uploading the complete directory, with files and sub-directories, it will show the network address where the main directory can be pulled from.
-
-Now you can download the folders onto any other path by providing it as the target directory path (you will be prompted to enter the "recovery secret"):
+If any changes are now made to files or directories within this folder (at this point all files and folders are considered new, since it has just been initialised for tracking), we can get a report of the changes made locally before trying to push them to the network:
 
 ```bash
-cargo run --bin safe --features local-discovery -- folders download
+cargo run --bin safe --features local-discovery -- folders status
 ```
 
-If any changes are now made to files or directories within this folder, before trying to push those changes to the network, we can get a report of the changes that have been made locally:
+We can now push all local changes made to files and directories to the network, as well as pull any changes that may have been made to the version stored on the network since the last time we synced with it:
 
 ```bash
-cargo run --bin safe --features local-discovery -- folders status
+cargo run --bin safe --features local-discovery -- folders sync
 ```
 
-We can both push local changes made to files and directories to the network, as well as pull any changes that have been made to the version stored on the network since last time we synced with it:
+Now that it is all stored on the network, you can download the folders onto any other path by providing it as the target directory to the following command (you will be prompted to enter the "recovery secret" you obtained when initialising the directory with the `init` command):
 
 ```bash
-cargo run --bin safe --features local-discovery -- folders sync
+cargo run --bin safe --features local-discovery -- folders download
 ```
 
 ### Token Transfers
diff --git a/sn_cli/src/bin/subcommands/folders.rs b/sn_cli/src/bin/subcommands/folders.rs
index 90e3eb84e3..d0cbf92176 100644
--- a/sn_cli/src/bin/subcommands/folders.rs
+++ b/sn_cli/src/bin/subcommands/folders.rs
@@ -32,25 +32,6 @@ pub enum FoldersCmds {
         #[clap(name = "recovery key")]
         root_sk: Option<String>,
     },
-    Upload {
-        /// The location of the file(s) to upload for creating the folder on the network.
-        /// By default the current path is assumed.
-        #[clap(name = "path", value_name = "PATH")]
-        path: Option<PathBuf>,
-        /// The batch_size to split chunks into parallel handling batches
-        /// during payment and upload processing.
-        #[clap(long, default_value_t = BATCH_SIZE, short='b')]
-        batch_size: usize,
-        /// Should the files be made accessible to all. (This is irreversible)
-        #[clap(long, name = "make_public", default_value = "false", short = 'p')]
-        make_data_public: bool,
-        /// Set the strategy to use on chunk upload failure. Does not modify the spend failure retry attempts yet.
-        ///
-        /// Choose a retry strategy based on effort level, from 'quick' (least effort), through 'balanced',
-        /// to 'persistent' (most effort).
-        #[clap(long, default_value_t = RetryStrategy::Balanced, short = 'r', help = "Sets the retry strategy on upload failure. Options: 'quick' for minimal effort, 'balanced' for moderate effort, or 'persistent' for maximum effort.")]
-        retry_strategy: RetryStrategy,
-    },
     Download {
         /// The full local path where to download the folder. By default the current path is assumed,
         /// and the main Folder's network address will be used as the folder name.
@@ -110,31 +91,6 @@ pub(crate) async fn folders_cmds(
             let acc_packet = AccountPacket::init(client.clone(), root_dir, &path, &root_sk, None)?;
             println!("Directoy at {path:?} initialised as a root Folder, ready to track and sync changes with the network at address: {}", acc_packet.root_folder_addr().to_hex())
         }
-        FoldersCmds::Upload {
-            path,
-            batch_size,
-            make_data_public,
-            retry_strategy,
-        } => {
-            let path = get_path(path, None)?;
-            // initialise path as a fresh new Folder with a network address derived from a random SK
-            let root_sk = get_recovery_secret_sk(None, true)?;
-            let mut acc_packet =
-                AccountPacket::init(client.clone(), root_dir, &path, &root_sk, None)?;
-
-            let options = UploadCfg {
-                verify_store,
-                batch_size,
-                retry_strategy,
-                ..Default::default()
-            };
-            acc_packet.sync(options, make_data_public).await?;
-
-            println!(
-                "\nFolder hierarchy from {path:?} uploaded successfully at {}",
-                acc_packet.root_folder_addr().to_hex()
-            );
-        }
         FoldersCmds::Download {
             path,
             root_sk,