diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index b110d15..db97644 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -24,9 +24,8 @@ jobs: toolchain: stable override: true - - run: cargo publish --features full --token ${CRATES_TOKEN} - env: - CRATES_TOKEN: ${{ secrets.CRATES_TOKEN }} + - name: Publish on crates.io + run: cargo publish --features full --token ${{ secrets.CRATES_TOKEN }} check-publish: name: Check Publish runs-on: ubuntu-latest @@ -42,6 +41,5 @@ jobs: toolchain: stable override: true - - run: cargo publish --features cli --token ${CRATES_TOKEN} --dry-run -v - env: - CRATES_TOKEN: ${{ secrets.CRATES_TOKEN }} + - name: Check if can publish on crates.io + run: cargo publish --features cli --token ${{ secrets.CRATES_TOKEN }} --dry-run -v diff --git a/Cargo.lock b/Cargo.lock index b7939ed..24ece07 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -258,7 +258,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn", + "syn 2.0.79", ] [[package]] @@ -325,7 +325,7 @@ checksum = "c8936e42f9b4f5bdfaf23700609ac1f11cb03ad4c1ec128a4ee4fd0903e228db" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.79", ] [[package]] @@ -447,6 +447,18 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "enum_dispatch" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa18ce2bc66555b3218614519ac839ddb759a7d6720732f979ef8d13be147ecd" +dependencies = [ + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.79", +] + [[package]] name = "equivalent" version = "1.0.1" @@ -564,7 +576,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.79", ] [[package]] @@ -813,8 +825,10 @@ dependencies = [ "clap_complete", "codspeed-criterion-compat", "criterion", + "enum_dispatch", "futures", "is-terminal", + "lifetime", "predicates", "pulldown-cmark", "reqwest", @@ -839,6 +853,36 @@ version = "0.2.159" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5" +[[package]] +name = "lifetime" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b9ac39b3413a5c091fc208f8eb84d9bdb35b6ef8c84570bc2f6fa47a3ba948e" +dependencies = [ + "lifetime_proc_macros", +] + +[[package]] +name = "lifetime_proc_macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb3008bffdf1aaae933e04c9019963a422d495bd0e3913db40eb7078883df30a" +dependencies = [ + "lifetime_proc_macros_impl", + "syn 1.0.109", +] + +[[package]] +name = "lifetime_proc_macros_impl" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d9b75338d55f6248964c017967525a03d48d3f3092701c2ec3c027f3c60514b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "linux-raw-sys" version = "0.4.14" @@ -970,7 +1014,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.79", ] [[package]] @@ -1339,7 +1383,7 @@ checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.79", ] [[package]] @@ -1409,6 +1453,17 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" +[[package]] 
+name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + [[package]] name = "syn" version = "2.0.79" @@ -1502,7 +1557,7 @@ checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.79", ] [[package]] @@ -1554,7 +1609,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.79", ] [[package]] @@ -1773,7 +1828,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn", + "syn 2.0.79", "wasm-bindgen-shared", ] @@ -1807,7 +1862,7 @@ checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.79", "wasm-bindgen-backend", "wasm-bindgen-shared", ] diff --git a/Cargo.toml b/Cargo.toml index 890062c..f58857a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,7 +12,9 @@ required-features = ["cli"] annotate-snippets = {version = "^0.9.1", optional = true} clap = {version = "^4.5.18", features = ["cargo", "derive", "env", "wrap_help"], optional = true} clap_complete = {version = "^4.5.2", optional = true} +enum_dispatch = {version = "0.3.13", optional = true} is-terminal = {version = "0.4.3", optional = true} +lifetime = {version = "0.1.0", features = ["macros"]} pulldown-cmark = {version = "0.10.2", optional = true} reqwest = {version = "^0.11", default-features = false, features = ["json"]} serde = {version = "^1.0", features = ["derive"]} @@ -33,7 +35,7 @@ tokio = {version = "^1.0", features = ["macros"]} [features] annotate = ["dep:annotate-snippets"] -cli = ["annotate", "color", "dep:clap", "dep:is-terminal", "multithreaded"] +cli = ["annotate", "color", "dep:clap", "dep:enum_dispatch", "dep:is-terminal", "multithreaded"] cli-complete = ["cli", "clap_complete"] color = ["annotate-snippets?/color", "dep:termcolor"] default = ["cli", "native-tls"] @@ -61,7 +63,7 @@ license = "MIT" name = "languagetool-rust" readme = "README.md" repository = "https://github.com/jeertmans/languagetool-rust" -rust-version = "1.74.0" +rust-version = "1.75.0" version = "2.1.4" [package.metadata.docs.rs] diff --git a/README.md b/README.md index f591fd8..b6d94aa 100644 --- a/README.md +++ b/README.md @@ -154,13 +154,14 @@ languagetool-rust = "^2.1" ```rust use languagetool_rust::api::{check, server::ServerClient}; +use std::borrow::Cow; #[tokio::main] async fn main() -> Result<(), Box> { let client = ServerClient::from_env_or_default(); let req = check::Request::default() - .with_text("Some phrase with a smal mistake".to_string()); // # codespell:ignore smal + .with_text("Some phrase with a smal mistake"); // # codespell:ignore smal println!( "{}", diff --git a/benches/benchmarks/check_texts.rs b/benches/benchmarks/check_texts.rs index 04d645f..f0a6f6e 100644 --- a/benches/benchmarks/check_texts.rs +++ b/benches/benchmarks/check_texts.rs @@ -14,7 +14,7 @@ static FILES: [(&str, &str); 3] = [ ("large", include_str!("../large.txt")), ]; -async fn request_until_success(req: &Request, client: &ServerClient) -> Response { +async fn request_until_success<'source>(req: &Request<'source>, client: &ServerClient) -> Response { loop { match client.check(req).await { Ok(resp) => return resp, @@ -34,7 +34,7 @@ async fn check_text_basic(text: &str) -> Response { "Please use a local server for 
benchmarking, and configure the environ variables to use \ it.", ); - let req = Request::default().with_text(text.to_string()); + let req = Request::default().with_text(text); request_until_success(&req, &client).await } diff --git a/rustfmt.toml b/rustfmt.toml index 4c2e3c4..10fcdbd 100644 --- a/rustfmt.toml +++ b/rustfmt.toml @@ -1,4 +1,5 @@ condense_wildcard_suffixes = true +edition = "2021" # error_on_line_overflow = true # error_on_unformatted = true force_multiline_blocks = true @@ -9,5 +10,4 @@ imports_granularity = "Crate" match_block_trailing_comma = true normalize_doc_attributes = true unstable_features = true -version = "Two" wrap_comments = true diff --git a/src/api/check.rs b/src/api/check.rs index 9bce829..6d8f2ba 100644 --- a/src/api/check.rs +++ b/src/api/check.rs @@ -1,7 +1,6 @@ //! Structures for `check` requests and responses. -#[cfg(feature = "cli")] -use std::path::PathBuf; +use std::{borrow::Cow, marker::PhantomData, mem, ops::Deref}; #[cfg(feature = "annotate")] use annotate_snippets::{ @@ -9,12 +8,13 @@ use annotate_snippets::{ snippet::{Annotation, AnnotationType, Slice, Snippet, SourceAnnotation}, }; #[cfg(feature = "cli")] -use clap::{Args, Parser, ValueEnum}; +use clap::ValueEnum; +use lifetime::IntoStatic; use serde::{Deserialize, Serialize, Serializer}; use crate::error::{Error, Result}; -/// Requests +// REQUESTS /// Parse `v` is valid language code. /// @@ -123,65 +123,79 @@ where } } -#[derive(Clone, Debug, Deserialize, PartialEq, Eq, Serialize, Hash)] +/// A portion of text to be checked. +#[derive(Clone, Debug, Deserialize, PartialEq, Eq, Serialize, Hash, IntoStatic)] #[non_exhaustive] #[serde(rename_all = "camelCase")] -/// A portion of text to be checked. -pub struct DataAnnotation { - /// If set, the markup will be interpreted as this. - #[serde(skip_serializing_if = "Option::is_none")] - pub interpret_as: Option, +pub struct DataAnnotation<'source> { + /// Text that should be treated as normal text. + /// + /// This or `markup` is required. #[serde(skip_serializing_if = "Option::is_none")] + pub text: Option>, /// Text that should be treated as markup. - pub markup: Option, + /// + /// This or `text` is required. #[serde(skip_serializing_if = "Option::is_none")] - /// Text that should be treated as normal text. - pub text: Option, -} - -impl Default for DataAnnotation { - fn default() -> Self { - Self { - interpret_as: None, - markup: None, - text: Some(String::new()), - } - } + pub markup: Option>, + /// If set, the markup will be interpreted as this. + #[serde(skip_serializing_if = "Option::is_none")] + pub interpret_as: Option>, } -impl DataAnnotation { +impl<'source> DataAnnotation<'source> { /// Instantiate a new `DataAnnotation` with text only. #[inline] #[must_use] - pub fn new_text(text: String) -> Self { + pub fn new_text>>(text: T) -> Self { Self { - interpret_as: None, + text: Some(text.into()), markup: None, - text: Some(text), + interpret_as: None, } } /// Instantiate a new `DataAnnotation` with markup only. #[inline] #[must_use] - pub fn new_markup(markup: String) -> Self { + pub fn new_markup>>(markup: M) -> Self { Self { - interpret_as: None, - markup: Some(markup), text: None, + markup: Some(markup.into()), + interpret_as: None, } } /// Instantiate a new `DataAnnotation` with markup and its interpretation. 
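The `Into<Cow<'_, str>>` bounds on the constructors above mean both borrowed and owned strings are accepted without an explicit `.to_string()`. A minimal sketch of the difference, assuming the types stay exported under `api::check` as in this diff:

```rust
use languagetool_rust::api::check::DataAnnotation;

// A `&'static str` stays borrowed, a `String` is moved in; both become `Cow`.
let borrowed = DataAnnotation::new_text("A ");
let owned = DataAnnotation::new_markup(String::from("<b>"));

assert_eq!(borrowed.text.as_deref(), Some("A "));
assert_eq!(owned.markup.as_deref(), Some("<b>"));
```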
#[inline] #[must_use] - pub fn new_interpreted_markup(markup: String, interpret_as: String) -> Self { + pub fn new_interpreted_markup>, I: Into>>( + markup: M, + interpret_as: I, + ) -> Self { Self { - interpret_as: Some(interpret_as), - markup: Some(markup), + interpret_as: Some(interpret_as.into()), + markup: Some(markup.into()), text: None, } } + + /// Return the text or markup within the data annotation. + /// + /// # Errors + /// + /// If this data annotation does not contain text or markup. + pub fn try_get_text(&self) -> Result> { + if let Some(ref text) = self.text { + Ok(text.clone()) + } else if let Some(ref markup) = self.markup { + Ok(markup.clone()) + } else { + Err(Error::InvalidDataAnnotation(format!( + "missing either text or markup field in {self:?}" + ))) + } + } } #[cfg(test)] @@ -191,49 +205,61 @@ mod data_annotation_tests { #[test] fn test_text() { - let da = DataAnnotation::new_text("Hello".to_string()); + let da = DataAnnotation::new_text("Hello"); - assert_eq!(da.text.unwrap(), "Hello".to_string()); + assert_eq!(da.text.unwrap(), "Hello"); assert!(da.markup.is_none()); assert!(da.interpret_as.is_none()); } #[test] fn test_markup() { - let da = DataAnnotation::new_markup("Hello".to_string()); + let da = DataAnnotation::new_markup("Hello"); assert!(da.text.is_none()); - assert_eq!(da.markup.unwrap(), "Hello".to_string()); + assert_eq!(da.markup.unwrap(), "Hello"); assert!(da.interpret_as.is_none()); } #[test] fn test_interpreted_markup() { - let da = - DataAnnotation::new_interpreted_markup("Hello".to_string(), "Hello".to_string()); + let da = DataAnnotation::new_interpreted_markup("Hello", "Hello"); assert!(da.text.is_none()); - assert_eq!(da.markup.unwrap(), "Hello".to_string()); - assert_eq!(da.interpret_as.unwrap(), "Hello".to_string()); + assert_eq!(da.markup.unwrap(), "Hello"); + assert_eq!(da.interpret_as.unwrap(), "Hello"); } } /// Alternative text to be checked. #[derive(Clone, Debug, Default, Deserialize, PartialEq, Eq, Hash)] #[non_exhaustive] -pub struct Data { +pub struct Data<'source> { /// Vector of markup text, see [`DataAnnotation`]. - pub annotation: Vec, + pub annotation: Vec>, +} + +impl IntoStatic for Data<'_> { + type Static = Data<'static>; + fn into_static(self) -> Self::Static { + Data { + annotation: self + .annotation + .into_iter() + .map(IntoStatic::into_static) + .collect(), + } + } } -impl> FromIterator for Data { +impl<'source, T: Into>> FromIterator for Data<'source> { fn from_iter>(iter: I) -> Self { let annotation = iter.into_iter().map(std::convert::Into::into).collect(); Data { annotation } } } -impl Serialize for Data { +impl Serialize for Data<'_> { fn serialize(&self, serializer: S) -> std::result::Result where S: serde::Serializer, @@ -246,7 +272,7 @@ impl Serialize for Data { } #[cfg(feature = "cli")] -impl std::str::FromStr for Data { +impl std::str::FromStr for Data<'_> { type Err = Error; fn from_str(s: &str) -> Result { @@ -378,6 +404,21 @@ pub fn split_len<'source>(s: &'source str, n: usize, pat: &str) -> Vec<&'source vec } +/// Default value for [`Request::language`]. +pub const DEFAULT_LANGUAGE: &str = "auto"; + +/// Custom serialization for [`Request::language`]. +fn serialize_language(lang: &str, s: S) -> std::result::Result +where + S: Serializer, +{ + s.serialize_str(if lang.is_empty() { + DEFAULT_LANGUAGE + } else { + lang + }) +} + /// LanguageTool POST check request. 
/// /// The main feature - check a text with LanguageTool for possible style and @@ -385,18 +426,13 @@ pub fn split_len<'source>(s: &'source str, n: usize, pat: &str) -> Vec<&'source /// /// The structure below tries to follow as closely as possible the JSON API /// described [here](https://languagetool.org/http-api/swagger-ui/#!/default/post_check). -#[cfg_attr(feature = "cli", derive(Args))] -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Hash)] +#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Hash, IntoStatic)] #[serde(rename_all = "camelCase")] #[non_exhaustive] -pub struct Request { +pub struct Request<'source> { /// The text to be checked. This or 'data' is required. - #[cfg_attr( - feature = "cli", - clap(short = 't', long, conflicts_with = "data", allow_hyphen_values(true)) - )] #[serde(skip_serializing_if = "Option::is_none")] - pub text: Option, + pub text: Option>, /// The text to be checked, given as a JSON document that specifies what's /// text and what's markup. This or 'text' is required. /// @@ -421,49 +457,29 @@ pub struct Request { /// ``` /// The 'data' feature is not limited to HTML or XML, it can be used for any /// kind of markup. Entities will need to be expanded in this input. - #[cfg_attr(feature = "cli", clap(short = 'd', long, conflicts_with = "text"))] #[serde(skip_serializing_if = "Option::is_none")] - pub data: Option, + pub data: Option>, /// A language code like `en-US`, `de-DE`, `fr`, or `auto` to guess the /// language automatically (see `preferredVariants` below). /// /// For languages with variants (English, German, Portuguese) spell checking /// will only be activated when you specify the variant, e.g. `en-GB` /// instead of just `en`. - #[cfg_attr( - all(feature = "cli", feature = "cli", feature = "cli"), - clap( - short = 'l', - long, - default_value = "auto", - value_parser = parse_language_code - ) - )] + #[serde(serialize_with = "serialize_language")] pub language: String, /// Set to get Premium API access: Your username/email as used to log in at /// languagetool.org. - #[cfg_attr( - feature = "cli", - clap(short = 'u', long, requires = "api_key", env = "LANGUAGETOOL_USERNAME") - )] #[serde(skip_serializing_if = "Option::is_none")] pub username: Option, - /// Set to get Premium API access: [your API - /// key](https://languagetool.org/editor/settings/api). - #[cfg_attr( - feature = "cli", - clap(short = 'k', long, requires = "username", env = "LANGUAGETOOL_API_KEY") - )] + /// Set to get Premium API access: your API key (see ). #[serde(skip_serializing_if = "Option::is_none")] pub api_key: Option, /// Comma-separated list of dictionaries to include words from; uses special /// default dictionary if this is unset. - #[cfg_attr(feature = "cli", clap(long))] #[serde(serialize_with = "serialize_option_vec_string")] pub dicts: Option>, /// A language code of the user's native language, enabling false friends /// checks for some language pairs. - #[cfg_attr(feature = "cli", clap(long))] #[serde(skip_serializing_if = "Option::is_none")] pub mother_tongue: Option, /// Comma-separated list of preferred language variants. @@ -475,79 +491,51 @@ pub struct Request { /// should set variants for at least German and English, as otherwise the /// spell checking will not work for those, as no spelling dictionary can be /// selected for just `en` or `de`. 
- #[cfg_attr(feature = "cli", clap(long, conflicts_with = "language"))] #[serde(serialize_with = "serialize_option_vec_string")] pub preferred_variants: Option>, /// IDs of rules to be enabled, comma-separated. - #[cfg_attr(feature = "cli", clap(long))] #[serde(serialize_with = "serialize_option_vec_string")] pub enabled_rules: Option>, /// IDs of rules to be disabled, comma-separated. - #[cfg_attr(feature = "cli", clap(long))] #[serde(serialize_with = "serialize_option_vec_string")] pub disabled_rules: Option>, /// IDs of categories to be enabled, comma-separated. - #[cfg_attr(feature = "cli", clap(long))] #[serde(serialize_with = "serialize_option_vec_string")] pub enabled_categories: Option>, /// IDs of categories to be disabled, comma-separated. - #[cfg_attr(feature = "cli", clap(long))] #[serde(serialize_with = "serialize_option_vec_string")] pub disabled_categories: Option>, /// If true, only the rules and categories whose IDs are specified with /// `enabledRules` or `enabledCategories` are enabled. - #[cfg_attr(feature = "cli", clap(long))] - #[serde(skip_serializing_if = "is_false")] + #[serde(skip_serializing_if = "std::ops::Not::not")] pub enabled_only: bool, /// If set to `picky`, additional rules will be activated, i.e. rules that /// you might only find useful when checking formal text. - #[cfg_attr( - feature = "cli", - clap(long, default_value = "default", ignore_case = true, value_enum) - )] #[serde(skip_serializing_if = "Level::is_default")] pub level: Level, } -impl Default for Request { - #[inline] - fn default() -> Request { - Request { - text: Default::default(), - data: Default::default(), +impl<'source> Request<'source> { + /// Create a new empty request with language set to `"auto"`. + #[must_use] + pub fn new() -> Self { + Self { language: "auto".to_string(), - username: Default::default(), - api_key: Default::default(), - dicts: Default::default(), - mother_tongue: Default::default(), - preferred_variants: Default::default(), - enabled_rules: Default::default(), - disabled_rules: Default::default(), - enabled_categories: Default::default(), - disabled_categories: Default::default(), - enabled_only: Default::default(), - level: Default::default(), + ..Default::default() } } -} - -#[inline] -fn is_false(b: &bool) -> bool { - !(*b) -} -impl Request { /// Set the text to be checked and remove potential data field. #[must_use] - pub fn with_text(mut self, text: String) -> Self { - self.text = Some(text); + pub fn with_text>>(mut self, text: T) -> Self { + self.text = Some(text.into()); self.data = None; self } /// Set the data to be checked and remove potential text field. #[must_use] - pub fn with_data(mut self, data: Data) -> Self { + pub fn with_data(mut self, data: Data<'source>) -> Self { self.data = Some(data); self.text = None; self @@ -556,7 +544,7 @@ impl Request { /// Set the data (obtained from string) to be checked and remove potential /// text field pub fn with_data_str(self, data: &str) -> serde_json::Result { - Ok(self.with_data(serde_json::from_str(data)?)) + serde_json::from_str(data).map(|data| self.with_data(data)) } /// Set the language of the text / data. @@ -566,29 +554,29 @@ impl Request { self } - /// Return a copy of the text within the request. + /// Return the text within the request. /// /// # Errors /// /// If both `self.text` and `self.data` are [`None`]. /// If any data annotation does not contain text or markup. 
- pub fn try_get_text(&self) -> Result { + pub fn try_get_text(&self) -> Result> { if let Some(ref text) = self.text { Ok(text.clone()) } else if let Some(ref data) = self.data { - let mut text = String::new(); - for da in data.annotation.iter() { - if let Some(ref t) = da.text { - text.push_str(t.as_str()); - } else if let Some(ref t) = da.markup { - text.push_str(t.as_str()); - } else { - return Err(Error::InvalidDataAnnotation( - "missing either text or markup field in {da:?}".to_string(), - )); - } + match data.annotation.len() { + 0 => Ok(Default::default()), + 1 => data.annotation[0].try_get_text(), + _ => { + let mut text = String::new(); + + for da in data.annotation.iter() { + text.push_str(da.try_get_text()?.deref()); + } + + Ok(Cow::Owned(text)) + }, } - Ok(text) } else { Err(Error::InvalidRequest( "missing either text or data field".to_string(), @@ -604,7 +592,7 @@ impl Request { /// If both `self.text` and `self.data` are [`None`]. /// If any data annotation does not contain text or markup. #[must_use] - pub fn get_text(&self) -> String { + pub fn get_text(&self) -> Cow<'source, str> { self.try_get_text().unwrap() } @@ -614,15 +602,20 @@ impl Request { /// # Errors /// /// If `self.text` is none. - pub fn try_split(&self, n: usize, pat: &str) -> Result> { - let text = self - .text - .as_ref() - .ok_or(Error::InvalidRequest("missing text field".to_string()))?; + pub fn try_split(mut self, n: usize, pat: &str) -> Result> { + let text = mem::take(&mut self.text) + .ok_or_else(|| Error::InvalidRequest("missing text field".to_string()))?; + let string: &str = match &text { + Cow::Owned(s) => s.as_str(), + Cow::Borrowed(s) => s, + }; - Ok(split_len(text.as_str(), n, pat) + Ok(split_len(string, n, pat) .iter() - .map(|text_fragment| self.clone().with_text(text_fragment.to_string())) + .map(|text_fragment| { + self.clone() + .with_text(Cow::Owned(text_fragment.to_string())) + }) .collect()) } @@ -634,78 +627,11 @@ impl Request { /// /// If `self.text` is none. #[must_use] - pub fn split(&self, n: usize, pat: &str) -> Vec { + pub fn split(self, n: usize, pat: &str) -> Vec { self.try_split(n, pat).unwrap() } } -/// Parse a string slice into a [`PathBuf`], and error if the file does not -/// exist. -#[cfg(feature = "cli")] -fn parse_filename(s: &str) -> Result { - let path_buf: PathBuf = s.parse().unwrap(); - - if path_buf.is_file() { - Ok(path_buf) - } else { - Err(Error::InvalidFilename(s.to_string())) - } -} - -/// Support file types. -#[cfg(feature = "cli")] -#[derive(Clone, Debug, Default, ValueEnum)] -#[non_exhaustive] -pub enum FileType { - /// Auto. - #[default] - Auto, - /// Markdown. - Markdown, - /// Typst. - Typst, -} - -/// Check text using LanguageTool server. -/// -/// The input can be one of the following: -/// -/// - raw text, if `--text TEXT` is provided; -/// - annotated data, if `--data TEXT` is provided; -/// - raw text, if `-- [FILE]...` are provided. Note that some file types will -/// use a -/// - raw text, through stdin, if nothing is provided. -#[cfg(feature = "cli")] -#[derive(Debug, Parser)] -pub struct CheckCommand { - /// If present, raw JSON output will be printed instead of annotated text. - /// This has no effect if `--data` is used, because it is never - /// annotated. - #[cfg(feature = "cli")] - #[clap(short = 'r', long)] - pub raw: bool, - /// Sets the maximum number of characters before splitting. - #[clap(long, default_value_t = 1500)] - pub max_length: usize, - /// If text is too long, will split on this pattern. 
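Because `try_split` and `split` now take the request by value (the text is taken out with `mem::take` before splitting), callers hand over the request and get one request per text fragment back. A minimal sketch, with an arbitrary fragment size:

```rust
use languagetool_rust::api::check::Request;

let req = Request::default().with_text("First paragraph.\n\nSecond paragraph.");

// `split` consumes `req`; every fragment owns its slice of the original text.
let fragments = req.split(1500, "\n\n");
for fragment in &fragments {
    assert!(fragment.text.is_some());
    assert!(fragment.data.is_none());
}
```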
- #[clap(long, default_value = "\n\n")] - pub split_pattern: String, - /// Max. number of suggestions kept. If negative, all suggestions are kept. - #[clap(long, default_value_t = 5, allow_negative_numbers = true)] - pub max_suggestions: isize, - /// Specify the files type to use the correct parser. - /// - /// If set to auto, the type is guessed from the filename extension. - #[clap(long, value_enum, default_value_t = FileType::default(), ignore_case = true)] - pub r#type: FileType, - /// Optional filenames from which input is read. - #[arg(conflicts_with_all(["text", "data"]), value_parser = parse_filename)] - pub filenames: Vec, - /// Inner [`Request`]. - #[command(flatten, next_help_heading = "Request options")] - pub request: Request, -} - #[cfg(test)] mod request_tests { @@ -713,22 +639,22 @@ mod request_tests { #[test] fn test_with_text() { - let req = Request::default().with_text("hello".to_string()); + let req = Request::default().with_text("hello"); - assert_eq!(req.text.unwrap(), "hello".to_string()); + assert_eq!(req.text.unwrap(), "hello"); assert!(req.data.is_none()); } #[test] fn test_with_data() { - let req = Request::default().with_text("hello".to_string()); + let req = Request::default().with_text("hello"); - assert_eq!(req.text.unwrap(), "hello".to_string()); + assert_eq!(req.text.unwrap(), "hello"); assert!(req.data.is_none()); } } -/// Responses +// RESPONSES /// Detected language from check request. #[allow(clippy::derive_partial_eq_without_eq)] @@ -1024,21 +950,30 @@ impl Response { /// /// This structure exists to keep a link between a check response /// and the original text that was checked. -#[derive(Debug, Clone, PartialEq)] -pub struct ResponseWithContext { +#[derive(Debug, Clone, PartialEq, IntoStatic)] +pub struct ResponseWithContext<'source> { /// Original text that was checked by LT. - pub text: String, + pub text: Cow<'source, str>, /// Check response. pub response: Response, /// Text's length. pub text_length: usize, } -impl ResponseWithContext { +impl Deref for ResponseWithContext<'_> { + type Target = Response; + fn deref(&self) -> &Self::Target { + &self.response + } +} + +impl<'source> ResponseWithContext<'source> { /// Bind a check response with its original text. #[must_use] - pub fn new(text: String, response: Response) -> Self { + pub fn new(text: Cow<'source, str>, response: Response) -> Self { let text_length = text.chars().count(); + + // Add more context to response Self { text, response, @@ -1047,7 +982,7 @@ impl ResponseWithContext { } /// Return an iterator over matches. - pub fn iter_matches(&self) -> std::slice::Iter<'_, Match> { + pub fn iter_matches(&'source self) -> std::slice::Iter<'source, Match> { self.response.iter_matches() } @@ -1059,7 +994,7 @@ impl ResponseWithContext { /// Return an iterator over matches and corresponding line number and line /// offset. 
#[must_use] - pub fn iter_match_positions(&self) -> MatchPositions<'_, std::slice::Iter<'_, Match>> { + pub fn iter_match_positions(&self) -> MatchPositions<'_, '_, std::slice::Iter<'_, Match>> { self.into() } @@ -1090,42 +1025,59 @@ impl ResponseWithContext { } self.response.matches.append(&mut other.response.matches); - self.text.push_str(other.text.as_str()); + + self.text.to_mut().push_str(&other.text); self.text_length += other.text_length; + self } } -impl From for Response { - #[allow(clippy::needless_borrow)] - fn from(mut resp: ResponseWithContext) -> Self { - let iter: MatchPositions<'_, std::slice::IterMut<'_, Match>> = (&mut resp).into(); - - for (line_number, line_offset, m) in iter { +impl<'source> From> for Response { + fn from(mut resp: ResponseWithContext<'source>) -> Self { + for (line_number, line_offset, m) in MatchPositions::new(&resp.text, &mut resp.response) { m.more_context = Some(MoreContext { line_number, line_offset, }); } + resp.response } } /// Iterator over matches and their corresponding line number and line offset. #[derive(Clone, Debug)] -pub struct MatchPositions<'source, T> { +pub struct MatchPositions<'source, 'response, T: Iterator + 'response> { text_chars: std::str::Chars<'source>, matches: T, line_number: usize, line_offset: usize, offset: usize, + _marker: PhantomData<&'response ()>, +} + +impl<'source, 'response> MatchPositions<'source, 'response, std::slice::IterMut<'response, Match>> { + fn new(text: &'source str, response: &'response mut Response) -> Self { + MatchPositions { + _marker: Default::default(), + text_chars: text.chars(), + matches: response.iter_matches_mut(), + line_number: 1, + line_offset: 0, + offset: 0, + } + } } -impl<'source> From<&'source ResponseWithContext> - for MatchPositions<'source, std::slice::Iter<'source, Match>> +impl<'source, 'response> From<&'source ResponseWithContext<'source>> + for MatchPositions<'source, 'response, std::slice::Iter<'response, Match>> +where + 'source: 'response, { fn from(response: &'source ResponseWithContext) -> Self { MatchPositions { + _marker: Default::default(), text_chars: response.text.chars(), matches: response.iter_matches(), line_number: 1, @@ -1135,11 +1087,14 @@ impl<'source> From<&'source ResponseWithContext> } } -impl<'source> From<&'source mut ResponseWithContext> - for MatchPositions<'source, std::slice::IterMut<'source, Match>> +impl<'source, 'response> From<&'source mut ResponseWithContext<'source>> + for MatchPositions<'source, 'response, std::slice::IterMut<'response, Match>> +where + 'source: 'response, { fn from(response: &'source mut ResponseWithContext) -> Self { MatchPositions { + _marker: Default::default(), text_chars: response.text.chars(), matches: response.response.iter_matches_mut(), line_number: 1, @@ -1149,8 +1104,8 @@ impl<'source> From<&'source mut ResponseWithContext> } } -impl<'source, T> MatchPositions<'source, T> { - /// Set the line number to a give value. +impl<'response, T: Iterator + 'response> MatchPositions<'_, 'response, T> { + /// Set the line number to a given value. /// /// By default, the first line number is 1. 
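Since `ResponseWithContext` now implements `Deref<Target = Response>`, call sites that only need the plain response can keep taking `&Response` and accept the wrapper transparently. A minimal sketch (the `Response` value would normally come from a check call):

```rust
use std::borrow::Cow;
use languagetool_rust::api::check::{Response, ResponseWithContext};

fn count_matches(response: &Response) -> usize {
    response.matches.len()
}

fn with_context_example(text: &str, response: Response) -> usize {
    let with_context = ResponseWithContext::new(Cow::Borrowed(text), response);
    // Deref coercion: `&ResponseWithContext` is accepted where `&Response` is expected.
    count_matches(&with_context)
}
```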
pub fn set_line_number(mut self, line_number: usize) -> Self { @@ -1179,7 +1134,11 @@ impl<'source, T> MatchPositions<'source, T> { } } -impl<'source> Iterator for MatchPositions<'source, std::slice::Iter<'source, Match>> { +impl<'source, 'response> Iterator + for MatchPositions<'source, 'response, std::slice::Iter<'response, Match>> +where + 'response: 'source, +{ type Item = (usize, usize, &'source Match); fn next(&mut self) -> Option { @@ -1192,7 +1151,11 @@ impl<'source> Iterator for MatchPositions<'source, std::slice::Iter<'source, Mat } } -impl<'source> Iterator for MatchPositions<'source, std::slice::IterMut<'source, Match>> { +impl<'source, 'response> Iterator + for MatchPositions<'source, 'response, std::slice::IterMut<'response, Match>> +where + 'response: 'source, +{ type Item = (usize, usize, &'source mut Match); fn next(&mut self) -> Option { @@ -1228,11 +1191,11 @@ mod tests { } } - impl<'source> From> for DataAnnotation { + impl<'source> From> for DataAnnotation<'source> { fn from(token: Token<'source>) -> Self { match token { - Token::Text(s) => DataAnnotation::new_text(s.to_string()), - Token::Skip(s) => DataAnnotation::new_markup(s.to_string()), + Token::Text(s) => DataAnnotation::new_text(s), + Token::Skip(s) => DataAnnotation::new_markup(s), } } } @@ -1244,10 +1207,10 @@ mod tests { let expected_data = Data { annotation: vec![ - DataAnnotation::new_text("My".to_string()), - DataAnnotation::new_text("name".to_string()), - DataAnnotation::new_text("is".to_string()), - DataAnnotation::new_markup("Q34XY".to_string()), + DataAnnotation::new_text("My"), + DataAnnotation::new_text("name"), + DataAnnotation::new_text("is"), + DataAnnotation::new_markup("Q34XY"), ], }; diff --git a/src/api/mod.rs b/src/api/mod.rs index f6ed976..813a72f 100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -11,7 +11,7 @@ pub mod languages; pub mod server; pub mod words; -use crate::error::{Error, Result}; +use crate::error::Result; /// A HTTP client for making requests to a LanguageTool server. #[derive(Debug)] @@ -48,7 +48,7 @@ impl Client { } /// Send a check request to the server and await for the response. - pub async fn check(&self, request: &check::Request) -> Result { + pub async fn check(&self, request: &check::Request<'_>) -> Result { self.client .post(self.url("/check")) .query(request) diff --git a/src/api/server.rs b/src/api/server.rs index ebb3a08..495d380 100644 --- a/src/api/server.rs +++ b/src/api/server.rs @@ -9,6 +9,7 @@ use crate::{ }; #[cfg(feature = "cli")] use clap::Args; +use lifetime::IntoStatic; use reqwest::Client; use serde::{Deserialize, Serialize}; use serde_json::Value; @@ -262,6 +263,7 @@ impl Default for ServerParameters { /// To use your local server instead of online api, set: /// * `hostname` to "http://localhost" /// * `port` to "8081" +/// /// if you used the default configuration to start the server. #[cfg_attr(feature = "cli", derive(Args))] #[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] @@ -366,7 +368,7 @@ impl ServerClient { } /// Send a check request to the server and await for the response. - pub async fn check(&self, request: &Request) -> Result { + pub async fn check(&self, request: &Request<'_>) -> Result { match self .client .post(format!("{0}/check", self.api)) @@ -408,36 +410,44 @@ impl ServerClient { /// /// If any of the requests has `self.text` field which is none. 
#[cfg(feature = "multithreaded")] - pub async fn check_multiple_and_join(&self, requests: Vec) -> Result { + pub async fn check_multiple_and_join<'source>( + &self, + requests: Vec>, + ) -> Result> { + use std::borrow::Cow; + let mut tasks = Vec::with_capacity(requests.len()); - for request in requests.into_iter() { - let server_client = self.clone(); - tasks.push(tokio::spawn(async move { - let response = server_client.check(&request).await?; - let text = request.text.ok_or(Error::InvalidRequest( - "missing text field; cannot join requests with data annotations".to_string(), - ))?; - Result::<(String, Response)>::Ok((text, response)) - })); - } + requests + .into_iter() + .map(|r| r.into_static()) + .for_each(|request| { + let server_client = self.clone(); + + tasks.push(tokio::spawn(async move { + let response = server_client.check(&request).await?; + let text = request.text.ok_or_else(|| { + Error::InvalidRequest( + "missing text field; cannot join requests with data annotations" + .to_string(), + ) + })?; + Result::<(Cow<'static, str>, Response)>::Ok((text, response)) + })); + }); let mut response_with_context: Option = None; for task in tasks { let (text, response) = task.await.unwrap()?; - match response_with_context { - Some(resp) => { - response_with_context = - Some(resp.append(check::ResponseWithContext::new(text, response))) - }, - None => { - response_with_context = Some(check::ResponseWithContext::new(text, response)) - }, - } + + response_with_context = Some(match response_with_context { + Some(resp) => resp.append(check::ResponseWithContext::new(text, response)), + None => check::ResponseWithContext::new(text, response), + }) } - Ok(response_with_context.unwrap().into()) + Ok(response_with_context.unwrap()) } /// Send a check request to the server, await for the response and annotate @@ -445,14 +455,14 @@ impl ServerClient { #[cfg(feature = "annotate")] pub async fn annotate_check( &self, - request: &Request, + request: &Request<'_>, origin: Option<&str>, color: bool, ) -> Result { let text = request.get_text(); let resp = self.check(request).await?; - Ok(resp.annotate(text.as_str(), origin, color)) + Ok(resp.annotate(text.as_ref(), origin, color)) } /// Send a languages request to the server and await for the response. @@ -595,7 +605,7 @@ mod tests { #[tokio::test] async fn test_server_check_text() { let client = ServerClient::from_env_or_default(); - let req = Request::default().with_text("je suis une poupee".to_string()); + let req = Request::default().with_text("je suis une poupee"); assert!(client.check(&req).await.is_ok()); } diff --git a/src/api/words/mod.rs b/src/api/words/mod.rs index b23f24b..204d81c 100644 --- a/src/api/words/mod.rs +++ b/src/api/words/mod.rs @@ -4,7 +4,7 @@ use crate::error::{Error, Result}; use super::check::serialize_option_vec_string; #[cfg(feature = "cli")] -use clap::{Args, Parser, Subcommand}; +use clap::Args; use serde::{Deserialize, Serialize}; pub mod add; @@ -43,7 +43,7 @@ pub struct LoginArgs { clap(short = 'u', long, required = true, env = "LANGUAGETOOL_USERNAME") )] pub username: String, - /// [Your API key](https://languagetool.org/editor/settings/api). + /// Your API key (see ). #[cfg_attr( feature = "cli", clap(short = 'k', long, required = true, env = "LANGUAGETOOL_API_KEY") @@ -84,7 +84,7 @@ pub struct Request { /// Copy of [`Request`], but used to CLI only. 
/// -/// This is a temporary solution, until [#3165](https://github.com/clap-rs/clap/issues/3165) is +/// This is a temporary solution, until [#4697](https://github.com/clap-rs/clap/issues/4697) is /// closed. #[cfg(feature = "cli")] #[derive(Args, Clone, Debug, Default, PartialEq, Eq, Deserialize, Serialize)] @@ -120,30 +120,6 @@ impl From for Request { } } -/// Words' optional subcommand. -#[cfg(feature = "cli")] -#[derive(Clone, Debug, Subcommand)] -pub enum WordsSubcommand { - /// Add a word to some user's list. - Add(add::Request), - /// Remove a word from some user's list. - Delete(delete::Request), -} - -/// Retrieve some user's words list. -#[cfg(feature = "cli")] -#[derive(Debug, Parser)] -#[clap(args_conflicts_with_subcommands = true)] -#[clap(subcommand_negates_reqs = true)] -pub struct WordsCommand { - /// Actual GET request. - #[command(flatten)] - pub request: RequestArgs, - /// Optional subcommand. - #[command(subcommand)] - pub subcommand: Option, -} - /// LanguageTool GET words response. #[derive(Clone, Debug, Default, PartialEq, Eq, Deserialize, Serialize)] #[non_exhaustive] diff --git a/src/cli.rs b/src/cli.rs deleted file mode 100644 index 00292bc..0000000 --- a/src/cli.rs +++ /dev/null @@ -1,342 +0,0 @@ -//! Command line tools. -//! -//! This module is specifically designed to be used by LTRS's binary target. -//! It contains all the content needed to create LTRS's command line interface. -use std::io::{self, Write}; - -use clap::{CommandFactory, Parser, Subcommand}; -use is_terminal::IsTerminal; -#[cfg(feature = "annotate")] -use termcolor::WriteColor; -use termcolor::{ColorChoice, StandardStream}; - -use crate::{ - api::{ - check, - server::{ServerCli, ServerClient}, - words::WordsSubcommand, - }, - error::Result, -}; - -/// Read lines from standard input and write to buffer string. -/// -/// Standard output is used when waiting for user to input text. -fn read_from_stdin(stdout: &mut W, buffer: &mut String) -> Result<()> -where - W: io::Write, -{ - if io::stdin().is_terminal() { - #[cfg(windows)] - writeln!( - stdout, - "Reading from STDIN, press [CTRL+Z] when you're done." - )?; - - #[cfg(unix)] - writeln!( - stdout, - "Reading from STDIN, press [CTRL+D] when you're done." - )?; - } - let stdin = std::io::stdin(); - - while stdin.read_line(buffer)? > 0 {} - Ok(()) -} - -/// Main command line structure. Contains every subcommand. -#[derive(Parser, Debug)] -#[command( - author, - version, - about = "LanguageTool API bindings in Rust.", - propagate_version(true), - subcommand_required(true), - verbatim_doc_comment -)] -pub struct Cli { - /// Specify WHEN to colorize output. - #[arg(short, long, value_name = "WHEN", default_value = "auto", default_missing_value = "always", num_args(0..=1), require_equals(true))] - pub color: clap::ColorChoice, - /// [`ServerCli`] arguments. - #[command(flatten, next_help_heading = "Server options")] - pub server_cli: ServerCli, - /// Subcommand. - #[command(subcommand)] - #[allow(missing_docs)] - pub command: Command, -} - -/// Enumerate all possible commands. -#[derive(Subcommand, Debug)] -#[allow(missing_docs)] -pub enum Command { - /// Check text using LanguageTool server. - Check(crate::api::check::CheckCommand), - /// Commands to easily run a LanguageTool server with Docker. - #[cfg(feature = "docker")] - Docker(crate::docker::DockerCommand), - /// Return list of supported languages. - #[clap(visible_alias = "lang")] - Languages, - /// Ping the LanguageTool server and return time elapsed in ms if success. 
- Ping, - /// Retrieve some user's words list, or add / delete word from it. - Words(crate::api::words::WordsCommand), - /// Generate tab-completion scripts for supported shells - #[cfg(feature = "cli-complete")] - Completions(complete::CompleteCommand), -} - -impl Cli { - /// Return a standard output stream that optionally supports color. - #[must_use] - fn stdout(&self) -> StandardStream { - let mut choice: ColorChoice = match self.color { - clap::ColorChoice::Auto => ColorChoice::Auto, - clap::ColorChoice::Always => ColorChoice::Always, - clap::ColorChoice::Never => ColorChoice::Never, - }; - - if choice == ColorChoice::Auto && !io::stdout().is_terminal() { - choice = ColorChoice::Never; - } - - StandardStream::stdout(choice) - } - - /// Execute command, possibly returning an error. - pub async fn execute(self) -> Result<()> { - let mut stdout = self.stdout(); - - let server_client: ServerClient = self.server_cli.into(); - - match self.command { - Command::Check(cmd) => { - let mut request = cmd.request; - #[cfg(feature = "annotate")] - let color = stdout.supports_color(); - - let server_client = server_client.with_max_suggestions(cmd.max_suggestions); - - if cmd.filenames.is_empty() { - if request.text.is_none() && request.data.is_none() { - let mut text = String::new(); - read_from_stdin(&mut stdout, &mut text)?; - request = request.with_text(text); - } - - let mut response = if request.text.is_some() { - let requests = request.split(cmd.max_length, cmd.split_pattern.as_str()); - server_client.check_multiple_and_join(requests).await? - } else { - server_client.check(&request).await? - }; - - if request.text.is_some() && !cmd.raw { - let text = request.text.unwrap(); - response = check::ResponseWithContext::new(text.clone(), response).into(); - writeln!( - &mut stdout, - "{}", - &response.annotate(text.as_str(), None, color) - )?; - } else { - writeln!(&mut stdout, "{}", serde_json::to_string_pretty(&response)?)?; - } - - return Ok(()); - } - - for filename in cmd.filenames.iter() { - let text = std::fs::read_to_string(filename)?; - let requests = request - .clone() - .with_text(text.clone()) - .split(cmd.max_length, cmd.split_pattern.as_str()); - let response = server_client.check_multiple_and_join(requests).await?; - - if !cmd.raw { - writeln!( - &mut stdout, - "{}", - &response.annotate(text.as_str(), filename.to_str(), color) - )?; - } else { - writeln!(&mut stdout, "{}", serde_json::to_string_pretty(&response)?)?; - } - } - }, - #[cfg(feature = "docker")] - Command::Docker(cmd) => { - cmd.execute(&mut stdout)?; - }, - Command::Languages => { - let languages_response = server_client.languages().await?; - let languages = serde_json::to_string_pretty(&languages_response)?; - - writeln!(&mut stdout, "{languages}")?; - }, - Command::Ping => { - let ping = server_client.ping().await?; - writeln!(&mut stdout, "PONG! Delay: {ping} ms")?; - }, - Command::Words(cmd) => { - let words = match &cmd.subcommand { - Some(WordsSubcommand::Add(request)) => { - let words_response = server_client.words_add(request).await?; - serde_json::to_string_pretty(&words_response)? - }, - Some(WordsSubcommand::Delete(request)) => { - let words_response = server_client.words_delete(request).await?; - serde_json::to_string_pretty(&words_response)? - }, - None => { - let words_response = server_client.words(&cmd.request.into()).await?; - serde_json::to_string_pretty(&words_response)? 
- }, - }; - - writeln!(&mut stdout, "{words}")?; - }, - #[cfg(feature = "cli-complete")] - Command::Completions(cmd) => { - cmd.execute(&mut stdout)?; - }, - } - Ok(()) - } -} - -/// Build a command from the top-level command line structure. -#[must_use] -pub fn build_cli() -> clap::Command { - Cli::command() -} - -#[cfg(test)] -mod tests { - use super::*; - #[test] - fn test_cli() { - Cli::command().debug_assert(); - } -} - -#[cfg(feature = "cli-complete")] -pub(crate) mod complete { - //! Completion scripts generation with [`clap_complete`]. - - use crate::error::Result; - use clap::{Command, Parser}; - use clap_complete::{generate, shells::Shell}; - use std::io::Write; - - /// Command structure to generate complete scripts. - #[derive(Debug, Parser)] - #[command( - about = "Generate tab-completion scripts for supported shells", - after_help = "Use --help for installation help.", - after_long_help = COMPLETIONS_HELP -)] - pub struct CompleteCommand { - /// Shell for which to completion script is generated. - #[arg(value_enum, ignore_case = true)] - shell: Shell, - } - - impl CompleteCommand { - /// Generate completion file for current shell and write to buffer. - pub fn generate_completion_file(&self, build_cli: F, buffer: &mut W) - where - F: FnOnce() -> Command, - W: Write, - { - generate(self.shell, &mut build_cli(), "ltrs", buffer); - } - - /// Execute command by writing completion script to stdout. - pub fn execute(&self, stdout: &mut W) -> Result<()> - where - W: Write, - { - self.generate_completion_file(super::build_cli, stdout); - Ok(()) - } - } - - pub(crate) static COMPLETIONS_HELP: &str = r"DISCUSSION: - Enable tab completion for Bash, Fish, Zsh, or PowerShell - Elvish shell completion is currently supported, but not documented below. - The script is output on `stdout`, allowing one to re-direct the - output to the file of their choosing. Where you place the file - will depend on which shell, and which operating system you are - using. Your particular configuration may also determine where - these scripts need to be placed. - Here are some common set ups for the three supported shells under - Unix and similar operating systems (such as GNU/Linux). - BASH: - Completion files are commonly stored in `/etc/bash_completion.d/` for - system-wide commands, but can be stored in - `~/.local/share/bash-completion/completions` for user-specific commands. - Run the command: - $ mkdir -p ~/.local/share/bash-completion/completions - $ ltrs completions bash >> ~/.local/share/bash-completion/completions/ltrs - This installs the completion script. You may have to log out and - log back in to your shell session for the changes to take effect. - BASH (macOS/Homebrew): - Homebrew stores bash completion files within the Homebrew directory. - With the `bash-completion` brew formula installed, run the command: - $ mkdir -p $(brew --prefix)/etc/bash_completion.d - $ ltrs completions bash > $(brew --prefix)/etc/bash_completion.d/ltrs.bash-completion - FISH: - Fish completion files are commonly stored in - `$HOME/.config/fish/completions`. Run the command: - $ mkdir -p ~/.config/fish/completions - $ ltrs completions fish > ~/.config/fish/completions/ltrs.fish - This installs the completion script. You may have to log out and - log back in to your shell session for the changes to take effect. - ZSH: - ZSH completions are commonly stored in any directory listed in - your `$fpath` variable. 
To use these completions, you must either - add the generated script to one of those directories, or add your - own to this list. - Adding a custom directory is often the safest bet if you are - unsure of which directory to use. First create the directory; for - this example we'll create a hidden directory inside our `$HOME` - directory: - $ mkdir ~/.zfunc - Then add the following lines to your `.zshrc` just before - `compinit`: - fpath+=~/.zfunc - Now you can install the completions script using the following - command: - $ ltrs completions zsh > ~/.zfunc/_ltrs - You must then either log out and log back in, or simply run - $ exec zsh - for the new completions to take effect. - CUSTOM LOCATIONS: - Alternatively, you could save these files to the place of your - choosing, such as a custom directory inside your $HOME. Doing so - will require you to add the proper directives, such as `source`ing - inside your login script. Consult your shells documentation for - how to add such directives. - POWERSHELL: - The powershell completion scripts require PowerShell v5.0+ (which - comes with Windows 10, but can be downloaded separately for windows 7 - or 8.1). - First, check if a profile has already been set - PS C:\> Test-Path $profile - If the above command returns `False` run the following - PS C:\> New-Item -path $profile -type file -force - Now open the file provided by `$profile` (if you used the - `New-Item` command it will be - `${env:USERPROFILE}\Documents\WindowsPowerShell\Microsoft.PowerShell_profile.ps1` - Next, we either save the completions file into our profile, or - into a separate file and source it inside our profile. To save the - completions into our profile simply use - PS C:\> ltrs completions powershell >> ${env:USERPROFILE}\Documents\WindowsPowerShell\Microsoft.PowerShell_profile.ps1 - SOURCE: - This documentation is directly taken from: https://github.com/rust-lang/rustup/blob/8f6b53628ad996ad86f9c6225fa500cddf860905/src/cli/help.rs#L157"; -} diff --git a/src/cli/check.rs b/src/cli/check.rs new file mode 100644 index 0000000..f040dd5 --- /dev/null +++ b/src/cli/check.rs @@ -0,0 +1,512 @@ +//! Check text using LanguageTool server. +//! +//! The input can be one of the following: +//! +//! - raw text, if `--text TEXT` is provided; +//! - annotated data, if `--data TEXT` is provided; +//! - text from file(s), if `[FILE(S)]...` are provided. +//! - raw text through `stdin`, if nothing else is provided. +use std::{borrow::Cow, io::Write, mem, path::PathBuf}; + +use clap::{Args, Parser, ValueEnum}; +use serde::{Deserialize, Serialize}; +use termcolor::{StandardStream, WriteColor}; + +use crate::{ + api::{ + check::{ + parse_language_code, split_len, Data, DataAnnotation, Level, Request, DEFAULT_LANGUAGE, + }, + server::ServerClient, + }, + error::{Error, Result}, +}; + +use super::ExecuteSubcommand; + +/// Parse a string slice into a [`PathBuf`], and error if the file does not +/// exist. +fn parse_filename(s: &str) -> Result { + let path_buf = PathBuf::from(s); + + if path_buf.is_file() { + Ok(path_buf) + } else { + Err(Error::InvalidFilename(s.to_string())) + } +} + +/// Command to check a text with LanguageTool for possible style and grammar +/// issues. +#[derive(Debug, Parser)] +pub struct Command { + /// If present, raw JSON output will be printed instead of annotated text. + /// This has no effect if `--data` is used, because it is never + /// annotated. + #[clap(short = 'r', long)] + pub raw: bool, + /// Sets the maximum number of characters before splitting. 
+ #[clap(long, default_value_t = 1500)] + pub max_length: usize, + /// If text is too long, will split on this pattern. + #[clap(long, default_value = "\n\n")] + pub split_pattern: String, + /// Max. number of suggestions kept. If negative, all suggestions are kept. + #[clap(long, default_value_t = 5, allow_negative_numbers = true)] + pub max_suggestions: isize, + /// Specify the files type to use the correct parser. + /// + /// If set to auto, the type is guessed from the filename extension. + #[clap(long, value_enum, default_value_t = FileType::default(), ignore_case = true)] + pub r#type: FileType, + /// Optional filenames from which input is read. + #[arg(conflicts_with_all(["text", "data"]), value_parser = parse_filename)] + pub filenames: Vec, + /// Inner [`Request`]. + #[command(flatten, next_help_heading = "Request options")] + pub request: CliRequest, +} + +/// Support file types. +#[derive(Clone, Debug, Default, ValueEnum)] +#[non_exhaustive] +pub enum FileType { + /// Auto. + #[default] + Auto, + /// Markdown. + Markdown, + /// Typst. + Typst, +} + +impl ExecuteSubcommand for Command { + /// Executes the `check` subcommand. + async fn execute(self, mut stdout: StandardStream, server_client: ServerClient) -> Result<()> { + let mut request = self.request; + #[cfg(feature = "annotate")] + let color = stdout.supports_color(); + + let server_client = server_client.with_max_suggestions(self.max_suggestions); + + // ANNOTATED DATA, RAW TEXT, STDIN + if self.filenames.is_empty() { + // Fallback to `stdin` if nothing else is provided + if request.text.is_none() && request.data.is_none() { + let mut text = String::new(); + super::read_from_stdin(&mut stdout, &mut text)?; + request = request.with_text(Cow::Owned(text)); + } + + if request.text.is_none() { + // Handle annotated data + let response = server_client.check(&request.into()).await?; + writeln!(&mut stdout, "{}", serde_json::to_string_pretty(&response)?)?; + return Ok(()); + }; + + let requests = request.split(self.max_length, self.split_pattern.as_str()); + let response = server_client.check_multiple_and_join(requests).await?; + + writeln!( + &mut stdout, + "{}", + &response.annotate(response.text.as_ref(), None, color) + )?; + + return Ok(()); + } + + // FILES + for filename in self.filenames.iter() { + let text = std::fs::read_to_string(filename)?; + let requests = request + .clone() + .with_text(text) + .split(self.max_length, self.split_pattern.as_str()); + let response = server_client.check_multiple_and_join(requests).await?; + + if !self.raw { + writeln!( + &mut stdout, + "{}", + &response.annotate(response.text.as_ref(), filename.to_str(), color) + )?; + } else { + writeln!(&mut stdout, "{}", serde_json::to_string_pretty(&*response)?)?; + } + } + + Ok(()) + } +} + +// NOTE: The below structs are copied from `../api/check.rs` to avoid lifetime +// issues with `clap` TODO: Remove these once this upstream issue is resolved: +// ------------------------------------------------------------------------------------------------- + +/// LanguageTool POST check request. +/// +/// The main feature - check a text with LanguageTool for possible style and +/// grammar issues. +/// +/// The structure below tries to follow as closely as possible the JSON API +/// described [here](https://languagetool.org/http-api/swagger-ui/#!/default/post_check). +#[derive(Args, Clone, Debug, Default, PartialEq, Eq, Hash)] +#[non_exhaustive] +pub struct CliRequest { + /// The text to be checked. This or 'data' is required. 
+ #[clap(short = 't', long, conflicts_with = "data", allow_hyphen_values(true))] + pub text: Option, + /// The text to be checked, given as a JSON document that specifies what's + /// text and what's markup. This or 'text' is required. + /// + /// Markup will be ignored when looking for errors. Example text: + /// ```html + /// A test + /// ``` + /// JSON for the example text: + /// ```json + /// {"annotation":[ + /// {"text": "A "}, + /// {"markup": ""}, + /// {"text": "test"}, + /// {"markup": ""} + /// ]} + /// ``` + /// If you have markup that should be interpreted as whitespace, like `
<p>
` + /// in HTML, you can have it interpreted like this: + /// + /// ```json + /// {"markup": "
<p>
", "interpretAs": "\n\n"} + /// ``` + /// The 'data' feature is not limited to HTML or XML, it can be used for any + /// kind of markup. Entities will need to be expanded in this input. + #[clap(short = 'd', long, conflicts_with = "text")] + pub data: Option, + /// A language code like `en-US`, `de-DE`, `fr`, or `auto` to guess the + /// language automatically (see `preferredVariants` below). + /// + /// For languages with variants (English, German, Portuguese) spell checking + /// will only be activated when you specify the variant, e.g. `en-GB` + /// instead of just `en`. + #[cfg_attr( + feature = "cli", + clap( + short = 'l', + long, + default_value = DEFAULT_LANGUAGE, + value_parser = parse_language_code + ) + )] + pub language: String, + /// Set to get Premium API access: Your username/email as used to log in at + /// languagetool.org. + #[cfg_attr( + feature = "cli", + clap(short = 'u', long, requires = "api_key", env = "LANGUAGETOOL_USERNAME") + )] + pub username: Option, + /// Set to get Premium API access: your API key (see ). + #[cfg_attr( + feature = "cli", + clap(short = 'k', long, requires = "username", env = "LANGUAGETOOL_API_KEY") + )] + pub api_key: Option, + /// Comma-separated list of dictionaries to include words from; uses special + /// default dictionary if this is unset. + #[cfg_attr(feature = "cli", clap(long))] + pub dicts: Option>, + /// A language code of the user's native language, enabling false friends + /// checks for some language pairs. + #[cfg_attr(feature = "cli", clap(long))] + pub mother_tongue: Option, + /// Comma-separated list of preferred language variants. + /// + /// The language detector used with `language=auto` can detect e.g. English, + /// but it cannot decide whether British English or American English is + /// used. Thus this parameter can be used to specify the preferred variants + /// like `en-GB` and `de-AT`. Only available with `language=auto`. You + /// should set variants for at least German and English, as otherwise the + /// spell checking will not work for those, as no spelling dictionary can be + /// selected for just `en` or `de`. + #[cfg_attr(feature = "cli", clap(long, conflicts_with = "language"))] + pub preferred_variants: Option>, + /// IDs of rules to be enabled, comma-separated. + #[cfg_attr(feature = "cli", clap(long))] + pub enabled_rules: Option>, + /// IDs of rules to be disabled, comma-separated. + #[cfg_attr(feature = "cli", clap(long))] + pub disabled_rules: Option>, + /// IDs of categories to be enabled, comma-separated. + #[cfg_attr(feature = "cli", clap(long))] + pub enabled_categories: Option>, + /// IDs of categories to be disabled, comma-separated. + #[cfg_attr(feature = "cli", clap(long))] + pub disabled_categories: Option>, + /// If true, only the rules and categories whose IDs are specified with + /// `enabledRules` or `enabledCategories` are enabled. + #[cfg_attr(feature = "cli", clap(long))] + pub enabled_only: bool, + /// If set to `picky`, additional rules will be activated, i.e. rules that + /// you might only find useful when checking formal text. 
+    #[cfg_attr(
+        feature = "cli",
+        clap(long, default_value = "default", ignore_case = true, value_enum)
+    )]
+    pub level: Level,
+}
+
+impl From<CliRequest> for Request<'_> {
+    fn from(val: CliRequest) -> Self {
+        Request {
+            text: val.text.map(Cow::Owned),
+            data: val.data.map(Into::into),
+            language: val.language,
+            username: val.username,
+            api_key: val.api_key,
+            dicts: val.dicts,
+            mother_tongue: val.mother_tongue,
+            preferred_variants: val.preferred_variants,
+            enabled_rules: val.enabled_rules,
+            disabled_rules: val.disabled_rules,
+            enabled_categories: val.enabled_categories,
+            disabled_categories: val.disabled_categories,
+            enabled_only: val.enabled_only,
+            level: val.level,
+        }
+    }
+}
+
+impl CliRequest {
+    /// Create a new empty request with language set to `"auto"`.
+    #[must_use]
+    pub fn new() -> Self {
+        Self {
+            language: "auto".to_string(),
+            ..Default::default()
+        }
+    }
+
+    /// Set the text to be checked and remove potential data field.
+    #[must_use]
+    pub fn with_text<T: Into<String>>(mut self, text: T) -> Self {
+        self.text = Some(text.into());
+        self.data = None;
+        self
+    }
+
+    /// Set the data to be checked and remove potential text field.
+    #[must_use]
+    pub fn with_data(mut self, data: CliData) -> Self {
+        self.data = Some(data);
+        self.text = None;
+        self
+    }
+
+    /// Set the data (obtained from string) to be checked and remove potential
+    /// text field.
+    pub fn with_data_str(self, data: &str) -> serde_json::Result<Self> {
+        serde_json::from_str(data).map(|data| self.with_data(data))
+    }
+
+    /// Set the language of the text / data.
+    #[must_use]
+    pub fn with_language(mut self, language: String) -> Self {
+        self.language = language;
+        self
+    }
+
+    /// Return the text within the request.
+    ///
+    /// # Errors
+    ///
+    /// If both `self.text` and `self.data` are [`None`].
+    /// If any data annotation does not contain text or markup.
+    pub fn try_get_text(&self) -> Result<String> {
+        if let Some(ref text) = self.text {
+            Ok(text.clone())
+        } else if let Some(ref data) = self.data {
+            match data.annotation.len() {
+                0 => Ok(Default::default()),
+                1 => data.annotation[0].try_get_text(),
+                _ => {
+                    let mut text = String::new();
+
+                    for da in data.annotation.iter() {
+                        text.push_str(&da.try_get_text()?);
+                    }
+
+                    Ok(text)
+                },
+            }
+        } else {
+            Err(Error::InvalidRequest(
+                "missing either text or data field".to_string(),
+            ))
+        }
+    }
+
+    /// Return a copy of the text within the request.
+    /// Call [`Request::try_get_text`] but panic on error.
+    ///
+    /// # Panics
+    ///
+    /// If both `self.text` and `self.data` are [`None`].
+    /// If any data annotation does not contain text or markup.
+    #[must_use]
+    pub fn get_text(&self) -> String {
+        self.try_get_text().unwrap()
+    }
+
+    /// Split this request into multiple, using [`split_len`] function to split
+    /// text.
+    ///
+    /// # Errors
+    ///
+    /// If `self.text` is none.
+    pub fn try_split(mut self, n: usize, pat: &str) -> Result<Vec<Request>> {
+        let text = mem::take(&mut self.text)
+            .ok_or_else(|| Error::InvalidRequest("missing text field".to_string()))?;
+
+        Ok(split_len(&text, n, pat)
+            .iter()
+            .map(|text_fragment| self.clone().with_text(text_fragment.to_string()).into())
+            .collect())
+    }
+
+    /// Split this request into multiple, using [`split_len`] function to split
+    /// text.
+    /// Call [`Request::try_split`] but panic on error.
+    ///
+    /// # Panics
+    ///
+    /// If `self.text` is none.
+    #[must_use]
+    pub fn split(self, n: usize, pat: &str) -> Vec<Request> {
+        self.try_split(n, pat).unwrap()
+    }
+}
+
+/// Alternative text to be checked.
+#[derive(Clone, Debug, Default, Deserialize, PartialEq, Eq, Hash)]
+#[non_exhaustive]
+pub struct CliData {
+    /// Vector of markup text, see [`DataAnnotation`].
+    pub annotation: Vec<CliDataAnnotation>,
+}
+
+impl From<CliData> for Data<'_> {
+    fn from(val: CliData) -> Self {
+        Data {
+            annotation: val
+                .annotation
+                .into_iter()
+                .map(|a| a.into())
+                .collect::<Vec<_>>(),
+        }
+    }
+}
+
+/// A portion of text to be checked.
+#[derive(Clone, Debug, Deserialize, PartialEq, Eq, Serialize, Hash)]
+#[non_exhaustive]
+#[serde(rename_all = "camelCase")]
+pub struct CliDataAnnotation {
+    /// Text that should be treated as normal text.
+    ///
+    /// This or `markup` is required.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub text: Option<String>,
+    /// Text that should be treated as markup.
+    ///
+    /// This or `text` is required.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub markup: Option<String>,
+    /// If set, the markup will be interpreted as this.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub interpret_as: Option<String>,
+}
+
+impl From<CliDataAnnotation> for DataAnnotation<'_> {
+    fn from(val: CliDataAnnotation) -> Self {
+        DataAnnotation {
+            text: val.text.map(Cow::Owned),
+            markup: val.markup.map(Cow::Owned),
+            interpret_as: val.interpret_as.map(Cow::Owned),
+        }
+    }
+}
+
+impl CliDataAnnotation {
+    /// Instantiate a new `CliDataAnnotation` with text only.
+    #[inline]
+    #[must_use]
+    pub fn new_text<T: Into<String>>(text: T) -> Self {
+        Self {
+            text: Some(text.into()),
+            markup: None,
+            interpret_as: None,
+        }
+    }
+
+    /// Instantiate a new `CliDataAnnotation` with markup only.
+    #[inline]
+    #[must_use]
+    pub fn new_markup<M: Into<String>>(markup: M) -> Self {
+        Self {
+            text: None,
+            markup: Some(markup.into()),
+            interpret_as: None,
+        }
+    }
+
+    /// Instantiate a new `CliDataAnnotation` with markup and its
+    /// interpretation.
+    #[inline]
+    #[must_use]
+    pub fn new_interpreted_markup<M: Into<String>, I: Into<String>>(
+        markup: M,
+        interpret_as: I,
+    ) -> Self {
+        Self {
+            interpret_as: Some(interpret_as.into()),
+            markup: Some(markup.into()),
+            text: None,
+        }
+    }
+
+    /// Return the text or markup within the data annotation.
+    ///
+    /// # Errors
+    ///
+    /// If this data annotation does not contain text or markup.
+    pub fn try_get_text(&self) -> Result<String> {
+        if let Some(ref text) = self.text {
+            Ok(text.clone())
+        } else if let Some(ref markup) = self.markup {
+            Ok(markup.clone())
+        } else {
+            Err(Error::InvalidDataAnnotation(format!(
+                "missing either text or markup field in {self:?}"
+            )))
+        }
+    }
+}
+
+impl<T: Into<CliDataAnnotation>> FromIterator<T> for CliData {
+    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
+        let annotation = iter.into_iter().map(std::convert::Into::into).collect();
+        CliData { annotation }
+    }
+}
+
+#[cfg(feature = "cli")]
+impl std::str::FromStr for CliData {
+    type Err = Error;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let v: Self = serde_json::from_str(s)?;
+        Ok(v)
+    }
+}
diff --git a/src/cli/completions.rs b/src/cli/completions.rs
new file mode 100644
index 0000000..5f9d2ec
--- /dev/null
+++ b/src/cli/completions.rs
@@ -0,0 +1,115 @@
+//! Completion scripts generation with [`clap_complete`].
+
+use crate::{api::server::ServerClient, error::Result};
+use clap::Parser;
+use clap_complete::{generate, shells::Shell};
+use std::io::Write;
+use termcolor::StandardStream;
+
+use super::ExecuteSubcommand;
+
+/// Command structure to generate completion scripts.
+#[derive(Debug, Parser)]
+#[command(
+    about = "Generate tab-completion scripts for supported shells",
+    after_help = "Use --help for installation help.",
+    after_long_help = COMPLETIONS_HELP
+)]
+pub struct Command {
+    /// Shell for which the completion script is generated.
+    #[arg(value_enum, ignore_case = true)]
+    shell: Shell,
+}
+
+impl Command {
+    /// Generate completion file for current shell and write to buffer.
+    pub fn generate_completion_file<F, W>(&self, build_cli: F, buffer: &mut W)
+    where
+        F: FnOnce() -> clap::Command,
+        W: Write,
+    {
+        generate(self.shell, &mut build_cli(), "ltrs", buffer);
+    }
+}
+
+impl ExecuteSubcommand for Command {
+    /// Executes the `completions` subcommand.
+    async fn execute(self, mut stdout: StandardStream, _: ServerClient) -> Result<()> {
+        self.generate_completion_file(super::build_cli, &mut stdout);
+        Ok(())
+    }
+}
+
+pub(crate) static COMPLETIONS_HELP: &str = r"DISCUSSION:
+    Enable tab completion for Bash, Fish, Zsh, or PowerShell
+    Elvish shell completion is currently supported, but not documented below.
+    The script is output on `stdout`, allowing one to re-direct the
+    output to the file of their choosing. Where you place the file
+    will depend on which shell, and which operating system you are
+    using. Your particular configuration may also determine where
+    these scripts need to be placed.
+    Here are some common set ups for the three supported shells under
+    Unix and similar operating systems (such as GNU/Linux).
+    BASH:
+    Completion files are commonly stored in `/etc/bash_completion.d/` for
+    system-wide commands, but can be stored in
+    `~/.local/share/bash-completion/completions` for user-specific commands.
+    Run the command:
+        $ mkdir -p ~/.local/share/bash-completion/completions
+        $ ltrs completions bash >> ~/.local/share/bash-completion/completions/ltrs
+    This installs the completion script. You may have to log out and
+    log back in to your shell session for the changes to take effect.
+    BASH (macOS/Homebrew):
+    Homebrew stores bash completion files within the Homebrew directory.
+    With the `bash-completion` brew formula installed, run the command:
+        $ mkdir -p $(brew --prefix)/etc/bash_completion.d
+        $ ltrs completions bash > $(brew --prefix)/etc/bash_completion.d/ltrs.bash-completion
+    FISH:
+    Fish completion files are commonly stored in
+    `$HOME/.config/fish/completions`. Run the command:
+        $ mkdir -p ~/.config/fish/completions
+        $ ltrs completions fish > ~/.config/fish/completions/ltrs.fish
+    This installs the completion script. You may have to log out and
+    log back in to your shell session for the changes to take effect.
+    ZSH:
+    ZSH completions are commonly stored in any directory listed in
+    your `$fpath` variable. To use these completions, you must either
+    add the generated script to one of those directories, or add your
+    own to this list.
+    Adding a custom directory is often the safest bet if you are
+    unsure of which directory to use. First create the directory; for
+    this example we'll create a hidden directory inside our `$HOME`
+    directory:
+        $ mkdir ~/.zfunc
+    Then add the following lines to your `.zshrc` just before
+    `compinit`:
+        fpath+=~/.zfunc
+    Now you can install the completions script using the following
+    command:
+        $ ltrs completions zsh > ~/.zfunc/_ltrs
+    You must then either log out and log back in, or simply run
+        $ exec zsh
+    for the new completions to take effect.
+    CUSTOM LOCATIONS:
+    Alternatively, you could save these files to the place of your
+    choosing, such as a custom directory inside your $HOME. Doing so
+    will require you to add the proper directives, such as `source`ing
+    inside your login script. Consult your shells documentation for
+    how to add such directives.
+    POWERSHELL:
+    The powershell completion scripts require PowerShell v5.0+ (which
+    comes with Windows 10, but can be downloaded separately for windows 7
+    or 8.1).
+    First, check if a profile has already been set
+        PS C:\> Test-Path $profile
+    If the above command returns `False` run the following
+        PS C:\> New-Item -path $profile -type file -force
+    Now open the file provided by `$profile` (if you used the
+    `New-Item` command it will be
+    `${env:USERPROFILE}\Documents\WindowsPowerShell\Microsoft.PowerShell_profile.ps1`
+    Next, we either save the completions file into our profile, or
+    into a separate file and source it inside our profile. To save the
+    completions into our profile simply use
+        PS C:\> ltrs completions powershell >> ${env:USERPROFILE}\Documents\WindowsPowerShell\Microsoft.PowerShell_profile.ps1
+    SOURCE:
+    This documentation is directly taken from: https://github.com/rust-lang/rustup/blob/8f6b53628ad996ad86f9c6225fa500cddf860905/src/cli/help.rs#L157";
diff --git a/src/docker.rs b/src/cli/docker.rs
similarity index 70%
rename from src/docker.rs
rename to src/cli/docker.rs
index a040d41..eaf3a47 100644
--- a/src/docker.rs
+++ b/src/cli/docker.rs
@@ -1,61 +1,52 @@
 //! Structures and methods to easily manipulate Docker images, especially for
 //! LanguageTool applications.
 
-use std::process::{Command, Output, Stdio};
+use std::process::{self, Output, Stdio};
 
-#[cfg(feature = "cli")]
 use clap::{Args, Parser};
+use termcolor::StandardStream;
 
-use crate::error::{exit_status_error, Error, Result};
+use crate::{
+    api::server::ServerClient,
+    error::{exit_status_error, Error, Result},
+};
+
+use super::ExecuteSubcommand;
 
 /// Commands to pull, start and stop a `LanguageTool` container using Docker.
-#[cfg_attr(feature = "cli", derive(Args))]
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Args)]
 pub struct Docker {
     /// Image or repository from a registry.
-    #[cfg_attr(
-        feature = "cli",
-        clap(
-            default_value = "erikvl87/languagetool",
-            env = "LANGUAGETOOL_DOCKER_IMAGE"
-        )
+    #[clap(
+        default_value = "erikvl87/languagetool",
+        env = "LANGUAGETOOL_DOCKER_IMAGE"
     )]
     name: String,
     /// Path to Docker's binaries.
-    #[cfg_attr(
-        feature = "cli",
-        clap(
-            short = 'b',
-            long,
-            default_value = "docker",
-            env = "LANGUAGETOOL_DOCKER_BIN"
-        )
+    #[clap(
+        short = 'b',
+        long,
+        default_value = "docker",
+        env = "LANGUAGETOOL_DOCKER_BIN"
    )]
     bin: String,
     /// Name assigned to the container.
-    #[cfg_attr(
-        feature = "cli",
-        clap(long, default_value = "languagetool", env = "LANGUAGETOOL_DOCKER_NAME")
-    )]
+    #[clap(long, default_value = "languagetool", env = "LANGUAGETOOL_DOCKER_NAME")]
     container_name: String,
     /// Publish a container's port(s) to the host.
-    #[cfg_attr(
-        feature = "cli",
-        clap(
-            short = 'p',
-            long,
-            default_value = "8010:8010",
-            env = "LANGUAGETOOL_DOCKER_PORT"
-        )
+    #[clap(
+        short = 'p',
+        long,
+        default_value = "8010:8010",
+        env = "LANGUAGETOOL_DOCKER_PORT"
    )]
     port: String,
     /// Docker action.
-    #[cfg_attr(feature = "cli", clap(subcommand))]
+    #[clap(subcommand)]
    action: Action,
 }
 
-#[cfg_attr(feature = "cli", derive(clap::Subcommand))]
-#[derive(Clone, Debug)]
+#[derive(clap::Subcommand, Clone, Debug)]
 /// Enumerate supported Docker actions.
 enum Action {
     /// Pull a docker docker image.
@@ -76,7 +67,7 @@ enum Action {
 
 impl Docker {
     /// Pull a Docker image from the given repository/file/...
     pub fn pull(&self) -> Result<Output> {
-        let output = Command::new(&self.bin)
+        let output = process::Command::new(&self.bin)
             .args(["pull", &self.name])
             .stdout(Stdio::inherit())
             .stderr(Stdio::inherit())
@@ -90,7 +81,7 @@ impl Docker {
 
     /// Start a Docker container with given specifications.
     pub fn start(&self) -> Result<Output> {
-        let output = Command::new(&self.bin)
+        let output = process::Command::new(&self.bin)
             .args([
                 "run",
                 "--rm",
@@ -113,7 +104,7 @@ impl Docker {
 
     /// Stop the latest Docker container with the given name.
     pub fn stop(&self) -> Result<Output> {
-        let output = Command::new(&self.bin)
+        let output = process::Command::new(&self.bin)
             .args([
                 "ps",
                 "-l",
@@ -132,7 +123,7 @@ impl Docker {
             .filter(|c| c.is_alphanumeric()) // This avoids newlines
             .collect();
 
-        let output = Command::new(&self.bin)
+        let output = process::Command::new(&self.bin)
             .args(["kill", &docker_id])
             .stdout(Stdio::inherit())
             .stderr(Stdio::inherit())
@@ -154,21 +145,16 @@ impl Docker {
 }
 
 /// Commands to easily run a LanguageTool server with Docker.
-#[cfg(feature = "cli")]
 #[derive(Debug, Parser)]
-pub struct DockerCommand {
+pub struct Command {
     /// Actual command arguments.
     #[command(flatten)]
     pub docker: Docker,
 }
 
-#[cfg(feature = "cli")]
-impl DockerCommand {
-    /// Execute a Docker command and write output to stdout.
-    pub fn execute<W>(&self, _stdout: &mut W) -> Result<()>
-    where
-        W: std::io::Write,
-    {
+impl ExecuteSubcommand for Command {
+    /// Execute the `docker` subcommand.
+    async fn execute(self, _stdout: StandardStream, _: ServerClient) -> Result<()> {
         self.docker.run_action()?;
         Ok(())
     }
diff --git a/src/cli/languages.rs b/src/cli/languages.rs
new file mode 100644
index 0000000..a8d2a13
--- /dev/null
+++ b/src/cli/languages.rs
@@ -0,0 +1,21 @@
+use clap::Parser;
+use std::io::Write;
+use termcolor::StandardStream;
+
+use crate::{api::server::ServerClient, error::Result};
+
+use super::ExecuteSubcommand;
+
+#[derive(Debug, Parser)]
+pub struct Command {}
+
+impl ExecuteSubcommand for Command {
+    /// Executes the `languages` subcommand.
+    async fn execute(self, mut stdout: StandardStream, server_client: ServerClient) -> Result<()> {
+        let languages_response = server_client.languages().await?;
+        let languages = serde_json::to_string_pretty(&languages_response)?;
+
+        writeln!(&mut stdout, "{languages}")?;
+        Ok(())
+    }
+}
diff --git a/src/cli/mod.rs b/src/cli/mod.rs
new file mode 100644
index 0000000..d458cfe
--- /dev/null
+++ b/src/cli/mod.rs
@@ -0,0 +1,149 @@
+//! Command line tools.
+//!
+//! This module is specifically designed to be used by LTRS's binary target.
+//! It contains all the content needed to create LTRS's command line interface.
+
+pub mod check;
+#[cfg(feature = "cli-complete")]
+mod completions;
+#[cfg(feature = "docker")]
+mod docker;
+mod languages;
+mod ping;
+mod words;
+
+use std::io;
+
+use clap::{CommandFactory, Parser, Subcommand};
+#[cfg(feature = "cli")]
+use enum_dispatch::enum_dispatch;
+use is_terminal::IsTerminal;
+#[cfg(feature = "annotate")]
+use termcolor::{ColorChoice, StandardStream};
+
+#[cfg(feature = "docker")]
+pub use docker::Docker;
+
+use crate::{
+    api::server::{ServerCli, ServerClient},
+    error::Result,
+};
+
+/// Read lines from standard input and write to buffer string.
+///
+/// Standard output is used when waiting for user to input text.
+fn read_from_stdin<W>(stdout: &mut W, buffer: &mut String) -> Result<()>
+where
+    W: io::Write,
+{
+    if io::stdin().is_terminal() {
+        #[cfg(windows)]
+        writeln!(
+            stdout,
+            "Reading from STDIN, press [CTRL+Z] when you're done."
+        )?;
+
+        #[cfg(unix)]
+        writeln!(
+            stdout,
+            "Reading from STDIN, press [CTRL+D] when you're done."
+        )?;
+    }
+    let stdin = std::io::stdin();
+
+    while stdin.read_line(buffer)? > 0 {}
+    Ok(())
+}
+
+/// Main command line structure. Contains every subcommand.
+#[derive(Parser, Debug)]
+#[command(
+    author,
+    version,
+    about = "LanguageTool API bindings in Rust.",
+    propagate_version(true),
+    subcommand_required(true),
+    verbatim_doc_comment
+)]
+pub struct Cli {
+    /// Specify WHEN to colorize output.
+    #[arg(short, long, value_name = "WHEN", default_value = "auto", default_missing_value = "always", num_args(0..=1), require_equals(true))]
+    pub color: clap::ColorChoice,
+    /// [`ServerCli`] arguments.
+    #[command(flatten, next_help_heading = "Server options")]
+    pub server_cli: ServerCli,
+    /// Subcommand.
+    #[command(subcommand)]
+    #[allow(missing_docs)]
+    pub command: Command,
+}
+
+/// All possible subcommands.
+#[derive(Subcommand, Debug)]
+#[enum_dispatch]
+#[allow(missing_docs)]
+pub enum Command {
+    /// Check text using LanguageTool server.
+    Check(check::Command),
+    /// Commands to easily run a LanguageTool server with Docker.
+    #[cfg(feature = "docker")]
+    Docker(docker::Command),
+    /// Return list of supported languages.
+    #[clap(visible_alias = "lang")]
+    Languages(languages::Command),
+    /// Ping the LanguageTool server and return time elapsed in ms if success.
+    Ping(ping::Command),
+    /// Retrieve some user's words list, or add / delete word from it.
+    Words(words::Command),
+    /// Generate tab-completion scripts for supported shells
+    #[cfg(feature = "cli-complete")]
+    Completions(completions::Command),
+}
+
+/// Provides a common interface for executing the subcommands.
+#[enum_dispatch(Command)]
+trait ExecuteSubcommand {
+    /// Executes the subcommand.
+    async fn execute(self, stdout: StandardStream, server_client: ServerClient) -> Result<()>;
+}
+
+impl Cli {
+    /// Return a standard output stream that optionally supports color.
+    #[must_use]
+    fn stdout(&self) -> StandardStream {
+        let mut choice: ColorChoice = match self.color {
+            clap::ColorChoice::Auto => ColorChoice::Auto,
+            clap::ColorChoice::Always => ColorChoice::Always,
+            clap::ColorChoice::Never => ColorChoice::Never,
+        };
+
+        if choice == ColorChoice::Auto && !io::stdout().is_terminal() {
+            choice = ColorChoice::Never;
+        }
+
+        StandardStream::stdout(choice)
+    }
+
+    /// Execute command, possibly returning an error.
+    pub async fn execute(self) -> Result<()> {
+        let stdout = self.stdout();
+        let server_client: ServerClient = self.server_cli.into();
+
+        self.command.execute(stdout, server_client).await
+    }
+}
+
+/// Build a command from the top-level command line structure.
+#[must_use]
+pub fn build_cli() -> clap::Command {
+    Cli::command()
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_cli() {
+        Cli::command().debug_assert();
+    }
+}
diff --git a/src/cli/ping.rs b/src/cli/ping.rs
new file mode 100644
index 0000000..d32d3a7
--- /dev/null
+++ b/src/cli/ping.rs
@@ -0,0 +1,20 @@
+use clap::Parser;
+use std::io::Write;
+use termcolor::StandardStream;
+
+use crate::{api::server::ServerClient, error::Result};
+
+use super::ExecuteSubcommand;
+
+#[derive(Debug, Parser)]
+pub struct Command {}
+
+impl ExecuteSubcommand for Command {
+    /// Executes the `ping` subcommand.
+    async fn execute(self, mut stdout: StandardStream, server_client: ServerClient) -> Result<()> {
+        let ping = server_client.ping().await?;
+
+        writeln!(&mut stdout, "PONG! Delay: {ping} ms")?;
+        Ok(())
+    }
+}
diff --git a/src/cli/words.rs b/src/cli/words.rs
new file mode 100644
index 0000000..f87aa1b
--- /dev/null
+++ b/src/cli/words.rs
@@ -0,0 +1,55 @@
+use clap::{Parser, Subcommand};
+use std::io::Write;
+use termcolor::StandardStream;
+
+use crate::{
+    api::{self, server::ServerClient, words::RequestArgs},
+    error::Result,
+};
+
+use super::ExecuteSubcommand;
+
+/// Retrieve some user's words list.
+#[derive(Debug, Parser)]
+#[clap(args_conflicts_with_subcommands = true)]
+#[clap(subcommand_negates_reqs = true)]
+pub struct Command {
+    /// Actual GET request.
+    #[command(flatten)]
+    pub request: RequestArgs,
+    /// Optional subcommand.
+    #[command(subcommand)]
+    pub subcommand: Option<WordsSubcommand>,
+}
+
+/// Words' optional subcommand.
+#[derive(Clone, Debug, Subcommand)]
+pub enum WordsSubcommand {
+    /// Add a word to some user's list.
+    Add(api::words::add::Request),
+    /// Remove a word from some user's list.
+    Delete(api::words::delete::Request),
+}
+
+impl ExecuteSubcommand for Command {
+    /// Executes the `words` subcommand.
+    async fn execute(self, mut stdout: StandardStream, server_client: ServerClient) -> Result<()> {
+        let words = match self.subcommand {
+            Some(WordsSubcommand::Add(request)) => {
+                let words_response = server_client.words_add(&request).await?;
+                serde_json::to_string_pretty(&words_response)?
+            },
+            Some(WordsSubcommand::Delete(request)) => {
+                let words_response = server_client.words_delete(&request).await?;
+                serde_json::to_string_pretty(&words_response)?
+            },
+            None => {
+                let words_response = server_client.words(&self.request.into()).await?;
+                serde_json::to_string_pretty(&words_response)?
+            },
+        };
+
+        writeln!(&mut stdout, "{words}")?;
+        Ok(())
+    }
+}
diff --git a/src/lib.rs b/src/lib.rs
index a28f029..4d8f386 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -20,9 +20,4 @@ pub mod api;
 
 #[cfg(feature = "cli")]
 pub mod cli;
-#[cfg(feature = "docker")]
-pub mod docker;
 pub mod error;
-
-#[cfg(feature = "docker")]
-pub use crate::docker::Docker;
diff --git a/tests/match_positions.rs b/tests/match_positions.rs
index 944166c..cc5d814 100644
--- a/tests/match_positions.rs
+++ b/tests/match_positions.rs
@@ -7,7 +7,7 @@ macro_rules! test_match_positions {
     async fn $name() -> Result<(), Box<dyn std::error::Error>> {
         let client = ServerClient::from_env_or_default();
 
-        let req = check::Request::default().with_text($text.to_string());
+        let req = check::Request::default().with_text($text);
         let resp = client.check(&req).await.unwrap();
         let resp = check::ResponseWithContext::new(req.get_text(), resp);
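
For orientation, a minimal sketch of how the new owned, CLI-side types in `src/cli/check.rs` are meant to flow into the borrowed `api::check` types through the `From` impls added above. The module paths assume the `cli` feature is enabled and that these items are exported as shown in the diff; the `main` scaffolding and sample strings are illustrative only, not part of the patch.

```rust
use languagetool_rust::{
    api::check::{Data, Request},
    cli::check::{CliData, CliDataAnnotation, CliRequest},
};

fn main() {
    // Owned request built on the CLI side (what clap parses into)...
    let cli_req = CliRequest::new().with_text("This is an example sentence.");

    // ...converted into the borrowed API request just before it is sent.
    let req: Request = cli_req.into();
    assert_eq!(req.get_text(), "This is an example sentence.");

    // Markup follows the same pattern: owned annotations first, then `Data`.
    let _data: Data = CliData::from_iter([
        CliDataAnnotation::new_interpreted_markup("<p>", "\n\n"),
        CliDataAnnotation::new_text("A test"),
    ])
    .into();
}
```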