fix: clippy errors and small typos in write function #36

Merged: 1 commit, Jul 16, 2024
2 changes: 1 addition & 1 deletion Cargo.lock

Some generated files are not rendered by default.

20 changes: 5 additions & 15 deletions src/crypto.rs
@@ -1,6 +1,5 @@
use super::model::Entity;
use crate::{model::TripleMask, rdf_types::*};
use blake3;

pub trait Pseudonymize {
// Pseudonymize parts of a triple set by its mask
@@ -26,9 +25,9 @@ pub trait Pseudonymize {

fn pseudo_entity(&self, e: &Entity) -> Entity {
match e {
Entity::Literal(l) => Entity::Literal(self.pseudo_literal(&l)),
Entity::NamedNode(n) => Entity::NamedNode(self.pseudo_named_node(&n)),
Entity::BlankNode(b) => Entity::BlankNode(self.pseudo_blank_node(&b)),
Entity::Literal(l) => Entity::Literal(self.pseudo_literal(l)),
Entity::NamedNode(n) => Entity::NamedNode(self.pseudo_named_node(n)),
Entity::BlankNode(b) => Entity::BlankNode(self.pseudo_blank_node(b)),
}
}
// private methods? Blanket implementations
@@ -42,17 +41,8 @@ pub trait Pseudonymize {
// return u.clone()
}

pub struct DefaultHasher {
hasher: blake3::Hasher,
}

impl DefaultHasher {
pub fn new() -> Self {
return DefaultHasher {
hasher: blake3::Hasher::new(),
};
}
}
#[derive(Default)]
pub struct DefaultHasher {}

impl Pseudonymize for DefaultHasher {
fn pseudo_named_node(&self, t: &NamedNode) -> NamedNode {
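Note: dropping the hand-written constructor in favour of #[derive(Default)] on a field-less struct is the shape clippy's new_without_default lint nudges code towards (a public new() with no Default impl). A minimal stand-alone sketch of the idea, using a hypothetical type rather than this crate's DefaultHasher:

    // Deriving Default lets callers construct the value without a custom new().
    #[derive(Default)]
    pub struct ExampleHasher;

    fn main() {
        // Both lines build the same zero-sized value.
        let _a = ExampleHasher::default();
        let _b: ExampleHasher = Default::default();
    }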
6 changes: 2 additions & 4 deletions src/io.rs
@@ -1,10 +1,8 @@
use crate::rules::Rules;
use rio_turtle::NTriplesParser;
use serde_yml;
use std::{
boxed::Box,
fs::File,
io::{self, stdin, stdout, BufRead, BufReader, BufWriter, Write},
io::{self, stdin, stdout, BufRead, BufReader, BufWriter},
path::Path,
};

@@ -46,7 +44,7 @@ pub fn parse_ntriples(reader: impl BufRead) -> NTriplesParser<impl BufRead> {

// Parse yaml configuration file.
pub fn parse_config(path: &Path) -> Rules {
return match File::open(&path) {
return match File::open(path) {
Ok(file) => serde_yml::from_reader(file).expect("Error parsing config file."),
Err(e) => panic!("Cannot open file '{:?}': '{}'.", path, e),
};
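Aside: File::open(&path) becoming File::open(path) looks like clippy's needless_borrow class of fixes; path is already a &Path, so the extra & only adds another layer of reference. A small illustration with a hypothetical helper, not code from this crate:

    use std::path::Path;

    // `p` is already a reference; `file_name_len(&p)` would still compile through
    // auto-deref, but the extra borrow is redundant and clippy flags it.
    fn file_name_len(p: &Path) -> usize {
        p.file_name().map(|n| n.len()).unwrap_or(0)
    }

    fn main() {
        let p = Path::new("data/input.nt");
        println!("{}", file_name_len(p));
    }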
1 change: 0 additions & 1 deletion src/log.rs
@@ -1,5 +1,4 @@
use slog::{self, o, Drain};
use slog_async;
use std::{io, sync::Arc};

pub type Logger = slog::Logger;
2 changes: 1 addition & 1 deletion src/main.rs
@@ -35,7 +35,7 @@ struct IndexArgs {

/// File descriptor to read triples from.
/// Defaults to `stdin`.
#[arg(default_value = "-")]
Member:

question: are there any issues with having input as a positional argument? i.e.

index data.nt
instead of
index -i input.nt

Also, this would set -i as the short option, which kind of clashes with the index option in pseudo -i.

Contributor Author:

Ah yeah, that might be better: make it --input and --index. I am not a big fan of using abbreviations anyway.
Or -i and -x for index. @cmdoret: can you do a PR on this?

#[arg(short, long, default_value = "-")]
input: PathBuf,
}

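Aside on the review thread above: with clap's derive API (where the #[arg(...)] attribute comes from), the two spellings under discussion look roughly like the sketch below. The struct and field names here are hypothetical, and the explicit short = 'x' is just one way a future index flag could avoid clashing with -i:

    use clap::Parser;
    use std::path::PathBuf;

    // Positional form from the comment: `index data.nt`
    #[derive(Parser)]
    struct IndexPositional {
        /// File to read triples from; "-" means stdin.
        #[arg(default_value = "-")]
        input: PathBuf,
    }

    // Flag form as merged: `index -i data.nt` or `index --input data.nt`
    #[derive(Parser)]
    struct IndexFlags {
        /// File to read triples from; "-" means stdin.
        #[arg(short, long, default_value = "-")]
        input: PathBuf,

        /// Hypothetical second option with an explicit short, so -i stays free.
        #[arg(short = 'x', long)]
        index: Option<PathBuf>,
    }

    fn main() {
        let args = IndexFlags::parse();
        println!("input: {}", args.input.display());
    }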
1 change: 0 additions & 1 deletion src/model.rs
@@ -1,7 +1,6 @@
use std::hash::Hash;

use crate::rdf_types::*;
use bitflags;

#[derive(Eq, PartialEq, Debug, Clone, Hash)]
pub enum Entity {
38 changes: 23 additions & 15 deletions src/pass_first.rs
@@ -1,30 +1,38 @@
use rio_api::{model::Triple, parser::TriplesParser};
use rio_api::parser::TriplesParser;
use rio_turtle::TurtleError;
use std::{
io::{stdin, BufRead, BufReader, Write},
path::Path,
};

use crate::io;
use std::{io::Write, path::Path};

use crate::{
io,
rdf_types::{Triple, TripleView},
};
fn index_triple(t: Triple, out: &mut impl Write) -> Result<(), TurtleError> {
match t.predicate.iri {
"http://www.w3.org/1999/02/22-rdf-syntax-ns#type" => {
let _ = out.write(&format!("{} .\n", &t.to_string()).into_bytes());
}
_ => {}
}

Ok(())
}
fn index_triple(t: Triple, out: &mut impl Write) {
if t.predicate.iri.as_str() == "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" {
let r = || -> std::io::Result<()> {
out.write_all(t.to_string().as_bytes())?;
out.write_all(b" .\n")
}();

if let Err(e) = r {
panic!("Error writting to out buffer: {e}");
}
}
}

pub fn create_type_map(input: &Path, output: &Path) {
let buf_in = io::get_reader(input);
let mut buf_out = io::get_writer(output);
let mut triples = io::parse_ntriples(buf_in);

while !triples.is_end() {
triples
.parse_step(&mut |t| index_triple(t, &mut buf_out))
.unwrap();
let _ = triples
.parse_step(&mut |t: TripleView| {
index_triple(t.into(), &mut buf_out);
Result::<(), TurtleError>::Ok(())
})
.inspect_err(|e| {
panic!("Parsing error occured: {e}");
});
}
}
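Note: the write to write_all change in this file (and the same pattern in src/pass_second.rs below) is presumably about clippy's unused_io_amount lint: Write::write may perform a short write and returns how many bytes actually went out, a value the old code discarded. A stand-alone sketch, not code from this repo:

    use std::io::Write;

    fn write_line(out: &mut impl Write, line: &str) -> std::io::Result<()> {
        // write_all retries until the whole buffer is written (or an error occurs),
        // so the caller does not need to check a byte count.
        out.write_all(line.as_bytes())?;
        out.write_all(b" .\n")
    }

    fn main() -> std::io::Result<()> {
        let mut buf = Vec::new();
        write_line(&mut buf, "<s> <p> <o>")?;
        assert_eq!(buf, b"<s> <p> <o> .\n".to_vec());
        Ok(())
    }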
36 changes: 21 additions & 15 deletions src/pass_second.rs
@@ -2,7 +2,6 @@ use rio_api::parser::TriplesParser;
use rio_turtle::TurtleError;
use std::{
collections::HashMap,
fmt::{Debug, Display},
io::{BufRead, Write},
path::Path,
};
@@ -38,13 +37,18 @@ fn process_triple(
rules_config: &Rules,
node_to_type: &HashMap<String, String>,
out: &mut impl Write,
) -> Result<(), TurtleError> {
let mask = match_rules(triple.clone(), &rules_config, &node_to_type);
let hasher = DefaultHasher::new();
let _ =
out.write(&format!("{} .\n", hasher.pseudo_triple(&triple, mask).to_string()).into_bytes());

Ok(())
) {
let mask = match_rules(triple.clone(), rules_config, node_to_type);
let hasher = DefaultHasher::default();

let r = || -> std::io::Result<()> {
out.write_all(hasher.pseudo_triple(&triple, mask).to_string().as_bytes())?;
out.write_all(b" .\n")
}();

if let Err(e) = r {
panic!("Error writting to out buffer: {e}");
}
}

// Create a index mapping node -> type from an input ntriples buffer
@@ -55,8 +59,8 @@ fn load_type_map(input: impl BufRead) -> HashMap<String, String> {
while !triples.is_end() {
let _: Result<(), TurtleError> = triples.parse_step(&mut |t| {
node_to_type.insert(
t.subject.to_string().replace(&['<', '>'], ""),
t.object.to_string().replace(&['<', '>'], ""),
t.subject.to_string().replace(['<', '>'], ""),
t.object.to_string().replace(['<', '>'], ""),
);
Ok(())
});
@@ -65,7 +69,7 @@ pub fn pseudonymize_graph(log: &Logger, input: &Path, config: &Path, output: &Pa
return node_to_type;
}

pub fn pseudonymize_graph(log: &Logger, input: &Path, config: &Path, output: &Path, index: &Path) {
pub fn pseudonymize_graph(_: &Logger, input: &Path, config: &Path, output: &Path, index: &Path) {
let buf_input = io::get_reader(input);
let buf_index = io::get_reader(index);
let mut buf_output = io::get_writer(output);
@@ -75,13 +79,15 @@ pub fn pseudonymize_graph(log: &Logger, input: &Path, config: &Path, output: &Pa

let mut triples = io::parse_ntriples(buf_input);

// TODO: Try to make this into an iterator loop to leverage rayons parallelization feature over
// iterators.

// Run the loop single-threaded.
while !triples.is_end() {
triples
.parse_step(&mut |t| {
process_triple(t.into(), &rules_config, &node_to_type, &mut buf_output)
.parse_step(&mut |t: TripleView| {
process_triple(t.into(), &rules_config, &node_to_type, &mut buf_output);
Result::<(), TurtleError>::Ok(())
})
.inspect_err(|e| {
panic!("Parsing error occured: {e}");
})
.unwrap();
}
33 changes: 13 additions & 20 deletions src/rdf_types.rs
@@ -1,18 +1,17 @@
use super::model::{Entity, TripleMask};
use rio_api;
use std::{fmt, fmt::Write, ops::Sub};
use super::model::Entity;
use std::{fmt, fmt::Write};

// Rewrite all the rio types to be able to instanciate triples
// Rename rio types as XXXView to distinguish them from our types
// Use rio types for parsing and serializing
// Define mappers between the two types
//
type NamedNodeView<'a> = rio_api::model::NamedNode<'a>;
type LiteralView<'a> = rio_api::model::Literal<'a>;
type TermView<'a> = rio_api::model::Term<'a>;
type TripleView<'a> = rio_api::model::Triple<'a>;
type BlankNodeView<'a> = rio_api::model::BlankNode<'a>;
type SubjectView<'a> = rio_api::model::Subject<'a>;
pub type NamedNodeView<'a> = rio_api::model::NamedNode<'a>;
pub type LiteralView<'a> = rio_api::model::Literal<'a>;
pub type TermView<'a> = rio_api::model::Term<'a>;
pub type TripleView<'a> = rio_api::model::Triple<'a>;
pub type BlankNodeView<'a> = rio_api::model::BlankNode<'a>;
pub type SubjectView<'a> = rio_api::model::Subject<'a>;

#[derive(Eq, PartialEq, Debug, Clone, Hash)]
pub struct Triple {
@@ -130,16 +129,10 @@ impl fmt::Display for BlankNode {

impl<'a> From<TripleView<'a>> for Triple {
fn from(t: TripleView<'a>) -> Self {
match t {
TripleView {
subject,
predicate,
object,
} => Triple {
subject: subject.into(),
predicate: predicate.into(),
object: object.into(),
},
Triple {
subject: t.subject.into(),
predicate: t.predicate.into(),
object: t.object.into(),
}
}
}
@@ -247,7 +240,7 @@ impl From<Entity> for Term {
}

#[inline]
fn fmt_quoted_str(string: &String, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fn fmt_quoted_str(string: &str, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_char('"')?;
for c in string.chars() {
match c {
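Note: fmt_quoted_str now taking &str instead of &String matches clippy's ptr_arg lint; a &str parameter accepts strictly more call sites. A tiny sketch with a hypothetical helper:

    // &str works for owned Strings (via deref coercion) and string literals alike.
    fn quote(s: &str) -> String {
        format!("\"{s}\"")
    }

    fn main() {
        let owned = String::from("abc");
        println!("{}", quote(&owned)); // &String coerces to &str
        println!("{}", quote("abc"));  // literal works directly
    }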
18 changes: 8 additions & 10 deletions src/rules.rs
@@ -50,7 +50,7 @@ pub fn match_type_rule_subject(
) -> TripleMask {
match subject {
Subject::NamedNode(n) => {
return mask | match_type_rule_named_node(true, &n, mask, rules, type_map);
return mask | match_type_rule_named_node(true, n, mask, rules, type_map);
}
Subject::BlankNode(_) => return mask,
}
@@ -64,21 +64,19 @@ pub fn match_type_rule_object(
) -> TripleMask {
match object {
Term::NamedNode(n) => {
return mask | match_type_rule_named_node(false, &n, mask, rules, type_map);
return mask | match_type_rule_named_node(false, n, mask, rules, type_map);
}
_ => return mask,
}
}

pub fn match_predicate_rule(predicate: &NamedNode, mask: TripleMask, rules: &Rules) -> TripleMask {
match predicate {
NamedNode { iri: n } => {
if rules.replace_value_of_predicate.contains(n) {
return mask | TripleMask::OBJECT;
} else {
return mask;
}
}
let NamedNode { iri: i } = predicate;

if rules.replace_value_of_predicate.contains(i) {
return mask | TripleMask::OBJECT;
} else {
return mask;
}
}

3 changes: 1 addition & 2 deletions tools/lint-rust.sh
@@ -12,9 +12,8 @@ cargo --version
cargo clippy --version

print_info "Run Rust Clippy linter."
print_warning "Currently warnings are not errors!"

cargo clippy --no-deps -- -A clippy::needless_return "$@" ||
cargo clippy --no-deps -- -D warnings -A clippy::needless_return "$@" ||
{
git diff --name-status || true
die "Rust clippy failed."