Skip to content

Commit

Permalink
Merge pull request #68 from Lurk/housekeeping
Browse files Browse the repository at this point in the history
Housekeeping
  • Loading branch information
Lurk authored Nov 20, 2024
2 parents ea0e604 + aabdde4 commit 9e52236
Show file tree
Hide file tree
Showing 7 changed files with 74 additions and 6 deletions.
6 changes: 3 additions & 3 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,17 +1,17 @@
[package]
name = "yamd"
description = "Yet Another Markdown Document (flavour)"
version = "0.14.0"
version = "0.14.1"
edition = "2021"
license = "MIT OR Apache-2.0"
repository = "https://github.com/Lurk/yamd"
readme = "README.md"
keywords = ["markdown", "parser"]

[dependencies]
serde = { version = "1.0.197", features = ["derive"] }
serde = { version = "1.0.215", features = ["derive"] }

[dev-dependencies]
pretty_assertions = "1.4.0"
pretty_assertions = "1.4.1"


4 changes: 4 additions & 0 deletions fuzz/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
target
corpus
artifacts
coverage
21 changes: 21 additions & 0 deletions fuzz/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
[package]
name = "yamd-fuzz"
version = "0.0.0"
publish = false
edition = "2021"

[package.metadata]
cargo-fuzz = true

[dependencies]
libfuzzer-sys = "0.4"

[dependencies.yamd]
path = ".."

[[bin]]
name = "deserialize"
path = "fuzz_targets/deserialize.rs"
test = false
doc = false
bench = false
9 changes: 9 additions & 0 deletions fuzz/fuzz_targets/deserialize.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
// Fuzz harness for the yamd parser: feeds arbitrary bytes to
// `yamd::deserialize` to shake out panics/crashes in the parser.
// Built and run via cargo-fuzz (libFuzzer); no `main` is emitted
// because libFuzzer supplies the entry point.
#![no_main]

use libfuzzer_sys::fuzz_target;

fuzz_target!(|data: &[u8]| {
    // yamd operates on &str, so only fuzz inputs that are valid UTF-8;
    // invalid byte sequences are silently skipped.
    if let Ok(s) = std::str::from_utf8(data) {
        // Result is discarded on purpose: we only care that parsing
        // terminates without panicking, not whether it succeeds.
        let _ = yamd::deserialize(s);
    }
});
14 changes: 13 additions & 1 deletion src/parser/anchor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,9 @@ pub(crate) fn anchor(p: &mut Parser<'_>) -> Option<Anchor> {
p.next_token();
paren_count += 1;
}
TokenKind::RightParenthesis if right_square_bracket_pos.is_some() => {
TokenKind::RightParenthesis
if right_square_bracket_pos.is_some() && paren_count > 0 =>
{
last_right_paren_pos.replace(pos);
p.next_token();
paren_count -= 1;
Expand Down Expand Up @@ -133,4 +135,14 @@ mod tests {
Some((&Token::new(TokenKind::Literal, "[", Position::default()), 0))
)
}

#[test]
fn right_paren() {
    // An unmatched `)` after the closing `]` must not be treated as the
    // end of an anchor's URL part.
    let mut parser = Parser::new("[a])");

    // No anchor should be recognized here.
    assert_eq!(anchor(&mut parser), None);

    // On failure the parser must have backtracked to the opening `[`,
    // leaving it available for other node types to consume.
    let expected = Some((&Token::new(TokenKind::Literal, "[", Position::default()), 0));
    assert_eq!(parser.peek(), expected)
}
}
12 changes: 11 additions & 1 deletion src/parser/metadata.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,11 @@ pub(crate) fn metadata(p: &mut Parser) -> Option<String> {
match t.kind {
TokenKind::Minus if t.slice.len() == 3 && t.position.column == 0 => {
p.next_token();
return Some(p.range_to_string(start + 2..pos - 1));
if (start + 2) > (pos - 1) {
return Some(String::from(""));
} else {
return Some(p.range_to_string(start + 2..pos - 1));
}
}
_ => {
p.next_token();
Expand Down Expand Up @@ -85,4 +89,10 @@ mod tests {
))
)
}

#[test]
fn no_content() {
    // A metadata section with nothing between the `---` fences should
    // parse as an empty string rather than panicking on an empty range.
    let mut parser = Parser::new("---\n---");
    assert_eq!(metadata(&mut parser), Some(String::new()));
}
}
14 changes: 13 additions & 1 deletion src/parser/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,9 @@ impl<'input> Parser<'input> {

pub fn next_token(&mut self) -> Option<&Token<'input>> {
if self.stack.len() > self.stack_pos {
let res = self.stack.get(self.stack_pos);
self.stack_pos += 1;
return self.stack.get(self.stack_pos);
return res;
};

self.stack.push(self.lexer.next()?);
Expand Down Expand Up @@ -210,4 +211,15 @@ mod tests {
Some((&Token::new(TokenKind::Literal, "!", Position::default()), 0))
)
}

#[test]
fn backtrack() {
    // After consuming a token and backtracking to position 0,
    // `next_token` must yield that same token again.
    let mut parser = Parser::new("!");
    parser.next_token();
    parser.backtrack(0);

    let replayed = parser.next_token();
    assert_eq!(
        replayed,
        Some(&Token::new(TokenKind::Bang, "!", Position::default()))
    );
}
}

0 comments on commit 9e52236

Please sign in to comment.