From a4d5b8019dc7e4bbdba5f6354812d1b170277c04 Mon Sep 17 00:00:00 2001 From: misson20000 Date: Fri, 26 Jul 2024 22:20:34 -0400 Subject: [PATCH] Refactor model::listing::layout into model::listing::line and model::listing::window --- src/bin/layoutplayground.rs | 9 +- src/logic/tokenizer.rs | 70 ++- src/model/listing/{layout.rs => line.rs} | 524 +---------------------- src/model/listing/mod.rs | 18 +- src/model/listing/window.rs | 466 ++++++++++++++++++++ src/view/listing.rs | 8 +- src/view/listing/facet/scroll.rs | 6 +- src/view/listing/line.rs | 21 +- 8 files changed, 562 insertions(+), 560 deletions(-) rename src/model/listing/{layout.rs => line.rs} (58%) create mode 100644 src/model/listing/window.rs diff --git a/src/bin/layoutplayground.rs b/src/bin/layoutplayground.rs index 65ec564..37a7e32 100644 --- a/src/bin/layoutplayground.rs +++ b/src/bin/layoutplayground.rs @@ -3,17 +3,18 @@ use std::sync; use std::vec; use charm::model::document; -use charm::model::listing::layout; +use charm::model::listing::line; use charm::model::listing::token; use charm::model::listing::token::TokenKind; +use charm::model::listing::window; struct Line { indent: usize, tokens: vec::Vec } -impl layout::LineView for Line { - fn from_line(line: layout::Line) -> Self { +impl window::LineView for Line { + fn from_line(line: line::Line) -> Self { let tokens: vec::Vec = line.to_tokens().collect(); Line { @@ -38,7 +39,7 @@ fn main() { let xml_path = args.next().expect("expected path to xml"); let document = sync::Arc::new(document::Document::load_from_testing_structure(xml_path).unwrap()); - let mut window = layout::Window::::new(document); + let mut window = window::Window::::new(document); window.resize(150); diff --git a/src/logic/tokenizer.rs b/src/logic/tokenizer.rs index fbfd4a3..940e5fc 100644 --- a/src/logic/tokenizer.rs +++ b/src/logic/tokenizer.rs @@ -11,10 +11,11 @@ use std::sync; use crate::model::addr; -use crate::model::listing::token; -use crate::model::listing::token::TokenKind; -use crate::model::document::structure; use crate::model::document::change; +use crate::model::document::structure; +use crate::model::document; +use crate::model::listing::token::TokenKind; +use crate::model::listing::token; use tracing::instrument; @@ -66,6 +67,23 @@ pub struct TokenizerStackEntry { node_addr: addr::Address, } +/* This lets us provide an alternate, simpler implementation to + * certain unit tests to help isolate bugs to either Tokenizer logic + * or Window/Line logic. 
*/ +pub trait AbstractTokenizer: Clone { + fn at_beginning(root: sync::Arc) -> Self; + fn at_path(root: sync::Arc, path: &structure::Path, offset: addr::Address) -> Self; + fn port_change(&mut self, new_doc: &sync::Arc, change: &document::change::Change); + fn hit_top(&self) -> bool; + fn hit_bottom(&self) -> bool; + fn gen_token(&self) -> TokenGenerationResult; + fn move_prev(&mut self) -> bool; + fn move_next(&mut self) -> bool; + fn next_postincrement(&mut self) -> Option; + fn prev(&mut self) -> Option; + fn in_summary(&self) -> bool; +} + #[derive(Clone)] pub struct Tokenizer { /* invariants: @@ -1661,6 +1679,52 @@ pub mod xml { } } +impl AbstractTokenizer for Tokenizer { + fn at_beginning(root: sync::Arc) -> Self { + Tokenizer::at_beginning(root) + } + + fn at_path(root: sync::Arc, path: &structure::Path, offset: addr::Address) -> Self { + Tokenizer::at_path(root, path, offset) + } + + fn port_change(&mut self, new_doc: &sync::Arc, change: &document::change::Change) { + Tokenizer::port_change(self, &new_doc.root, change, &mut PortOptions::default()); + } + + fn hit_top(&self) -> bool { + Tokenizer::hit_top(self) + } + + fn hit_bottom(&self) -> bool { + Tokenizer::hit_bottom(self) + } + + fn gen_token(&self) -> TokenGenerationResult { + Tokenizer::gen_token(self) + } + + fn move_prev(&mut self) -> bool { + Tokenizer::move_prev(self) + } + + fn move_next(&mut self) -> bool { + Tokenizer::move_next(self) + } + + fn next_postincrement(&mut self) -> Option { + Tokenizer::next_postincrement(self) + } + + fn prev(&mut self) -> Option { + Tokenizer::prev(self) + } + + fn in_summary(&self) -> bool { + Tokenizer::in_summary(self) + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/src/model/listing/layout.rs b/src/model/listing/line.rs similarity index 58% rename from src/model/listing/layout.rs rename to src/model/listing/line.rs index fba5fe2..6966273 100644 --- a/src/model/listing/layout.rs +++ b/src/model/listing/line.rs @@ -7,11 +7,9 @@ use std::iter; use std::sync; use crate::model::addr; -use crate::model::document; use crate::model::document::structure; use crate::model::listing::token; use crate::model::listing::token::TokenKind; -use crate::model::versioned::Versioned; use crate::logic::tokenizer; use crate::util; @@ -50,216 +48,6 @@ pub struct Line { pub ty: LineType, } -pub trait LineView { - fn from_line(line: Line) -> Self; - fn iter_tokens(&self) -> impl iter::Iterator>; - fn to_tokens(self) -> impl iter::DoubleEndedIterator; -} - -/* This lets us swap out a simpler implementation for testing to help narrow down - * whether bugs are in Window logic or Tokenizer logic. */ -pub trait WindowTokenizer: Clone { - fn at_beginning(root: sync::Arc) -> Self; - fn at_path(root: sync::Arc, path: &structure::Path, offset: addr::Address) -> Self; - fn port_change(&mut self, new_doc: &sync::Arc, change: &document::change::Change); - fn hit_top(&self) -> bool; - fn hit_bottom(&self) -> bool; - fn gen_token(&self) -> tokenizer::TokenGenerationResult; - fn move_prev(&mut self) -> bool; - fn move_next(&mut self) -> bool; - fn next_postincrement(&mut self) -> Option; - fn prev(&mut self) -> Option; - fn in_summary(&self) -> bool; -} - -/// A listing window with a fixed height. Useful for scrolling by lines. -/// It is up to the user to make sure that this gets properly notified with structure invalidation events. 
-#[derive(Clone)] -pub struct Window { - pub current_document: sync::Arc, - top: Tokenizer, - bottom: Tokenizer, - - pub line_views: collections::VecDeque, - pub window_height: usize, - - pub wants_update: bool, -} - -impl Window { - pub fn new(doc: sync::Arc) -> Window { - Window { - top: Tokenizer::at_beginning(doc.root.clone()), - bottom: Tokenizer::at_beginning(doc.root.clone()), - - current_document: doc, - - line_views: std::collections::VecDeque::::new(), - window_height: 0, - - wants_update: false, - } - } - - /// Moves the top of the window to the specified address. Returns amount - /// window was adjusted upwards by due to hitting the bottom of the address space. - pub fn seek(&mut self, document: sync::Arc, path: &structure::Path, offset: addr::Address) -> usize { - self.current_document = document; - let root = self.current_document.root.clone(); - self.repopulate_window(move |tok, _| *tok = Tokenizer::at_path(root, path, offset)) - } - - fn repopulate_window(&mut self, tokenizer_provider: F) -> usize where - F: FnOnce(&mut Tokenizer, &mut sync::Arc) { - tokenizer_provider(&mut self.bottom, &mut self.current_document); - let (first_line, top, _index) = Line::containing_tokenizer(&mut self.bottom); - self.top = top; - self.line_views.clear(); - - if !first_line.is_empty() { - self.line_views.push_back(LV::from_line(first_line)); - } - - let mut offset = 0; - - while self.line_views.len() < self.window_height { - if self.bottom.hit_bottom() { - if self.top.hit_top() { - /* entire document is too small to fit in window. */ - break; - } - self.grow_top(); - offset+= 1; - } else { - self.grow_bottom(); - } - } - - self.wants_update = true; - - offset - } - - fn grow_top(&mut self) { - if self.top.hit_top() { - return; - } - - let line = Line::prev_from_tokenizer(&mut self.top); - - if line.is_empty() { - assert!(self.top.hit_top()); - return; - } - - self.line_views.push_front(LV::from_line(line)); - } - - fn grow_bottom(&mut self) { - if self.bottom.hit_bottom() { - return; - } - - let line = Line::next_from_tokenizer(&mut self.bottom); - - if line.is_empty() { - assert!(self.bottom.hit_bottom()); - return; - } - - self.line_views.push_back(LV::from_line(line)); - } - - fn shrink_top(&mut self) { - let line = self.line_views.pop_front().unwrap(); - - for token in line.to_tokens() { - assert_eq!(token, self.top.next_postincrement().unwrap()); - } - } - - fn shrink_bottom(&mut self) { - let line = self.line_views.pop_back().unwrap(); - - for token in line.to_tokens().rev() { - assert_eq!(token, self.bottom.prev().unwrap()); - } - } - - /// Scrolls the window upwards by one line. Returns false if the - /// beginning of the token stream is hit. - pub fn scroll_up(&mut self) -> bool { - if self.top.hit_top() { - return false; - } - - self.grow_top(); - self.shrink_bottom(); - - self.wants_update = true; - - true - } - - /// Scrolls the window downwards by one line. Returns false if the - /// end of the token stream is hit. - pub fn scroll_down(&mut self) -> bool { - if self.bottom.hit_bottom() { - return false; - } - - self.grow_bottom(); - self.shrink_top(); - - self.wants_update = true; - - true - } - - /// Changes the size of the window. 
- pub fn resize(&mut self, size: usize) { - self.window_height = size; - - while self.line_views.len() > self.window_height { - self.shrink_bottom(); - } - while self.line_views.len() < self.window_height { - if self.bottom.hit_bottom() { - if self.top.hit_top() { - break; - } else { - self.grow_top(); - } - } else { - self.grow_bottom(); - } - } - - self.wants_update = true; - } - - pub fn get_window_height(&self) -> usize { - self.window_height - } - - pub fn get_bottom_hit_end(&self) -> bool { - self.bottom.hit_bottom() - } - - /* state bookkeeping */ - - pub fn update(&mut self, document: &sync::Arc) { - if self.current_document.is_outdated(document) { - self.repopulate_window(|tok, current_doc| { - document.changes_since(¤t_doc.clone(), &mut |new_doc, change| { - tok.port_change(new_doc, change); - *current_doc = new_doc.clone() - }); - }); - } - } -} - impl Line { pub fn empty() -> Self { Line { @@ -268,7 +56,7 @@ impl Line { } /// Figures out what line contains the token immediately after the tokenizer's position. Moves the referenced tokenizer to the end of that line, and returns the line, a tokenizer pointing to the beginning of the line, and the index of the specified token within that line. - pub fn containing_tokenizer(tokenizer: &mut Tokenizer) -> (Self, Tokenizer, usize) { + pub fn containing_tokenizer(tokenizer: &mut Tokenizer) -> (Self, Tokenizer, usize) { /* Put the first token on the line. */ let mut line = Line::from_token(loop { match tokenizer.gen_token() { @@ -326,7 +114,7 @@ impl Line { } /// Returns the line ending at the tokenizer's current position, and moves the tokenizer to the beginning of that line. - pub fn prev_from_tokenizer(tokenizer: &mut impl WindowTokenizer) -> Self { + pub fn prev_from_tokenizer(tokenizer: &mut impl tokenizer::AbstractTokenizer) -> Self { let mut line = Self::empty(); loop { @@ -354,7 +142,7 @@ impl Line { } /// Returns the line beginning at the tokenizer's current position, and moves the tokenizer to the end of that line. 
- pub fn next_from_tokenizer(tokenizer: &mut impl WindowTokenizer) -> Self { + pub fn next_from_tokenizer(tokenizer: &mut impl tokenizer::AbstractTokenizer) -> Self { let mut line = Line::empty(); loop { @@ -680,20 +468,6 @@ impl Line { } } -impl LineView for Line { - fn from_line(line: Line) -> Self { - line - } - - fn iter_tokens(&self) -> impl iter::Iterator> { - self.iter_tokens() - } - - fn to_tokens(self) -> impl iter::DoubleEndedIterator { - self.into_iter() - } -} - impl PartialEq for Line { fn eq(&self, other: &Line) -> bool { match (&self.ty, &other.ty) { @@ -766,295 +540,3 @@ impl fmt::Debug for Line { .finish() } } - -impl WindowTokenizer for tokenizer::Tokenizer { - fn at_beginning(root: sync::Arc) -> Self { - tokenizer::Tokenizer::at_beginning(root) - } - - fn at_path(root: sync::Arc, path: &structure::Path, offset: addr::Address) -> Self { - tokenizer::Tokenizer::at_path(root, path, offset) - } - - fn port_change(&mut self, new_doc: &sync::Arc, change: &document::change::Change) { - tokenizer::Tokenizer::port_change(self, &new_doc.root, change, &mut tokenizer::PortOptions::default()); - } - - fn hit_top(&self) -> bool { - tokenizer::Tokenizer::hit_top(self) - } - - fn hit_bottom(&self) -> bool { - tokenizer::Tokenizer::hit_bottom(self) - } - - fn gen_token(&self) -> tokenizer::TokenGenerationResult { - tokenizer::Tokenizer::gen_token(self) - } - - fn move_prev(&mut self) -> bool { - tokenizer::Tokenizer::move_prev(self) - } - - fn move_next(&mut self) -> bool { - tokenizer::Tokenizer::move_next(self) - } - - fn next_postincrement(&mut self) -> Option { - tokenizer::Tokenizer::next_postincrement(self) - } - - fn prev(&mut self) -> Option { - tokenizer::Tokenizer::prev(self) - } - - fn in_summary(&self) -> bool { - tokenizer::Tokenizer::in_summary(self) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn scroll_around() { - let document = document::Builder::default().arc(); - let mut window = Window::::new(document); - - window.resize(5); - - /* scroll back and forth a bit */ - assert!(window.scroll_down()); - assert!(window.scroll_down()); - assert!(window.scroll_up()); - assert!(window.scroll_down()); - assert!(window.scroll_down()); - assert!(window.scroll_up()); - assert!(window.scroll_up()); - assert!(window.scroll_up()); - } - - fn print_lines(window: &Window) { - for l in &window.line_views { - print!(" "); - for t in l.iter_tokens() { - print!("{}", token::TokenTestFormat(t)); - } - println!(); - } - } - - #[test] - fn bonk_top() { - let document = document::Builder::default().arc(); - let mut window = Window::::new(document); - - window.resize(5); - - let window_before = window.clone(); - - println!("before scrolling down: "); - print_lines(&window); - - /* scroll down and back up again */ - assert!(window.scroll_down()); - println!("after scrolling down: "); - print_lines(&window); - - assert!(window.scroll_up()); - println!("after scrolling back up: "); - print_lines(&window); - - /* should leave the window in the same state */ - itertools::assert_equal( - window_before.line_views.iter(), - window.line_views.iter()); - - /* should hit the top */ - assert!(!window.scroll_up()); - - /* after having hit the top, scroll down and back up again */ - assert!(window.scroll_down()); - assert!(window.scroll_up()); - } - - #[test] - fn graze_top() { - let document = document::Builder::default().arc(); - let mut window = Window::::new(document); - - window.resize(2); - - let window_before = window.clone(); - - print_lines(&window); - 
assert!(window.scroll_down()); - print_lines(&window); - assert!(window.scroll_down()); - print_lines(&window); - assert!(window.scroll_up()); - print_lines(&window); - assert!(window.scroll_up()); - print_lines(&window); - - itertools::assert_equal( - window_before.line_views.iter(), - window.line_views.iter()); - - assert!(window.scroll_down()); - } - - #[test] - fn seek() { - let root = structure::Node::builder() - .name("root") - .size(0x4a0) - .child(0x10, |b| b - .name("child0") - .size(0x20)) - .child(0x14, |b| b - .name("child1") - .size(0x50) - .children_display(structure::ChildrenDisplay::Summary) - .child(0x0, |b| b - .name("child1.0") - .size(0x18)) - .child(0x20, |b| b - .name("child1.1") - .size(0x18)) - .child(0x34, |b| b - .name("child1.2") - .size(0x18)) - .child(0x48, |b| b - .name("child1.3") - .size(0x1c))) - .child(0x60, |b| b - .name("child2") - .size(0x40)) - .child(0xa0, |b| b - .name("child3") - .size(0x400)) - .build(); - let document = document::Builder::new(root).arc(); - let mut window1 = Window::::new(document.clone()); - window1.resize(10); - let mut window2 = window1.clone(); - - /* Scroll window 1 down until its first line is the one we're going to seek the other window to. */ - while !match window1.line_views[0].iter_tokens().next().unwrap() { - token::TokenRef::Hexdump(hdt) => hdt.common.node_path == &[2] && hdt.extent.begin == 0x20.into(), - _ => false - } { - window1.scroll_down(); - } - window2.seek(document.clone(), &vec![2], 0x24.into()); - - if !window1.line_views.iter().eq(window2.line_views.iter()) { - let mut i1 = window1.line_views.iter(); - let mut i2 = window2.line_views.iter(); - loop { - let line1 = i1.next(); - let line2 = i2.next(); - - let str1 = line1.map(|l| format!("{}", l)).unwrap_or("".to_string()); - let str2 = line2.map(|l| format!("{}", l)).unwrap_or("".to_string()); - - println!("{:60} | {:60}", str1, str2); - - if line1.is_none() && line2.is_none() { - break; - } - } - panic!("windows mismatched"); - } - } - - #[test] - fn containing_tokenizer() { - let root = structure::Node::builder() - .name("root") - .size(0x40) - .child(0x10, |b| b - .name("child0") - .size(0x20)) - .child(0x14, |b| b - .name("child1") - .size(0x50) - .children_display(structure::ChildrenDisplay::Summary) - .child(0x0, |b| b - .name("child1.0") - .size(0x18)) - .child(0x20, |b| b - .name("child1.1") - .size(0x18)) - .child(0x34, |b| b - .name("child1.2") - .size(0x18)) - .child(0x48, |b| b - .name("child1.3") - .size(0x1c))) - .child(0x60, |b| b - .name("child2") - .size(0x4)) - .build(); - - /* Pregenerate all the lines from a simple forward walk through the whole document. 
*/ - let mut tokenizer = tokenizer::Tokenizer::at_beginning(root.clone()); - let mut lines = vec![]; - loop { - let mut begin = tokenizer.clone(); - begin.canonicalize_next(); - - let line = Line::next_from_tokenizer(&mut tokenizer); - if line.is_empty() { - break; - } - - let mut end = tokenizer.clone(); - end.canonicalize_next(); - - lines.push((begin, line, end)); - } - - let mut tokenizer = tokenizer::Tokenizer::at_beginning(root.clone()); - let mut i = 0; - loop { - let token = match tokenizer.gen_token() { - tokenizer::TokenGenerationResult::Ok(token) => token, - tokenizer::TokenGenerationResult::Skip => if tokenizer.move_next() { continue } else { break }, - tokenizer::TokenGenerationResult::Boundary => break, - }; - - let expected_index_in_line = loop { - if let Some(index) = lines[i].1.iter_tokens().position(|t| t == token.as_ref()) { - break index; - } else { - i+= 1; - } - }; - - let mut line_end = tokenizer.clone(); - let (line, mut line_begin, index_in_line) = Line::containing_tokenizer(&mut line_end); - line_begin.canonicalize_next(); - line_end.canonicalize_next(); - - if line != lines[i].1 || index_in_line != expected_index_in_line || line_begin != lines[i].0 || line_end != lines[i].2 { - println!("seeked to {:?}", token); - println!("line from forward walk : {}", lines[i].1); - println!("line from containing_tokenizer: {}", line); - println!("expected index {}, got index {}", expected_index_in_line, index_in_line); - - println!("begin tokenizer [actual] : {:#?}", line_begin); - println!("begin tokenizer [expected]: {:#?}", lines[i].0); - - println!("end tokenizer [actual] : {:#?}", line_end); - println!("end tokenizer [expected]: {:#?}", lines[i].2); - - panic!("mismatched"); - } - - tokenizer.move_next(); - } - } -} diff --git a/src/model/listing/mod.rs b/src/model/listing/mod.rs index 1a2e41e..e5ad3b9 100644 --- a/src/model/listing/mod.rs +++ b/src/model/listing/mod.rs @@ -1,16 +1,4 @@ -/// The term "listing" is used to refer to the representation of a document that the user interacts with, which is a -/// list of lines containing tokens. - -/// The window module is used to describe how the listing is accessed. A listing is a stream of lines, and it is often -/// difficult to exactly represent a specific location to seek to within a listing; if you were to ask for the line at -/// address 0x4000a000, but there was a break at that address, it cannot be determined whether you mean to retrieve the -/// blank line from the break header, the break header line, or the hex line at that address, but we want to see all of -/// them. Instead, a generator pattern is used. The FlexWindow struct represents a variable-sized window into the -/// listing. It can be expanded or contracted both upwards and downwards to reliably enumerate the listing, but it -/// cannot reliably be seeked to arbitrary locations. - -/// The cursor module is used to describe how the cursor moves around in the listing and accepts edits to it. 
- -pub mod token; -pub mod layout; pub mod cursor; +pub mod line; +pub mod token; +pub mod window; diff --git a/src/model/listing/window.rs b/src/model/listing/window.rs new file mode 100644 index 0000000..4130d40 --- /dev/null +++ b/src/model/listing/window.rs @@ -0,0 +1,466 @@ +use std::collections; +use std::iter; +use std::sync; + +use crate::model::addr; +use crate::model::document; +use crate::model::document::structure; +use crate::model::listing::line; +use crate::model::listing::token; +use crate::model::versioned::Versioned; +use crate::logic::tokenizer; + +pub trait LineView { + fn from_line(line: line::Line) -> Self; + fn iter_tokens(&self) -> impl iter::Iterator>; + fn to_tokens(self) -> impl iter::DoubleEndedIterator; +} + +/// A listing window with a fixed height. Useful for scrolling by lines. +/// It is up to the user to make sure that this gets properly notified with structure invalidation events. +#[derive(Clone)] +pub struct Window { + pub current_document: sync::Arc, + top: Tokenizer, + bottom: Tokenizer, + + pub line_views: collections::VecDeque, + pub window_height: usize, + + pub wants_update: bool, +} + +impl Window { + pub fn new(doc: sync::Arc) -> Window { + Window { + top: Tokenizer::at_beginning(doc.root.clone()), + bottom: Tokenizer::at_beginning(doc.root.clone()), + + current_document: doc, + + line_views: std::collections::VecDeque::::new(), + window_height: 0, + + wants_update: false, + } + } + + /// Moves the top of the window to the specified address. Returns amount + /// window was adjusted upwards by due to hitting the bottom of the address space. + pub fn seek(&mut self, document: sync::Arc, path: &structure::Path, offset: addr::Address) -> usize { + self.current_document = document; + let root = self.current_document.root.clone(); + self.repopulate_window(move |tok, _| *tok = Tokenizer::at_path(root, path, offset)) + } + + fn repopulate_window(&mut self, tokenizer_provider: F) -> usize where + F: FnOnce(&mut Tokenizer, &mut sync::Arc) { + tokenizer_provider(&mut self.bottom, &mut self.current_document); + let (first_line, top, _index) = line::Line::containing_tokenizer(&mut self.bottom); + self.top = top; + self.line_views.clear(); + + if !first_line.is_empty() { + self.line_views.push_back(LV::from_line(first_line)); + } + + let mut offset = 0; + + while self.line_views.len() < self.window_height { + if self.bottom.hit_bottom() { + if self.top.hit_top() { + /* entire document is too small to fit in window. 
*/ + break; + } + self.grow_top(); + offset+= 1; + } else { + self.grow_bottom(); + } + } + + self.wants_update = true; + + offset + } + + fn grow_top(&mut self) { + if self.top.hit_top() { + return; + } + + let line = line::Line::prev_from_tokenizer(&mut self.top); + + if line.is_empty() { + assert!(self.top.hit_top()); + return; + } + + self.line_views.push_front(LV::from_line(line)); + } + + fn grow_bottom(&mut self) { + if self.bottom.hit_bottom() { + return; + } + + let line = line::Line::next_from_tokenizer(&mut self.bottom); + + if line.is_empty() { + assert!(self.bottom.hit_bottom()); + return; + } + + self.line_views.push_back(LV::from_line(line)); + } + + fn shrink_top(&mut self) { + let line = self.line_views.pop_front().unwrap(); + + for token in line.to_tokens() { + assert_eq!(token, self.top.next_postincrement().unwrap()); + } + } + + fn shrink_bottom(&mut self) { + let line = self.line_views.pop_back().unwrap(); + + for token in line.to_tokens().rev() { + assert_eq!(token, self.bottom.prev().unwrap()); + } + } + + /// Scrolls the window upwards by one line. Returns false if the + /// beginning of the token stream is hit. + pub fn scroll_up(&mut self) -> bool { + if self.top.hit_top() { + return false; + } + + self.grow_top(); + self.shrink_bottom(); + + self.wants_update = true; + + true + } + + /// Scrolls the window downwards by one line. Returns false if the + /// end of the token stream is hit. + pub fn scroll_down(&mut self) -> bool { + if self.bottom.hit_bottom() { + return false; + } + + self.grow_bottom(); + self.shrink_top(); + + self.wants_update = true; + + true + } + + /// Changes the size of the window. + pub fn resize(&mut self, size: usize) { + self.window_height = size; + + while self.line_views.len() > self.window_height { + self.shrink_bottom(); + } + while self.line_views.len() < self.window_height { + if self.bottom.hit_bottom() { + if self.top.hit_top() { + break; + } else { + self.grow_top(); + } + } else { + self.grow_bottom(); + } + } + + self.wants_update = true; + } + + pub fn get_window_height(&self) -> usize { + self.window_height + } + + pub fn get_bottom_hit_end(&self) -> bool { + self.bottom.hit_bottom() + } + + /* state bookkeeping */ + + pub fn update(&mut self, document: &sync::Arc) { + if self.current_document.is_outdated(document) { + self.repopulate_window(|tok, current_doc| { + document.changes_since(¤t_doc.clone(), &mut |new_doc, change| { + tok.port_change(new_doc, change); + *current_doc = new_doc.clone() + }); + }); + } + } +} + +impl LineView for line::Line { + fn from_line(line: line::Line) -> Self { + line + } + + fn iter_tokens(&self) -> impl iter::Iterator> { + self.iter_tokens() + } + + fn to_tokens(self) -> impl iter::DoubleEndedIterator { + self.into_iter() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::model::listing::token::TokenKind; + + #[test] + fn scroll_around() { + let document = document::Builder::default().arc(); + let mut window = Window::::new(document); + + window.resize(5); + + /* scroll back and forth a bit */ + assert!(window.scroll_down()); + assert!(window.scroll_down()); + assert!(window.scroll_up()); + assert!(window.scroll_down()); + assert!(window.scroll_down()); + assert!(window.scroll_up()); + assert!(window.scroll_up()); + assert!(window.scroll_up()); + } + + fn print_lines(window: &Window) { + for l in &window.line_views { + print!(" "); + for t in l.iter_tokens() { + print!("{}", token::TokenTestFormat(t)); + } + println!(); + } + } + + #[test] + fn bonk_top() { + let document = 
document::Builder::default().arc(); + let mut window = Window::::new(document); + + window.resize(5); + + let window_before = window.clone(); + + println!("before scrolling down: "); + print_lines(&window); + + /* scroll down and back up again */ + assert!(window.scroll_down()); + println!("after scrolling down: "); + print_lines(&window); + + assert!(window.scroll_up()); + println!("after scrolling back up: "); + print_lines(&window); + + /* should leave the window in the same state */ + itertools::assert_equal( + window_before.line_views.iter(), + window.line_views.iter()); + + /* should hit the top */ + assert!(!window.scroll_up()); + + /* after having hit the top, scroll down and back up again */ + assert!(window.scroll_down()); + assert!(window.scroll_up()); + } + + #[test] + fn graze_top() { + let document = document::Builder::default().arc(); + let mut window = Window::::new(document); + + window.resize(2); + + let window_before = window.clone(); + + print_lines(&window); + assert!(window.scroll_down()); + print_lines(&window); + assert!(window.scroll_down()); + print_lines(&window); + assert!(window.scroll_up()); + print_lines(&window); + assert!(window.scroll_up()); + print_lines(&window); + + itertools::assert_equal( + window_before.line_views.iter(), + window.line_views.iter()); + + assert!(window.scroll_down()); + } + + #[test] + fn seek() { + let root = structure::Node::builder() + .name("root") + .size(0x4a0) + .child(0x10, |b| b + .name("child0") + .size(0x20)) + .child(0x14, |b| b + .name("child1") + .size(0x50) + .children_display(structure::ChildrenDisplay::Summary) + .child(0x0, |b| b + .name("child1.0") + .size(0x18)) + .child(0x20, |b| b + .name("child1.1") + .size(0x18)) + .child(0x34, |b| b + .name("child1.2") + .size(0x18)) + .child(0x48, |b| b + .name("child1.3") + .size(0x1c))) + .child(0x60, |b| b + .name("child2") + .size(0x40)) + .child(0xa0, |b| b + .name("child3") + .size(0x400)) + .build(); + let document = document::Builder::new(root).arc(); + let mut window1 = Window::::new(document.clone()); + window1.resize(10); + let mut window2 = window1.clone(); + + /* Scroll window 1 down until its first line is the one we're going to seek the other window to. 
*/ + while !match window1.line_views[0].iter_tokens().next().unwrap() { + token::TokenRef::Hexdump(hdt) => hdt.common.node_path == &[2] && hdt.extent.begin == 0x20.into(), + _ => false + } { + window1.scroll_down(); + } + window2.seek(document.clone(), &vec![2], 0x24.into()); + + if !window1.line_views.iter().eq(window2.line_views.iter()) { + let mut i1 = window1.line_views.iter(); + let mut i2 = window2.line_views.iter(); + loop { + let line1 = i1.next(); + let line2 = i2.next(); + + let str1 = line1.map(|l| format!("{}", l)).unwrap_or("".to_string()); + let str2 = line2.map(|l| format!("{}", l)).unwrap_or("".to_string()); + + println!("{:60} | {:60}", str1, str2); + + if line1.is_none() && line2.is_none() { + break; + } + } + panic!("windows mismatched"); + } + } + + #[test] + fn containing_tokenizer() { + let root = structure::Node::builder() + .name("root") + .size(0x40) + .child(0x10, |b| b + .name("child0") + .size(0x20)) + .child(0x14, |b| b + .name("child1") + .size(0x50) + .children_display(structure::ChildrenDisplay::Summary) + .child(0x0, |b| b + .name("child1.0") + .size(0x18)) + .child(0x20, |b| b + .name("child1.1") + .size(0x18)) + .child(0x34, |b| b + .name("child1.2") + .size(0x18)) + .child(0x48, |b| b + .name("child1.3") + .size(0x1c))) + .child(0x60, |b| b + .name("child2") + .size(0x4)) + .build(); + + /* Pregenerate all the lines from a simple forward walk through the whole document. */ + let mut tokenizer = tokenizer::Tokenizer::at_beginning(root.clone()); + let mut lines = vec![]; + loop { + let mut begin = tokenizer.clone(); + begin.canonicalize_next(); + + let line = line::Line::next_from_tokenizer(&mut tokenizer); + if line.is_empty() { + break; + } + + let mut end = tokenizer.clone(); + end.canonicalize_next(); + + lines.push((begin, line, end)); + } + + let mut tokenizer = tokenizer::Tokenizer::at_beginning(root.clone()); + let mut i = 0; + loop { + let token = match tokenizer.gen_token() { + tokenizer::TokenGenerationResult::Ok(token) => token, + tokenizer::TokenGenerationResult::Skip => if tokenizer.move_next() { continue } else { break }, + tokenizer::TokenGenerationResult::Boundary => break, + }; + + let expected_index_in_line = loop { + if let Some(index) = lines[i].1.iter_tokens().position(|t| t == token.as_ref()) { + break index; + } else { + i+= 1; + } + }; + + let mut line_end = tokenizer.clone(); + let (line, mut line_begin, index_in_line) = line::Line::containing_tokenizer(&mut line_end); + line_begin.canonicalize_next(); + line_end.canonicalize_next(); + + if line != lines[i].1 || index_in_line != expected_index_in_line || line_begin != lines[i].0 || line_end != lines[i].2 { + println!("seeked to {:?}", token); + println!("line from forward walk : {}", lines[i].1); + println!("line from containing_tokenizer: {}", line); + println!("expected index {}, got index {}", expected_index_in_line, index_in_line); + + println!("begin tokenizer [actual] : {:#?}", line_begin); + println!("begin tokenizer [expected]: {:#?}", lines[i].0); + + println!("end tokenizer [actual] : {:#?}", line_end); + println!("end tokenizer [expected]: {:#?}", lines[i].2); + + panic!("mismatched"); + } + + tokenizer.move_next(); + } + } +} diff --git a/src/view/listing.rs b/src/view/listing.rs index cd10ee6..bd3a676 100644 --- a/src/view/listing.rs +++ b/src/view/listing.rs @@ -11,8 +11,8 @@ use crate::model::datapath::DataPathExt; use crate::model::document; use crate::model::document::structure; use crate::model::listing::cursor; -use crate::model::listing::layout as layout_model; 
-use crate::model::listing::layout::LineView; +use crate::model::listing::window as window_model; +use crate::model::listing::window::LineView; use crate::model::selection; use crate::model::versioned::Versioned; use crate::view; @@ -73,7 +73,7 @@ struct Interior { charm_window: rc::Weak, charm_window_id: u64, - window: layout_model::Window, + window: window_model::Window, cursor: facet::cursor::CursorView, scroll: facet::scroll::Scroller, hover: Option<(f64, f64)>, @@ -285,7 +285,7 @@ impl ListingWidget { charm_window: rc::Rc::downgrade(window), charm_window_id: window.id, - window: layout_model::Window::new(document.clone()), + window: window_model::Window::new(document.clone()), cursor: facet::cursor::CursorView::new(document.clone(), config.clone()), scroll: facet::scroll::Scroller::new(config.clone()), hover: None, diff --git a/src/view/listing/facet/scroll.rs b/src/view/listing/facet/scroll.rs index 2e0635e..2d78ee6 100644 --- a/src/view/listing/facet/scroll.rs +++ b/src/view/listing/facet/scroll.rs @@ -4,8 +4,8 @@ use crate::view::config; use crate::model::addr; use crate::model::document; use crate::model::document::structure; -use crate::model::listing::layout; -use crate::model::listing::layout::LineView; +use crate::model::listing::window; +use crate::model::listing::window::LineView; use crate::view::listing::facet; use crate::view::listing::line; @@ -23,7 +23,7 @@ pub struct Scroller { cursor_direction: EnsureCursorInViewDirection, } -type Window = layout::Window; +type Window = window::Window; impl Scroller { pub fn new(config: sync::Arc) -> Scroller { diff --git a/src/view/listing/line.rs b/src/view/listing/line.rs index 96d085f..848fbbe 100644 --- a/src/view/listing/line.rs +++ b/src/view/listing/line.rs @@ -5,8 +5,9 @@ use std::task; use crate::model::addr; use crate::model::document; use crate::model::listing::cursor; -use crate::model::listing::layout; +use crate::model::listing::line as line_model; use crate::model::listing::token; +use crate::model::listing::window; use crate::model::selection; use crate::util; use crate::view::gsc; @@ -54,8 +55,8 @@ pub struct Line { render_node: Option, } -impl layout::LineView for Line { - fn from_line(line: layout::Line) -> Self { +impl window::LineView for Line { + fn from_line(line: line_model::Line) -> Self { Line { ev_draw: facet::Event::new(), ev_work: facet::Event::new_wanted(), @@ -80,20 +81,20 @@ impl layout::LineView for Line { } impl LineViewType { - fn from(line: layout::Line) -> Self { + fn from(line: line_model::Line) -> Self { match line.ty { - layout::LineType::Empty => Self::Empty, - layout::LineType::Blank(tok) => Self::Blank(tok.into()), - layout::LineType::Title(tok) => Self::Title(tok.into()), - layout::LineType::Hexdump { title, node, node_path, node_addr, line_extent, tokens } => Self::Hexdump { + line_model::LineType::Empty => Self::Empty, + line_model::LineType::Blank(tok) => Self::Blank(tok.into()), + line_model::LineType::Title(tok) => Self::Title(tok.into()), + line_model::LineType::Hexdump { title, node, node_path, node_addr, line_extent, tokens } => Self::Hexdump { title: title.into(), hexdump: bucket::HexdumpBucket::new(node, node_path, node_addr, line_extent, tokens.into_iter()) }, - layout::LineType::Hexstring { title, token } => Self::Hexstring { + line_model::LineType::Hexstring { title, token } => Self::Hexstring { title: title.into(), hexstring: token.into() }, - layout::LineType::Summary { title, tokens } => Self::Summary { + line_model::LineType::Summary { title, tokens } => Self::Summary { 
title: title.into(), content: bucket::MultiTokenBucket::from_tokens(tokens.into_iter()) },
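
For reference, here is a minimal sketch of how downstream code drives the refactored modules after this change, in the same style as the layoutplayground.rs hunk above. It assumes only the APIs visible in this patch (window::Window, the identity LineView impl for line::Line, Window::resize/scroll_down, and Display on line::Line as used by the seek test); the function name and window height are illustrative, not part of the patch.

use std::sync;

use charm::model::document;
use charm::model::listing::line;
use charm::model::listing::window;

/* Walk a document through a fixed-height window, printing each line once.
 * line::Line implements window::LineView directly (the identity impl in
 * window.rs), so no custom line type is needed for a plain dump. */
fn dump_listing(doc: sync::Arc<document::Document>) {
    /* Window is parameterized over the LineView type; the tokenizer
     * parameter defaults to logic::tokenizer::Tokenizer. */
    let mut window = window::Window::<line::Line>::new(doc);
    window.resize(40);

    /* Print the initial window contents. */
    for l in &window.line_views {
        println!("{}", l);
    }

    /* scroll_down returns false once the end of the token stream is hit,
     * so after each successful scroll only the newly revealed bottom line
     * needs to be printed. */
    while window.scroll_down() {
        if let Some(l) = window.line_views.back() {
            println!("{}", l);
        }
    }
}

As the doc comment on Window notes, the window does not watch the document for structure changes itself: when the document is updated, the caller passes the new document to Window::update, which replays the intervening changes and ports the internal tokenizers forward via port_change.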