Added cover scaling
HHogg committed Dec 14, 2024
1 parent 1dc1785 commit 74a0652
Showing 42 changed files with 761 additions and 247 deletions.
33 changes: 18 additions & 15 deletions workspaces/circular-sequence/src/Article/index.tsx
@@ -4,24 +4,37 @@ import {
ArticleFigLink,
ArticleFigs,
ArticlePage,
DeepPartial,
ProjectPageLink,
} from '@hogg/common';
import { ColorMode, TilingRenderer, meta as tilingsMeta } from '@hogg/tilings';
import {
Layer,
Options,
ScaleMode,
TilingRenderer,
meta as tilingsMeta,
} from '@hogg/tilings';
import {
ArticleHeading,
ArticleParagraph,
ArticleSection,
Code,
Link,
Text,
sizeX12Px,
} from 'preshape';
import fileContentsGetMatch from '../../src-rust/get_match.rs?raw';
import fileContentsMinPermutation from '../../src-rust/min_permutation.rs?raw';
import fileContentsSequence from '../../src-rust/sequence.rs?raw';
import ConcatenatedSequencesFig from './Figs/ConcatenatedSequencesFig';
import MinPermutationFigWithWasApi from './Figs/MinPermutationFig';

const tilingRendererOptions: DeepPartial<Options> = {
scaleMode: ScaleMode.Contain,
showLayers: {
[Layer.Transform]: true,
},
};

const Article = () => {
return (
<ArticlePage>
@@ -52,9 +65,7 @@ const Article = () => {
<TilingRenderer
height="200px"
notation="12-3,4,6,4,3,4,6,4,3,4,6,4"
options={{
colorMode: ColorMode.None,
}}
options={tilingRendererOptions}
/>
</ArticleFig>
</ArticleFigs>
@@ -174,10 +185,7 @@ let seq_2: Sequence = [6, 6, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0];
height="200px"
validations={[]}
notation="4-3,4,3,12"
options={{
colorMode: ColorMode.None,
padding: sizeX12Px,
}}
options={tilingRendererOptions}
/>
}
language="rust"
@@ -208,12 +216,7 @@ let seq_2: Sequence = [6, 6, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0];
<TilingRenderer
height="200px"
notation="4-3,3,4,12"
validations={[]}
options={{
colorMode: ColorMode.None,
padding: sizeX12Px,
isValid: true,
}}
options={tilingRendererOptions}
/>
}
language="rust"
6 changes: 6 additions & 0 deletions workspaces/common/src/types.ts
@@ -27,3 +27,9 @@ export type Project = {
deploy?: boolean;
wip?: boolean;
};

export type DeepPartial<T> = T extends object
? {
[P in keyof T]?: DeepPartial<T[P]>;
}
: T;
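
A minimal usage sketch of the new DeepPartial helper (not part of the diff), assuming Options.showLayers is a full Record<Layer, boolean>: a plain Partial<Options> would still demand every Layer key once showLayers is supplied, whereas DeepPartial lets the articles above list only the layer they switch on.

import { DeepPartial } from '@hogg/common';
import { Layer, Options, ScaleMode } from '@hogg/tilings';

// Only the nested keys being changed need to be spelled out; everything
// else on Options (and on showLayers) stays optional.
const options: DeepPartial<Options> = {
  scaleMode: ScaleMode.Contain,
  showLayers: {
    [Layer.Transform]: true,
  },
};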
13 changes: 4 additions & 9 deletions workspaces/line-segment-extending/src/Article.tsx
@@ -5,11 +5,13 @@ import {
ArticleFigLink,
ArticleFigs,
ArticlePage,
DeepPartial,
ProjectPageLink,
} from '@hogg/common';
import {
Layer,
Options,
ScaleMode,
TilingRenderer,
meta as tilingsMeta,
} from '@hogg/tilings';
@@ -26,18 +28,11 @@ import {

type Props = {};

const tilingRendererOptions: Partial<Options> = {
const tilingRendererOptions: DeepPartial<Options> = {
scaleMode: ScaleMode.Contain,
showTransformIndex: 1,
showLayers: {
[Layer.Axis]: false,
[Layer.BoundingBoxes]: false,
[Layer.GridLineSegment]: false,
[Layer.GridPolygon]: false,
[Layer.PlaneOutline]: false,
[Layer.ShapeBorder]: true,
[Layer.ShapeFill]: true,
[Layer.Transform]: true,
[Layer.TransformPoints]: false,
},
};

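How the renderer applies these DeepPartial overrides is not shown in this commit; the sketch below is a hypothetical merge helper (deepMerge, isPlainObject and defaultOptions are assumed names, not @hogg/tilings API) illustrating how overrides like tilingRendererOptions could be layered over a full defaults object.

import { DeepPartial } from '@hogg/common';

// Hypothetical: recursively merge a DeepPartial override onto full defaults.
const isPlainObject = (value: unknown): value is Record<string, unknown> =>
  typeof value === 'object' && value !== null && !Array.isArray(value);

function deepMerge<T extends Record<string, unknown>>(
  defaults: T,
  overrides: DeepPartial<T>
): T {
  const result: Record<string, unknown> = { ...defaults };

  for (const [key, value] of Object.entries(
    overrides as unknown as Record<string, unknown>
  )) {
    const base = result[key];
    // Nested plain objects are merged; primitives and arrays replace the default.
    result[key] =
      isPlainObject(base) && isPlainObject(value) ? deepMerge(base, value) : value;
  }

  return result as T;
}

// e.g. (names assumed): deepMerge(defaultOptions, tilingRendererOptions) keeps
// every default layer flag except Layer.Transform, which is switched on.
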
62 changes: 14 additions & 48 deletions workspaces/spatial-grid-map/src-rust/bucket.rs
@@ -1,25 +1,24 @@
use std::{
cmp::Ordering,
collections::{BTreeSet, HashMap},
collections::HashMap,
ops::{Deref, DerefMut},
};

use serde::{Deserialize, Serialize};
use typeshare::typeshare;

use crate::utils::{compare_coordinate, compare_radians, coordinate_equals, normalize_radian};
use crate::utils::{coordinate_equals, normalize_radian};

#[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[typeshare]
pub struct Bucket<TEntryValue: Clone + Default> {
#[typeshare(serialized_as = "Vec<BucketEntry<TEntryValue>>")]
entries: BTreeSet<BucketEntry<TEntryValue>>,
pub entries: Vec<BucketEntry<TEntryValue>>,
}

impl<TEntryValue: Clone + std::fmt::Debug + Default> Bucket<TEntryValue> {
pub fn new(point: (f64, f64), value: TEntryValue, size: f32) -> Self {
Bucket {
entries: BTreeSet::from([BucketEntry {
entries: Vec::from([BucketEntry {
point,
value,
size,
@@ -55,7 +54,7 @@ impl<TEntryValue: Clone + std::fmt::Debug + Default> Bucket<TEntryValue> {
pub fn get_entry(&self, point: &(f64, f64)) -> Option<&BucketEntry<TEntryValue>> {
self
.get_entry_index(point)
.and_then(|index| self.entries.iter().nth(index))
.and_then(|index| self.entries.get(index))
}

pub fn get_entry_mut(&mut self, point: &(f64, f64)) -> Option<MutBucketEntry<'_, TEntryValue>> {
@@ -65,13 +64,6 @@ impl<TEntryValue: Clone + std::fmt::Debug + Default> Bucket<TEntryValue> {
})
}

pub fn take_entry(&mut self, point: &(f64, f64)) -> Option<BucketEntry<TEntryValue>> {
self
.get_entry_index(point)
.and_then(|index| self.entries.iter().nth(index).cloned())
.and_then(|entry| self.entries.take(&entry))
}

pub fn get_value(&self, point: &(f64, f64)) -> Option<&TEntryValue> {
self.get_entry(point).map(|entry| &entry.value)
}
@@ -81,18 +73,18 @@ impl<TEntryValue: Clone + std::fmt::Debug + Default> Bucket<TEntryValue> {
}

pub fn insert(&mut self, entry: BucketEntry<TEntryValue>) -> bool {
if !self.contains(&entry.point) {
return self.entries.insert(entry);
if self.contains(&entry.point) {
return false;
}

false
self.entries.push(entry);
true
}

pub fn remove(&mut self, point: &(f64, f64)) -> Option<BucketEntry<TEntryValue>> {
self
.get_entry(point)
.cloned()
.and_then(|entry| self.entries.take(&entry))
.get_entry_index(point)
.map(|index| self.entries.remove(index))
}

pub fn increment_counter(&mut self, point: &(f64, f64), counter: &str) {
@@ -158,36 +150,10 @@ impl<TEntryValue: Default> BucketEntry<TEntryValue> {
}
}

impl<TEntryValue: Clone + Default> Eq for BucketEntry<TEntryValue> {}

impl<TEntryValue: Clone + Default> PartialEq for BucketEntry<TEntryValue> {
fn eq(&self, other: &Self) -> bool {
coordinate_equals(self.point.0, other.point.0) && coordinate_equals(self.point.0, other.point.0)
}
}

impl<TEntryValue: Clone + Default> Ord for BucketEntry<TEntryValue> {
fn cmp(&self, other: &Self) -> Ordering {
let theta_comparison = compare_radians(self.theta(), other.theta());

if theta_comparison != Ordering::Equal {
return theta_comparison;
}

compare_coordinate(self.distance_to_center(), other.distance_to_center())
}
}

impl<TEntryValue: Clone + Default> PartialOrd for BucketEntry<TEntryValue> {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}

// Helper struct to mimic a mutable reference
pub struct MutBucketEntry<'a, TEntryValue: Clone + Default> {
item: BucketEntry<TEntryValue>,
parent: &'a mut Bucket<TEntryValue>,
pub item: BucketEntry<TEntryValue>,
pub parent: &'a mut Bucket<TEntryValue>,
}

impl<TEntryValue: Clone + Default> Deref for MutBucketEntry<'_, TEntryValue> {
Expand All @@ -208,6 +174,6 @@ impl<TEntryValue: Clone + Default> DerefMut for MutBucketEntry<'_, TEntryValue>
impl<TEntryValue: Clone + Default> Drop for MutBucketEntry<'_, TEntryValue> {
fn drop(&mut self) {
let item = std::mem::take(&mut self.item);
self.parent.entries.insert(item);
self.parent.entries.push(item);
}
}
28 changes: 20 additions & 8 deletions workspaces/spatial-grid-map/src-rust/grid.rs
@@ -6,11 +6,13 @@ use serde::{Deserialize, Serialize};
use typeshare::typeshare;

use core::f64;
use std::cmp::Ordering;
use std::collections::{BTreeSet, HashMap};
use std::mem;

use crate::bucket::{Bucket, BucketEntry, MutBucketEntry};
use crate::location::{self, Location};
use crate::utils::compare_coordinate;
use crate::visitor::Visitor;

#[derive(Debug, Clone, Copy, Deserialize, Serialize)]
@@ -100,7 +102,7 @@ impl<TEntryValue: Clone + std::fmt::Debug + Default> SpatialGridMap<TEntryValue>
fn get_bucket_by_point_mut(&mut self, point: &(f64, f64)) -> Option<&mut Bucket<TEntryValue>> {
self
.get_location(point)
.and_then(|location| self.store.get_mut(&location.key))
.and_then(|location| self.get_bucket_by_location_mut(&location))
}

pub fn get_value(&self, point: &(f64, f64)) -> Option<&TEntryValue> {
@@ -157,19 +159,26 @@ impl<TEntryValue: Clone + std::fmt::Debug + Default> SpatialGridMap<TEntryValue>
self.get_value(point).is_some()
}

fn insert_entry(&mut self, entry: BucketEntry<TEntryValue>) -> MutBucketEntry<TEntryValue> {
fn insert_entry(
&mut self,
entry: BucketEntry<TEntryValue>,
update_size_check: bool,
) -> MutBucketEntry<TEntryValue> {
match self.get_location(&entry.point) {
None => {
self.increase_size();
self.insert_entry(entry)
self.insert_entry(entry, update_size_check)
}
Some(location) => {
let point = entry.point;
let size = entry.size;

if self.store.entry(location.key).or_default().insert(entry) {
self.locations.insert(location);
self.update_spacing(size);
self.locations.insert(location.clone());

if update_size_check {
self.update_spacing(size);
}
}

self
Expand All @@ -192,6 +201,7 @@ impl<TEntryValue: Clone + std::fmt::Debug + Default> SpatialGridMap<TEntryValue>
.with_point(point)
.with_value(value)
.with_size(size as f32),
true,
)
}

@@ -264,14 +274,14 @@ impl<TEntryValue: Clone + std::fmt::Debug + Default> SpatialGridMap<TEntryValue>
return;
}
ResizeMethod::Maximum => {
if new_spacing > self.get_spacing() as f32 {
if compare_coordinate(new_spacing as f64, self.get_spacing()) == Ordering::Greater {
self.spacing = Some(new_spacing);
} else {
return;
}
}
ResizeMethod::Minimum => {
if new_spacing < self.get_spacing() as f32 {
if compare_coordinate(new_spacing as f64, self.get_spacing()) == Ordering::Less {
self.spacing = Some(new_spacing);
} else {
return;
@@ -289,14 +299,16 @@ impl<TEntryValue: Clone + std::fmt::Debug + Default> SpatialGridMap<TEntryValue>
.get_mut(&location.key)
.expect("Bucket not found while updating spacing");
let bucket_entry = bucket
.take_entry(&location.point)
.remove(&location.point)
.expect("Bucket entry not found while updating spacing");

self.insert_entry(
BucketEntry::default()
.with_point(location.point)
.with_value(bucket_entry.value)
.with_size(bucket_entry.size)
.with_counters(bucket_entry.counters),
false,
);
}
}
@@ -25,8 +25,8 @@ impl Arrow {
}

fn get_chevron(&self, scale: &Scale) -> Chevron {
let direction = self.line_segment.p2.radian_to(&self.line_segment.p1) - PI * 0.5;
let point = self.line_segment.p2;
let direction = self.line_segment.end.radian_to(&self.line_segment.start) - PI * 0.5;
let point = self.line_segment.end;

Chevron::default()
.with_point(point)
@@ -172,7 +172,7 @@ pub fn get_extended_points_to_bbox(
.with_start(points[0])
.with_end(points[1])
.extend_to_bbox(bbox, extend_start, false);
point = line_segment.p1;
point = line_segment.start;
}

if extend_end && index == points.len() - 1 {
@@ -181,7 +181,7 @@
.with_end(points[points.len() - 1])
.extend_to_bbox(bbox, false, extend_end);

point = line_segment.p2;
point = line_segment.end;
}

extended_points.push(point);
@@ -63,16 +63,17 @@ impl LineSegmentArrows {
while shift < line_segment.length() {
let p1 = line_segment
.set_length(line_segment.length() - shift, LineSegmentOrigin::End)
.p1;
.start;

let p2 = line_segment
.set_length(
line_segment.length() - shift - length - gap_from_line_segment,
LineSegmentOrigin::End,
)
.p1;
.start;

let mut arrow_line_segment = LineSegment { p1, p2 }.rotate(self.direction, Some(&p1));
let mut arrow_line_segment =
LineSegment { start: p1, end: p2 }.rotate(self.direction, Some(&p1));

arrow_line_segment = arrow_line_segment.set_length(
arrow_line_segment.length() - gap_from_line_segment,