Skip to content

Commit

Permalink
Fix clippy lints
Browse files Browse the repository at this point in the history
  • Loading branch information
lhvy committed Dec 23, 2024
1 parent 9d87777 commit ce874a7
Show file tree
Hide file tree
Showing 3 changed files with 19 additions and 22 deletions.
9 changes: 3 additions & 6 deletions src/class_scraper.rs
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ impl ClassScraper {
println!("Currently working on {:?}", self.course_code);
let html = fetch_url(&self.url)
.await
.expect(&format!("Something was wrong with the URL: {}", self.url));
.unwrap_or_else(|_| panic!("Something was wrong with the URL: {}", self.url));
let document = scraper::Html::parse_document(&html);

// Selectors
Expand Down Expand Up @@ -215,18 +215,15 @@ fn parse_class_info(class_data: Vec<String>, course_id: String, career: String)
map.get("Class Nbr").unwrap_or(&String::new()),
map.get("Teaching Period")
.unwrap_or(&"".to_string())
.to_string()
.split(" - ")
.next()
.expect("Could not split teaching periods properly!")
.to_string(),
.expect("Could not split teaching periods properly!"),
year,
),
section: map.get("Section").unwrap_or(&"".to_string()).to_string(),
term: map
.get("Teaching Period")
.unwrap_or(&"".to_string())
.to_string()
.split(" - ")
.next()
.expect("Could not split teaching periods properly!")
Expand Down Expand Up @@ -270,7 +267,7 @@ fn parse_class_info(class_data: Vec<String>, course_id: String, career: String)
}

fn parse_meeting_info(vec: &[String], career: String) -> Vec<Time> {
let days = vec!["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"];
let days = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"];
let mut meetings = Vec::new();
let mut iter: Box<dyn Iterator<Item = &String>> = Box::new(vec.iter());

Expand Down
26 changes: 13 additions & 13 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -111,9 +111,9 @@ async fn run_course_classes_page_scraper_job(
courses_vec
}

fn convert_courses_to_json(course_vec: &mut Vec<Course>) -> Vec<serde_json::Value> {
fn convert_courses_to_json(courses: &[Course]) -> Vec<serde_json::Value> {
let mut json_courses = Vec::new();
for course in course_vec.iter() {
for course in courses.iter() {
// Modes come from HashSet, so order is non-deterministic
// This is annoying for our json diff, so quickly sort first
let mut modes = course.modes.iter().collect::<Vec<_>>();
Expand All @@ -138,12 +138,12 @@ fn convert_courses_to_json(course_vec: &mut Vec<Course>) -> Vec<serde_json::Valu
/// Builds a unique identifier for a class meeting time by concatenating the
/// class id with the time's day, location, time-of-day, and weeks fields.
fn generate_time_id(class: &Class, time: &Time) -> String {
    format!(
        "{}{}{}{}{}",
        class.class_id, time.day, time.location, time.time, time.weeks
    )
}
fn convert_classes_times_to_json(course_vec: &mut Vec<Course>) -> Vec<serde_json::Value> {
fn convert_classes_times_to_json(courses: &[Course]) -> Vec<serde_json::Value> {
let mut times_json = Vec::<serde_json::Value>::new();
for course in course_vec.iter() {
for course in courses.iter() {
for class in course.classes.iter() {
if class.times.is_some() {
for time in class.times.as_ref().unwrap().into_iter() {
for time in class.times.as_ref().unwrap().iter() {
times_json.push(json!({
"id": generate_time_id(class, time),
"class_id": class.class_id,
Expand All @@ -161,9 +161,9 @@ fn convert_classes_times_to_json(course_vec: &mut Vec<Course>) -> Vec<serde_json

times_json
}
fn convert_classes_to_json(course_vec: &mut Vec<Course>) -> Vec<serde_json::Value> {
fn convert_classes_to_json(courses: &[Course]) -> Vec<serde_json::Value> {
let mut json_classes = Vec::new();
for course in course_vec.iter() {
for course in courses.iter() {
for class in course.classes.iter() {
json_classes.push(json!({
"course_id": class.course_id,
Expand Down Expand Up @@ -214,9 +214,9 @@ async fn handle_scrape_write_to_file() -> Result<(), Box<dyn Error>> {
.await
.expect("Something went wrong with scraping!");
println!("Writing to disk!");
let json_classes = convert_classes_to_json(&mut course_vec);
let json_courses = convert_courses_to_json(&mut course_vec);
let json_times = convert_classes_times_to_json(&mut course_vec);
let json_classes = convert_classes_to_json(&course_vec);
let json_courses = convert_courses_to_json(&course_vec);
let json_times = convert_classes_times_to_json(&course_vec);

let file_classes = File::create("classes.json")?;
let file_courses = File::create("courses.json")?;
Expand Down Expand Up @@ -259,9 +259,9 @@ async fn handle_scrape_n_batch_insert() -> Result<(), Box<dyn Error>> {
handle_scrape(&mut course_vec, current_year)
.await
.expect("Something went wrong with scraping!");
let json_classes = convert_classes_to_json(&mut course_vec);
let json_courses = convert_courses_to_json(&mut course_vec);
let json_times = convert_classes_times_to_json(&mut course_vec);
let json_classes = convert_classes_to_json(&course_vec);
let json_courses = convert_courses_to_json(&course_vec);
let json_times = convert_classes_times_to_json(&course_vec);
let rfm = ReadFromMemory {
courses_vec: json_courses,
classes_vec: json_classes,
Expand Down
6 changes: 3 additions & 3 deletions src/text_manipulators.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,9 @@ pub fn extract_year(url: &str) -> Option<u32> {
}
}

pub fn mutate_string_to_include_curr_year(curr_base_url: &mut String, year_str: String) -> String {
pub fn mutate_string_to_include_curr_year(curr_base_url: &mut str, year_str: String) -> String {
let pattern = Regex::new("year").unwrap();
pattern.replace(&curr_base_url, year_str).to_string()
pattern.replace(curr_base_url, year_str).to_string()
}

pub fn get_html_link_to_page(year: i32, html_fragment: &str) -> String {
Expand All @@ -29,7 +29,7 @@ pub fn get_html_link_to_page(year: i32, html_fragment: &str) -> String {
}
Err(e) => {
warn!("Timetable URL has NOT been parsed properly from env file and error report: {e}");
return "".to_string();
"".to_string()
}
}
}

0 comments on commit ce874a7

Please sign in to comment.