Cleaned up warnings. Use cli.work.

Renamed bible to version (since it's the version we're working on).
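
For anyone skimming the diff, the warning cleanup shows up in two places: the unused use core::slice::Iter; import is deleted, and the unused selector binding becomes _span_class_content. A toy example (not from this repository) of how those two rustc lints are silenced:

// Deleting an unused import, as this commit does with core::slice::Iter,
// clears the unused_imports warning.
// A leading underscore marks a binding as intentionally unused, silencing
// unused_variables -- the same trick behind _span_class_content.
fn main() {
    let _kept_for_later = "intentionally unused";
    println!("compiles with no warnings");
}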
Steve Thielemann 11 months ago
parent
commit
b686e0f5c1
1 changed file with 15 additions and 16 deletions

+ 15 - 16
src/main.rs

@@ -1,5 +1,4 @@
 use clap::{Parser, Subcommand};
-use core::slice::Iter;
 use reqwest;
 use scraper;
 use std::{
@@ -23,7 +22,7 @@ struct Cli {
 
     /// Bible Version
     #[arg(short, long, default_value = "ESV")]
-    bible: String,
+    version: String,
 
     #[command(subcommand)]
     command: Option<Commands>,
@@ -81,15 +80,15 @@ static BOOK_MAP: LazyLock<HashMap<&str, usize>> =
     LazyLock::new(|| { 
         HashMap::from_iter(BOOKS.iter().enumerate().map(|x| (*x.1, x.0 + 1)))});
 
-// find_files in base_dir that end with extension bible.
-fn find_files(base_dir: &str, bible: &str) -> Vec<String> {
+// find_files in base_dir that end with extension bible version.
+fn find_files(base_dir: &str, version: &str) -> Vec<String> {
     let paths = std::fs::read_dir(base_dir).unwrap();
     let mut result = Vec::<String>::new();
 
     for path in paths {
         if let Ok(dir) = path {
             let filename = dir.file_name().to_string_lossy().to_string();
-            if filename.ends_with(bible) {
+            if filename.ends_with(version) {
                 result.push(filename);
                 // result.push(dir.path().to_string_lossy().to_string());
             }
@@ -142,9 +141,9 @@ struct FetchResult {
     html: String,
 }
 
-fn fetch_cache(client: &reqwest::blocking::Client, url: &str) -> FetchResult {
+fn fetch_cache(work_dir: &str, client: &reqwest::blocking::Client, url: &str) -> FetchResult {
     let (_, filename) = url.rsplit_once('/').unwrap();
-    let path = Path::new("bible").join(filename);
+    let path = Path::new(work_dir).join(filename);
 
     if path.exists() {
         // File already exists -- use cached version.
@@ -172,8 +171,8 @@ fn main() {
     // println!("Work Dir: {:?}", cli.work);
     // println!("Bible: {:?}", cli.bible);
 
-    if !VERSION_URLS.contains_key(cli.bible.as_str()) {
-        println!("Sorry, I don't know about Bible [{}].", cli.bible);
+    if !VERSION_URLS.contains_key(cli.version.as_str()) {
+        println!("Sorry, I don't know about Bible Version [{}].", cli.version);
         println!("I do know about the following:");
 
         // Keys sorted in order.
@@ -189,12 +188,12 @@ fn main() {
                 .user_agent(APP_USER_AGENT)
                 .build()
                 .unwrap();
-            let mut url = VERSION_URLS[cli.bible.as_str()].to_string();
-            println!("Fetch! [{}] with delay {} secs.", cli.bible, delay);
+            let mut url = VERSION_URLS[cli.version.as_str()].to_string();
+            println!("Fetch! [{}] with delay {} secs.", cli.version, delay);
             let mut more = true;
 
             while more {
-                let result = fetch_cache(&client, url.as_str());
+                let result = fetch_cache(cli.work.as_os_str().to_str().unwrap(), &client, url.as_str());
                 more = false;
 
                 let document = scraper::Html::parse_document(&result.html);
@@ -246,8 +245,8 @@ fn main() {
 
         Some(Commands::Extract { count, all }) => {
             println!("Extract...");
-            let mut files = find_files(cli.work.to_str().unwrap(), cli.bible.as_str());
-            let mut filepath = Path::new(&cli.work);
+            let files = find_files(cli.work.to_str().unwrap(), cli.version.as_str());
+            let filepath = Path::new(&cli.work);
 
             let mut chapters: HashMap<String, String> = HashMap::<String, String>::new();
 
@@ -275,7 +274,7 @@ fn main() {
                 // OK!  ~= probably locates a matching attr line <span class="this that content"> but does not
                 // match <span class="contains_content">!
 
-                let span_class_content =
+                let _span_class_content =
                     scraper::Selector::parse(r#"span[class="ChapterContent_content__RrUqA"]"#)
                         .unwrap();
 
@@ -359,7 +358,7 @@ fn main() {
         Some(Commands::Test {}) => {
             println!("Testing...");
 
-            let path = Path::new("bible").join("GEN.1.NIV");
+            let path = Path::new(&cli.work).join("GEN.1.NIV");
             let buffer = std::fs::read_to_string(path).unwrap();
             let document = scraper::Html::parse_document(&buffer);
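
The fetch_cache hunk above only shows the top of the function, so here is a minimal sketch of the cache-or-fetch flow it implies once the work directory comes from cli.work. Everything past the path.exists() check (the cached read, the network fetch, and the write-back), and any FetchResult fields besides html, are assumptions rather than code from this commit:

use std::path::Path;

struct FetchResult {
    html: String,
}

fn fetch_cache(work_dir: &str, client: &reqwest::blocking::Client, url: &str) -> FetchResult {
    // Cache key is the last path segment of the URL, stored under work_dir.
    let (_, filename) = url.rsplit_once('/').unwrap();
    let path = Path::new(work_dir).join(filename);

    if path.exists() {
        // File already exists -- use cached version instead of hitting the network.
        let html = std::fs::read_to_string(&path).unwrap();
        return FetchResult { html };
    }

    // No cached copy yet: fetch the page and write it back for next time (assumed).
    let html = client.get(url).send().unwrap().text().unwrap();
    std::fs::write(&path, &html).unwrap();
    FetchResult { html }
}

The practical effect is the same one visible in the Test hunk: cached pages now land in whatever directory cli.work points at rather than a hard-coded "bible" folder.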