
Use custom Error for cache.

I see now why you don't use anyhow for libraries --
the type information is erased, so you can't get the original error back out.

I do miss the context anyhow added to errors.  Maybe add that to cache::Error.
Steve Thielemann 2 weeks ago
Commit 96778f50ab
2 changed files with 224 additions and 145 deletions
  1. src/cache.rs  +190 -116
  2. src/main.rs   +34 -29
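
The commit message above wishes for anyhow-style context in cache::Error. Below is a rough sketch of one way that could look -- not part of this commit, just an idea. The Context variant, the ResultExt trait, and its context() method are made up here; only the IOError variant and the From<std::io::Error> impl match what the commit actually adds.

use std::fmt;

#[derive(Debug)]
pub enum Error {
    /// IO error (this variant exists in the commit).
    IOError(std::io::Error),
    /// Hypothetical: wraps another Error with a human-readable note,
    /// similar to anyhow's .context().
    Context(String, Box<Error>),
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Error::IOError(e) => write!(f, "IOError: {}", e),
            // Prefix the inner error with the attached note.
            Error::Context(msg, inner) => write!(f, "{}: {}", msg, inner),
        }
    }
}

impl std::error::Error for Error {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            Error::IOError(e) => Some(e),
            Error::Context(_, inner) => Some(inner.as_ref()),
        }
    }
}

impl From<std::io::Error> for Error {
    fn from(e: std::io::Error) -> Self {
        Self::IOError(e)
    }
}

/// Hypothetical extension trait so call sites can write `.context("...")?`.
pub trait ResultExt<T> {
    fn context(self, msg: &str) -> Result<T, Error>;
}

impl<T, E: Into<Error>> ResultExt<T> for Result<T, E> {
    fn context(self, msg: &str) -> Result<T, Error> {
        self.map_err(|e| Error::Context(msg.to_string(), Box::new(e.into())))
    }
}

With something like that, a call site could read File::open(filename).context("open cache file")?, keeping roughly the ergonomics of anyhow while the concrete error type stays matchable.
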

src/cache.rs  +190 -116

@@ -1,10 +1,14 @@
 // use sha256;
-use anyhow::{Context, Result, bail};
+
 use std::fs::{File, create_dir_all, read_dir, remove_file};
 use std::io::{BufRead, BufReader, Write};
 use std::path::PathBuf;
+use std::result::Result;
 use std::time::{Duration, SystemTime};
 use url::Url;
+// Error trait, for implementing std::error::Error on our Error type.
+use std::error::Error as StdError;
+use std::fmt;
 
 #[deny(missing_docs)]
 // #[warn(missing_docs)]
@@ -13,11 +17,12 @@ use url::Url;
 ///
 /// This can fail if Url is unable to parse, or Url is unable to join.
 #[must_use]
-pub fn relative_to_absolute(base_url: &str, relative_href: &str) -> Result<String> {
-    let base_url = Url::parse(base_url).context(format!("Url::parse({})", base_url))?;
-    let new_url = base_url
-        .join(relative_href)
-        .context(format!("join({})", relative_href))?;
+pub fn relative_to_absolute(
+    base_url: &str,
+    relative_href: &str,
+) -> Result<String, url::ParseError> {
+    let base_url = Url::parse(base_url)?;
+    let new_url = base_url.join(relative_href)?;
     Ok(new_url.to_string())
 }
 
@@ -33,7 +38,7 @@ pub fn save_headermap(
     filename: &str,
     url: &str,
     header: &reqwest::header::HeaderMap,
-) -> Result<()> {
+) -> Result<(), std::io::Error> {
     let mut fp = File::create(filename)?;
 
     fp.write_all(format!("url: {}\n", url).as_bytes())?;
@@ -48,7 +53,7 @@ pub fn save_headermap(
 /// Load reqwest::header::HeaderMap from file.
 ///
 /// This will have the url of the original call in the "url" section.
-pub fn load_headermap(filename: &str) -> Result<reqwest::header::HeaderMap> {
+pub fn load_headermap(filename: &str) -> Result<reqwest::header::HeaderMap, std::io::Error> {
     let fp = File::open(filename)?;
     let mut buffer = BufReader::new(fp);
     let mut line = String::new();
@@ -102,28 +107,58 @@ pub enum Status {
     Fetched(PathBuf),
     /// File was retrieved from cache.
     Cached(PathBuf),
-    /// Reqwest error (unable to connect)
-    Error(reqwest::Error),
+}
+
+impl Status {
+    /// Return the PathBuf (always present; error cases now live in Error).
+    pub fn download_path(&self) -> &PathBuf {
+        match self {
+            Status::Fetched(path) | Status::Cached(path) => {
+                return path;
+            }
+        }
+    }
+}
+
+#[derive(Debug)]
+pub enum Error {
+    /// Reqwest error (unable to connect).
+    ReqwestError(reqwest::Error),
+    /// IO error from the filesystem cache.
+    IOError(std::io::Error),
     /// Content-Type wasn't allowed, see Cache.accept.
     Unacceptable(String), // Content-Type
     /// Content was too big, see Cache.max_size.
     TooBig(u64),
     /// HTTP Error/status code.
-    ErrorStatus(u16),
+    HttpErrorStatus(u16),
 }
 
-impl Status {
-    /// Return pathbuf, if Status was success.
-    pub fn download_path(&self) -> Option<&PathBuf> {
+impl From<std::io::Error> for Error {
+    fn from(e: std::io::Error) -> Self {
+        Self::IOError(e)
+    }
+}
+
+impl From<reqwest::Error> for Error {
+    fn from(e: reqwest::Error) -> Self {
+        Self::ReqwestError(e)
+    }
+}
+
+impl fmt::Display for Error {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
-            Status::Fetched(path) | Status::Cached(path) => {
-                return Some(path);
-            }
-            _ => None,
+            Error::ReqwestError(e) => write!(f, "ReqwestError: {:?}", e),
+            Error::IOError(e) => write!(f, "IOError: {:?}", e),
+            Error::Unacceptable(ct) => write!(f, "Content-Type {} not allowed", ct),
+            Error::TooBig(size) => write!(f, "Content-Size {} too big", size),
+            Error::HttpErrorStatus(status) => write!(f, "Status Code: {}", status)
         }
     }
 }
 
+impl StdError for Error {}
+
 /*
 Some possible content-type values:  We're only interested in a few of these...
 
@@ -143,20 +178,28 @@ static HEADER_EXT: &str = ".header";
 
 impl Cache {
     /// Construct Cache using given directory for caching, and useragent.
-    pub fn new(dir: PathBuf, useragent: Option<&str>) -> Result<Self> {
+    pub fn new(dir: PathBuf, useragent: Option<&str>) -> Result<Self, Error> {
         // Verify the directory exists
 
         let path = dir.as_path();
         if path.exists() {
             if !path.is_dir() {
                 // It exists, but it isn't a directory!  What?!
-                bail!(
-                    "Can't create Cache dir {}, it already exists.",
-                    dir.display()
+                let e = std::io::Error::new(
+                    std::io::ErrorKind::Other,
+                    format!(
+                        "Can't create Cache dir {}, it already exists.",
+                        dir.display()
+                    ),
                );
+                return Err(Error::IOError(e));
             }
         } else {
-            create_dir_all(path).context(format!("Create cache dir {}", path.display()))?;
+            match create_dir_all(path) {
+                Err(e) => {
+                    return Err(Error::IOError(e));
+                }
+                Ok(_) => {}
+            }
         }
 
         let user_agent = if let Some(ua) = useragent {
@@ -166,17 +209,22 @@ impl Cache {
         };
 
         // This is where we select async or blocking.
-        let client = reqwest::blocking::Client::builder()
+        match reqwest::blocking::Client::builder()
             .user_agent(user_agent)
-            .build()?;
-
-        Ok(Self {
-            directory: dir,
-            client: client,
-            accept: vec![], // Accept all content-type.
-            max_size: None, // Accept any sized content.
-                            // Some(256 * 1024 * 1024), // 256 MB
-        })
+            .build()
+        {
+            Ok(client) => {
+                Ok(Self {
+                    directory: dir,
+                    client: client,
+                    accept: vec![], // Accept all content-type.
+                    max_size: None, // Some(256 * 1024 * 1024), // 256 MB
+                })
+            }
+            Err(e) => {
+                return Err(Error::ReqwestError(e));
+            }
+        }
     }
 
     #[allow(dead_code)]
@@ -190,7 +238,7 @@ impl Cache {
     }
 
     /// Create safe filename from url for header/content files.
-    pub fn url_to_basename(url: &str) -> Result<String> {
+    pub fn url_to_basename(url: &str) -> String {
         let filename = if url.ends_with("/") {
             ""
         } else {
@@ -213,9 +261,9 @@ impl Cache {
             if path.ends_with("-") {
                 path.pop();
             }
-            return Ok(path);
+            return path;
         }
-        Ok(filename.to_string())
+        filename.to_string()
     }
 
     /// Expire files in the cache older then given age
@@ -223,7 +271,7 @@ impl Cache {
     /// Use DirEntry.modified, since it updates when a file is freshened/downloaded.
     /// DirEntry.created isn't updated when file is rewritten.
     #[allow(dead_code)]
-    pub fn expire(&self, age: Duration) -> Result<bool> {
+    pub fn expire(&self, age: Duration) -> Result<bool, Error> {
         let now = SystemTime::now();
         let mut result: bool = false;
 
@@ -295,7 +343,7 @@ impl Cache {
     pub fn filename_for_url(&self, url: &str) -> PathBuf {
         self.directory
             .as_path()
-            .join(Self::url_to_basename(url).unwrap())
+            .join(Self::url_to_basename(url))
     }
 
     /// Given a url, return an open file
@@ -352,14 +400,16 @@ impl Cache {
     ///
     /// This deletes the .header cache file, which forces a fetch.
     #[allow(dead_code)]
-    pub fn fetch_nocache(&self, url: &str) -> Result<Status> {
+    pub fn fetch_nocache(&self, url: &str) -> Result<Status, Error> {
         let mut base = self.filename_for_url(url);
         Self::append_to_filename(&mut base, HEADER_EXT);
         if base.exists() {
-            let r = remove_file(&base);
-            // unlink failed - panic.
-            if r.is_err() {
-                panic!("Unlink {base:?}: {r:?}");
+            match remove_file(&base) {
+                Err(e) => {
+                    // unlink failed
+                    return Err(Error::IOError(e));
+                }
+                Ok(_) => {}
             }
         }
         return self.fetch(url);
@@ -372,7 +422,7 @@ impl Cache {
     ///
     /// This returns Status, which could be Fetched or Cached copy (among other things).
     #[must_use]
-    pub fn fetch(&self, url: &str) -> Result<Status> {
+    pub fn fetch(&self, url: &str) -> Result<Status, Error> {
         let base = self.filename_for_url(url);
         /*
         let base = self
@@ -389,85 +439,110 @@ impl Cache {
 
         if header_file.exists() {
             // Ok! We have existing information.  Retrieve it.
-            let old_header = load_headermap(header_file.to_str().unwrap()).unwrap();
-
-            // Look for: ETag, Last-Modified
-            if let Some(lastmod) = old_header.get("Last-Modified") {
-                builder = builder.header("If-Modified-Since", lastmod);
-            } else if let Some(date) = old_header.get("Date") {
-                // Keep trying...
-                builder = builder.header("If-Modified-Since", date);
-            }
+            match load_headermap(header_file.to_str().unwrap()) {
+                Ok(old_header) => {
+                    // Look for: ETag, Last-Modified
+                    if let Some(lastmod) = old_header.get("Last-Modified") {
+                        builder = builder.header("If-Modified-Since", lastmod);
+                    } else if let Some(date) = old_header.get("Date") {
+                        // Keep trying...
+                        builder = builder.header("If-Modified-Since", date);
+                    }
 
-            if let Some(etag) = old_header.get("etag") {
-                builder = builder.header("If-None-Match", etag);
+                    if let Some(etag) = old_header.get("etag") {
+                        builder = builder.header("If-None-Match", etag);
+                    }
+                }
+                Err(e) => {
+                    return Err(Error::IOError(e));
+                }
             }
         };
 
-        let mut result = builder.send()?;
-
-        if result.status() == 304 {
-            // Cache hit!
-            return Ok(Status::Cached(base));
-        }
+        match builder.send() {
+            Ok(mut result) => {
+                if result.status() == 304 {
+                    // Cache hit!
+                    return Ok(Status::Cached(base));
+                }
 
-        // Ok!  Success!
-        if result.status() == 200 {
-            // Success!
-
-            // When caching fails ―
-            //
-            // If content_length (from previous fetch) matches current?
-            // Could we assume it hasn't changed, and just use cache?
-            // Or would that be a big assumption?
-
-            // Only check content_length size, if we have been
-            // given a max_size.
-            if let Some(max_size) = self.max_size {
-                if let Some(len) = result.content_length() {
-                    if len > max_size {
-                        // Is there a way to abort this safely?  Apparently yes! :D
-
-                        // let byte = Byte::from_u64(len);
-                        // let adjusted_byte = byte.get_appropriate_unit(UnitType::Binary);
-                        // println!("Too Big! {adjusted_byte:.2} {}", url);
-                        return Ok(Status::TooBig(len));
+                // Ok!  Success!
+                if result.status() == 200 {
+                    // Success!
+
+                    // When caching fails ―
+                    //
+                    // If content_length (from previous fetch) matches current?
+                    // Could we assume it hasn't changed, and just use cache?
+                    // Or would that be a big assumption?
+
+                    // Only check content_length size, if we have been
+                    // given a max_size.
+                    if let Some(max_size) = self.max_size {
+                        if let Some(len) = result.content_length() {
+                            if len > max_size {
+                                // Is there a way to abort this safely?  Apparently yes! :D
+
+                                // let byte = Byte::from_u64(len);
+                                // let adjusted_byte = byte.get_appropriate_unit(UnitType::Binary);
+                                // println!("Too Big! {adjusted_byte:.2} {}", url);
+                                return Err(Error::TooBig(len));
+                            }
+                        }
                     }
-                }
-            }
 
-            // Only check acceptable content_types if given.
-            if !self.accept.is_empty() {
-                if let Some(content_type) = result.headers().get("content-type") {
-                    // Check to see if accepted content.
-                    let mut ct = content_type.to_str().unwrap();
-                    let possible = content_type.to_str().unwrap().split_once(';');
-                    if let Some((ct_part, _)) = possible {
-                        ct = ct_part;
+                    // Only check acceptable content_types if given.
+                    if !self.accept.is_empty() {
+                        if let Some(content_type) = result.headers().get("content-type") {
+                            // Check to see if accepted content.
+                            let mut ct = content_type.to_str().unwrap();
+                            let possible = content_type.to_str().unwrap().split_once(';');
+                            if let Some((ct_part, _)) = possible {
+                                ct = ct_part;
+                            }
+                            if !self.accept.contains(&ct.to_string()) {
+                                // println!("Unacceptable content-type {} {}", ct, url);
+                                return Err(Error::Unacceptable(ct.to_string()));
+                            }
+                        }
                     }
-                    if !self.accept.contains(&ct.to_string()) {
-                        // println!("Unacceptable content-type {} {}", ct, url);
-                        return Ok(Status::Unacceptable(ct.to_string()));
+
+                    match save_headermap(header_file.to_str().unwrap(), url, result.headers()) {
+                        Err(e) => {
+                            return Err(Error::IOError(e));
+                        }
+                        Ok(()) => {}
                     }
-                }
-            }
 
-            save_headermap(header_file.to_str().unwrap(), url, result.headers())
-                .context("save_headermap: {header_file}")?;
+                    match File::create(base.to_str().unwrap()) {
+                        Ok(mut fp) => match result.copy_to(&mut fp) {
+                            Ok(_) => {}
+                            Err(e) => {
+                                return Err(Error::ReqwestError(e));
+                            }
+                        }
+                        Err(e) => {
+                            return Err(Error::IOError(e));
+                        }
+                    }
 
-            let mut fp = File::create(base.to_str().unwrap())?;
-            result.copy_to(&mut fp)?;
+                    // result.copy_to(&mut fp)?;
 
-            /*  // async
-            while let Ok(Some(chunk)) = result.chunk().await {
-                let _ = fp.write(&chunk);
+                    /*  // async
+                    while let Ok(Some(chunk)) = result.chunk().await {
+                        let _ = fp.write(&chunk);
+                    }
+                    */
+                    return Ok(Status::Fetched(base));
+                } else {
+                    // Status error
+                    // println!("Error {} {}", result.status(), url);
+                    return Err(Error::HttpErrorStatus(u16::from(result.status())));
+                }
+            }
+            Err(e) => {
+                return Err(Error::ReqwestError(e));
             }
-            */
-            return Ok(Status::Fetched(base));
-        } else {
-            // Status error
-            // println!("Error {} {}", result.status(), url);
-            return Ok(Status::ErrorStatus(u16::from(result.status())));
         }
     }
 }
@@ -547,11 +622,8 @@ mod tests {
 
         for (url, base) in url_base {
             // Verify url_to_basename.
-            if let Ok(basename) = Cache::url_to_basename(url) {
+            let basename = Cache::url_to_basename(url);
                 assert_eq!(base, basename, "{} -> {}", url, base);
-            } else {
-                panic!("filename_for_url({}) failed.", url);
-            }
             // Verify filename_for_url.
             let path = cache.filename_for_url(url);
             let mut newpath = temp.clone();
@@ -610,8 +682,10 @@ mod tests {
             } else {
                 panic!("Cache Status is not Status::Fetched, is: {:?}", r);
             }
+        } else {
+            panic!("cache.fetch: {:?}", r);
         }
-        // println!("Dir: {:?}, Status: {:?}", t, r); // r has been partially moved.
+
     }
 
     /*
@@ -645,7 +719,7 @@ mod tests {
 
         let r = cache.fetch(&teapot_url);
-        if let Ok(r) = r {
-            if let Status::ErrorStatus(code) = r {
-                assert_eq!(code, 418);
-            } else {
-                panic!("Not an ErrorStatus");
+        // A 418 now comes back as Err(Error::HttpErrorStatus), not Ok(Status).
+        if let Err(Error::HttpErrorStatus(code)) = r {
+            assert_eq!(code, 418);
+        } else {
+            panic!("Not an HttpErrorStatus");

src/main.rs  +34 -29

@@ -242,42 +242,47 @@ fn main() -> Result<()> {
             // Since the go.dev site doesn't allow caching or knowing when it changes,
             // we always end up in Status::Fetched arm.
 
-            if let Some(filename) = status.download_path() {
-                // Make this function the same whether or not we have a cache hit.
-
-                let fp = File::open(filename)?;
-                let link = find_arch_link(&go_os_arch, &fp);
-
-                if let Ok(relative) = link {
-                    // Download link for arch located.  Make absolute URL.
-                    let latest_version = version_from_url(&relative, &go_os_arch);
-                    if let Some(latest) = latest_version {
-                        println!("Version: {} [have {}]", latest, go_version);
-                        if go_version != latest {
-                            println!("Downloading newer version...");
-                            let abs = relative_to_absolute(GO_URL, &relative).unwrap();
-
-                            let latest_status = cache.fetch(&abs)?;
-                            if let Some(update) = latest_status.download_path() {
-                                // Ok, we have the update!  Now what?
-                                println!("Ready to install update.");
+            let filename = status.download_path();
 
-                            } else {
-                                println!("Download failed: {:?}", latest_status);
-                            }
-                            // println!("Latest: {:?}", latest_status);
-                        } else {
-                            println!("You're already good to GO.");
-                        }
+            // Make this function the same whether or not we have a cache hit.
+
+            let fp = File::open(filename)?;
+            let link = find_arch_link(&go_os_arch, &fp);
+
+            if let Ok(relative) = link {
+                // Download link for arch located.  Make absolute URL.
+                let latest_version = version_from_url(&relative, &go_os_arch);
+                if let Some(latest) = latest_version {
+                    println!("Version: {} [have {}]", latest, go_version);
+                    if go_version != latest {
+                        println!("Downloading newer version...");
+                        let abs = relative_to_absolute(GO_URL, &relative).unwrap();
+
+                        let latest_status = cache.fetch(&abs)?;
+                        let _update = latest_status.download_path();
+
+                        // Ok, we have the update!  Now what?
+                        println!("Ready to install update.");
+                        /*
+                        Clear GOROOT.
+                        mkdir_all(GOROOT).
+                        If GOPATH exists
+                            IF GOPATH != GOROOT, clear it.
+                            mkdir_all(GOPATH)
+                        Untarball go into GOROOT.
+                        Check/Verify go version?
+                         */
+                        // println!("Latest: {:?}", latest_status);
                     } else {
-                        println!("Finding version failed: [{}]", relative);
+                        println!("You're already good to GO.");
                     }
                 } else {
-                    bail!("Unable to locate download link");
+                    println!("Finding version failed: [{}]", relative);
                 }
             } else {
-                println!("Status = {:?}", status);
+                bail!("Unable to locate download link");
             }
+
             /*
             match status {
                 cache::Status::Fetched(filename) |