@@ -9,15 +9,16 @@ use std::path::Path;
 use std::str::FromStr;
 use std::time::Duration;
 
-use anyhow::{Context, Result, anyhow, bail};
 use globset::GlobSet;
 use hard_xml::XmlRead;
 use log::{debug, info, warn};
-use reqwest::{StatusCode, blocking::Client, redirect::Policy};
+use reqwest::{blocking::Client, redirect::Policy};
 use url::Url;
 
 use crate::{Package, PackageStatus};
 use omaha::{Sha1Digest, Sha256Digest};
+use crate::error::Error;
+use crate::Result;
 
 const DOWNLOAD_TIMEOUT: u64 = 3600;
 const HTTP_CONN_TIMEOUT: u64 = 20;
@@ -33,9 +34,9 @@ pub struct DownloadResult {
 }
 
 pub fn hash_on_disk<T: omaha::Hasher>(path: &Path, maxlen: Option<usize>) -> Result<T::Output> {
-    let file = File::open(path).context(format!("File::open({:?})", path))?;
+    let file = File::open(path).map_err(Error::OpenFile)?;
 
-    let filelen = file.metadata().context(format!("failed to get metadata of {:?}", path))?.len() as usize;
+    let filelen = file.metadata().map_err(Error::GetFileMetadata)?.len() as usize;
 
     let mut maxlen_to_read: usize = match maxlen {
         Some(len) => {
@@ -61,7 +62,7 @@ pub fn hash_on_disk<T: omaha::Hasher>(path: &Path, maxlen: Option<usize>) -> Res
             databuf.truncate(maxlen_to_read);
         }
 
-        freader.read_exact(&mut databuf).context(format!("failed to read_exact(chunklen {:?})", databuf.len()))?;
+        freader.read_exact(&mut databuf).map_err(Error::ReadFromFile)?;
 
         maxlen_to_read -= databuf.len();
 
@@ -78,33 +79,23 @@ where
 {
     let client_url = url.clone();
 
-    #[rustfmt::skip]
-    let mut res = client.get(url.clone())
-        .send()
-        .context(format!("client get & send{:?} failed ", client_url.as_str()))?;
+    let mut res = client.get(url.clone()).send().map_err(|err| Error::SendGetRequest(url.into(), err))?;
 
     // Redirect was already handled at this point, so there is no need to touch
     // response or url again. Simply print info and continue.
     if <U as Into<Url>>::into(client_url) != *res.url() {
         info!("redirected to URL {:?}", res.url());
     }
 
-    // Return immediately on download failure on the client side.
-    let status = res.status();
-
-    if !status.is_success() {
-        match status {
-            StatusCode::FORBIDDEN | StatusCode::NOT_FOUND => {
-                bail!("cannnot fetch remotely with status code {:?}", status);
-            }
-            _ => bail!("general failure with status code {:?}", status),
-        }
+    match res.status() {
+        status if !status.is_success() => return Err(Error::GetRequestFailed(status)),
+        _ => {}
     }
 
     println!("writing to {}", path.display());
 
-    let mut file = File::create(path).context(format!("failed to create path ({:?})", path.display()))?;
-    res.copy_to(&mut file)?;
+    let mut file = File::create(path).map_err(Error::CreateFile)?;
+    res.copy_to(&mut file).map_err(Error::CopyRequestBodyToFile)?;
 
     let calculated_sha256 = hash_on_disk::<omaha::Sha256>(path, None)?;
     let calculated_sha1 = hash_on_disk::<omaha::Sha1>(path, None)?;
@@ -116,11 +107,14 @@ where
     debug!(" calculated sha1: {calculated_sha1:?}");
     debug!(" sha1 match? {}", expected_sha1 == Some(calculated_sha1));
 
-    if expected_sha256.is_some() && expected_sha256 != Some(calculated_sha256) {
-        bail!("checksum mismatch for sha256");
+    match expected_sha256 {
+        Some(exp) if exp != calculated_sha256 => return Err(Error::Sha256ChecksumMismatch(exp, calculated_sha256)),
+        _ => {}
     }
-    if expected_sha1.is_some() && expected_sha1 != Some(calculated_sha1) {
-        bail!("checksum mismatch for sha1");
+
+    match expected_sha1 {
+        Some(exp) if exp != calculated_sha1 => return Err(Error::Sha1ChecksumMismatch(exp, calculated_sha1)),
+        _ => {}
     }
 
     Ok(DownloadResult {
@@ -189,13 +183,13 @@ where
     U: reqwest::IntoUrl + From<U> + std::clone::Clone + std::fmt::Debug,
     Url: From<U>,
 {
-    let r = download_and_hash(client, input_url.clone(), path, None, None).context(format!("unable to download data(url {input_url:?})"))?;
+    let r = download_and_hash(client, input_url.clone(), path, None, None)?;
 
     Ok(Package {
         name: Cow::Borrowed(path.file_name().unwrap_or(OsStr::new("fakepackage")).to_str().unwrap_or("fakepackage")),
         hash_sha256: Some(r.hash_sha256),
         hash_sha1: Some(r.hash_sha1),
-        size: r.data.metadata().context(format!("failed to get metadata, path ({:?})", path.display()))?.len() as usize,
+        size: r.data.metadata().map_err(Error::GetFileMetadata)?.len() as usize,
         url: input_url.into(),
         status: PackageStatus::Unverified,
     })
@@ -204,19 +198,19 @@ where
 fn do_download_verify(pkg: &mut Package<'_>, output_filename: Option<String>, output_dir: &Path, unverified_dir: &Path, pubkey_file: &str, client: &Client) -> Result<()> {
     pkg.check_download(unverified_dir)?;
 
-    pkg.download(unverified_dir, client).context(format!("unable to download \"{:?}\"", pkg.name))?;
+    pkg.download(unverified_dir, client)?;
 
     // Unverified payload is stored in e.g. "output_dir/.unverified/oem.gz".
     // Verified payload is stored in e.g. "output_dir/oem.raw".
     let pkg_unverified = unverified_dir.join(&*pkg.name);
     let mut pkg_verified = output_dir.join(output_filename.as_ref().map(OsStr::new).unwrap_or(pkg_unverified.with_extension("raw").file_name().unwrap_or_default()));
     pkg_verified.set_extension("raw");
 
-    let datablobspath = pkg.verify_signature_on_disk(&pkg_unverified, pubkey_file).context(format!("unable to verify signature \"{}\"", pkg.name))?;
+    let datablobspath = pkg.verify_signature_on_disk(&pkg_unverified, pubkey_file)?;
 
     // write extracted data into the final data.
     debug!("data blobs written into file {pkg_verified:?}");
-    fs::rename(datablobspath, pkg_verified)?;
+    fs::rename(datablobspath, pkg_verified).map_err(Error::RenameFile)?;
 
     Ok(())
 }
@@ -264,27 +258,28 @@ impl DownloadVerify {
 
         let unverified_dir = output_dir.join(UNVERFIED_SUFFIX);
         let temp_dir = output_dir.join(TMP_SUFFIX);
-        fs::create_dir_all(&unverified_dir)?;
-        fs::create_dir_all(&temp_dir)?;
+        fs::create_dir_all(&unverified_dir).map_err(Error::CreateDirectory)?;
+        fs::create_dir_all(&temp_dir).map_err(Error::CreateDirectory)?;
 
         // The default policy of reqwest Client supports max 10 attempts on HTTP redirect.
         let client = Client::builder()
             .tcp_keepalive(Duration::from_secs(HTTP_CONN_TIMEOUT))
             .connect_timeout(Duration::from_secs(HTTP_CONN_TIMEOUT))
             .timeout(Duration::from_secs(DOWNLOAD_TIMEOUT))
             .redirect(Policy::default())
-            .build()?;
+            .build()
+            .map_err(Error::BuildClient)?;
 
         if self.payload_url.is_some() {
             let url = self.payload_url.clone().unwrap();
-            let u = Url::parse(&url)?;
-            let fname = u.path_segments().ok_or(anyhow!("failed to get path segments, url ({:?})", u))?.next_back().ok_or(anyhow!("failed to get path segments, url ({:?})", u))?;
+            let u = Url::parse(&url).map_err(Error::ParseUrl)?;
+            let fname = u.path_segments().ok_or(Error::InvalidBaseUrl(u.clone()))?.next_back().ok_or(Error::EmptyUrlIterator)?;
             let mut pkg_fake: Package;
 
             let temp_payload_path = unverified_dir.join(fname);
             pkg_fake = fetch_url_to_file(
                 &temp_payload_path,
-                Url::from_str(url.as_str()).context(anyhow!("failed to convert into url ({:?})", self.payload_url))?,
+                Url::from_str(url.as_str()).map_err(Error::ParseUrl)?,
                 &client,
             )?;
             do_download_verify(
@@ -303,7 +298,7 @@ impl DownloadVerify {
         ////
         // parse response
         ////
-        let resp = omaha::Response::from_str(&self.input_xml)?;
+        let resp = omaha::Response::from_str(&self.input_xml).map_err(Error::InvalidHashDigestString)?;
 
         let mut pkgs_to_dl = get_pkgs_to_download(&resp, &self.glob_set)?;
 
@@ -329,7 +324,7 @@ impl DownloadVerify {
         }
 
         // clean up data
-        fs::remove_dir_all(temp_dir)?;
+        fs::remove_dir_all(temp_dir).map_err(Error::RemoveDirectory)?;
 
         Ok(())
     }
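
Note: the `Error` variants and the `crate::Result` alias used throughout this diff live in a separate `error` module that is not shown here. For context, below is a rough sketch only of what that module could look like, assuming a `thiserror`-derived enum; the variant payload types are inferred from the `map_err` call sites above, and the error messages are placeholders rather than the project's actual wording.

// src/error.rs: hypothetical sketch, assuming the thiserror crate. Variant
// payload types are inferred from how each constructor is used with map_err
// in the diff; message strings are illustrative only.
use reqwest::StatusCode;
use url::Url;

use omaha::{Sha1Digest, Sha256Digest};

#[derive(Debug, thiserror::Error)]
pub enum Error {
    #[error("failed to open file: {0}")]
    OpenFile(std::io::Error),
    #[error("failed to get file metadata: {0}")]
    GetFileMetadata(std::io::Error),
    #[error("failed to read from file: {0}")]
    ReadFromFile(std::io::Error),
    #[error("failed to send GET request to {0}: {1}")]
    SendGetRequest(Url, reqwest::Error),
    #[error("GET request failed with status code {0}")]
    GetRequestFailed(StatusCode),
    #[error("failed to create file: {0}")]
    CreateFile(std::io::Error),
    #[error("failed to copy response body to file: {0}")]
    CopyRequestBodyToFile(reqwest::Error),
    #[error("sha256 checksum mismatch: expected {0:?}, calculated {1:?}")]
    Sha256ChecksumMismatch(Sha256Digest, Sha256Digest),
    #[error("sha1 checksum mismatch: expected {0:?}, calculated {1:?}")]
    Sha1ChecksumMismatch(Sha1Digest, Sha1Digest),
    #[error("failed to rename file: {0}")]
    RenameFile(std::io::Error),
    #[error("failed to create directory: {0}")]
    CreateDirectory(std::io::Error),
    #[error("failed to remove directory: {0}")]
    RemoveDirectory(std::io::Error),
    #[error("failed to build HTTP client: {0}")]
    BuildClient(reqwest::Error),
    #[error("failed to parse URL: {0}")]
    ParseUrl(url::ParseError),
    #[error("URL has no path segments: {0}")]
    InvalidBaseUrl(Url),
    #[error("URL path segment iterator is empty")]
    EmptyUrlIterator,
    // Payload type assumed here; it would be whatever error type
    // omaha::Response::from_str actually returns.
    #[error("failed to parse Omaha response: {0}")]
    InvalidHashDigestString(hard_xml::XmlError),
}

// Crate-level alias matching the `use crate::Result` import in the diff,
// presumably defined in src/lib.rs:
pub type Result<T> = std::result::Result<T, Error>;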