From 384188a45c8ce8ff1aab7d4f4f699e635b0a5b62 Mon Sep 17 00:00:00 2001 From: Dongsu Park Date: Mon, 30 Oct 2023 15:52:18 +0100 Subject: [PATCH 1/3] update-format-crau: check result of verify_rsa_pkcs We should check error from verify_rsa_pkcs to handle errors correctly. --- update-format-crau/src/delta_update.rs | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/update-format-crau/src/delta_update.rs b/update-format-crau/src/delta_update.rs index 18704e1..4a95809 100644 --- a/update-format-crau/src/delta_update.rs +++ b/update-format-crau/src/delta_update.rs @@ -1,7 +1,7 @@ use std::io::{Read, Seek, SeekFrom}; use std::error::Error; use std::fs::File; -use log::debug; +use log::{error, debug}; use protobuf::Message; @@ -125,8 +125,14 @@ pub fn verify_sig_pubkey(testdata: &[u8], sig: &Signature, pubkeyfile: &str) -> debug!("special_fields: {:?}", sig.special_fields()); // verify signature with pubkey - _ = verify_sig::verify_rsa_pkcs(testdata, sig.data(), get_public_key_pkcs_pem(pubkeyfile, KeyTypePkcs8)); - _ = pubkeyfile; + let res_verify = verify_sig::verify_rsa_pkcs(testdata, sig.data(), get_public_key_pkcs_pem(pubkeyfile, KeyTypePkcs8)); + match res_verify { + Ok(res_verify) => res_verify, + Err(err) => { + error!("verify_rsa_pkcs signature ({}) failed with {}", sig, err); + return None; + } + }; sigvec.cloned() } From b64f4796cb3981a7fa04f764d9211a1e667d4147 Mon Sep 17 00:00:00 2001 From: Dongsu Park Date: Thu, 9 Nov 2023 15:27:20 +0100 Subject: [PATCH 2/3] download_sysext: skip checking for downloads if that does not exist We need to skip checking for existing downloads, if the file does not exist. Otherwise, check_download will simply fail in the beginning, due to missing files. 
--- src/bin/download_sysext.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/bin/download_sysext.rs b/src/bin/download_sysext.rs index aebfeb0..2bd7680 100644 --- a/src/bin/download_sysext.rs +++ b/src/bin/download_sysext.rs @@ -52,6 +52,13 @@ impl<'a> Package<'a> { #[rustfmt::skip] fn check_download(&mut self, in_dir: &Path) -> Result<(), Box> { let path = in_dir.join(&*self.name); + + if !path.exists() { + // skip checking for existing downloads + info!("{} does not exist, skipping existing downloads.", path.display()); + return Ok(()); + } + let md = fs::metadata(&path)?; let size_on_disk = md.len() as usize; From 0088652408ab1af38f0a81101872bd9a20455edd Mon Sep 17 00:00:00 2001 From: Dongsu Park Date: Thu, 16 Nov 2023 09:27:48 +0100 Subject: [PATCH 3/3] download_sysext: fix issues of reading into buffer Fix bugs when reading from File to buffer. We need to first create a BufReader for reading from the buffer, pass that into parsing functions. That would make the code much easier to maintain, instead of passing File itself. Then we can read data without having to first open the file and track read positions. We need to get length of header and data, reading from the beginning of the stream including the whole data including delta update header as well as manifest. And pass the length to hash_on_disk to calculate the hash without having to read the whole data into memory. Doing that, signature verification works well. Also introduce get_data_blob() to read only data without header, manifest. 
--- Cargo.lock | 44 +++++++--- Cargo.toml | 2 + omaha/src/hash_types.rs | 8 ++ src/bin/download_sysext.rs | 109 +++++++++++++++++++++---- test/crau_verify.rs | 44 ++++++++-- update-format-crau/Cargo.toml | 1 + update-format-crau/src/delta_update.rs | 85 ++++++++++++++++--- update-format-crau/src/verify_sig.rs | 21 ++++- 8 files changed, 264 insertions(+), 50 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a78bab0..df2673f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -139,6 +139,27 @@ version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" +[[package]] +name = "bzip2" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bdb116a6ef3f6c3698828873ad02c3014b3c85cadb88496095628e3ef1e347f8" +dependencies = [ + "bzip2-sys", + "libc", +] + +[[package]] +name = "bzip2-sys" +version = "0.1.11+1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc" +dependencies = [ + "cc", + "libc", + "pkg-config", +] + [[package]] name = "cc" version = "1.0.83" @@ -581,9 +602,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.149" +version = "0.2.150" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" +checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" [[package]] name = "libm" @@ -593,9 +614,9 @@ checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4" [[package]] name = "linux-raw-sys" -version = "0.4.10" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f" +checksum = "969488b55f8ac402214f3f5fd243ebb7206cf82de60d3172994707a4bcc2b829" [[package]] name = "log" @@ -909,9 +930,9 @@ 
dependencies = [ [[package]] name = "redox_syscall" -version = "0.3.5" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" dependencies = [ "bitflags 1.3.2", ] @@ -1013,9 +1034,9 @@ checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" [[package]] name = "rustix" -version = "0.38.20" +version = "0.38.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67ce50cb2e16c2903e30d1cbccfd8387a74b9d4c938b6a4c5ec6cc7556f7a8a0" +checksum = "ffb93593068e9babdad10e4fce47dc9b3ac25315a72a59766ffd9e9a71996a04" dependencies = [ "bitflags 2.4.0", "errno", @@ -1207,9 +1228,9 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.8.0" +version = "3.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" +checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5" dependencies = [ "cfg-if", "fastrand", @@ -1357,6 +1378,7 @@ name = "ue-rs" version = "0.1.0" dependencies = [ "argh", + "bzip2", "env_logger", "globset", "hard-xml", @@ -1365,6 +1387,7 @@ dependencies = [ "protobuf", "reqwest", "sha2", + "tempfile", "tokio", "update-format-crau", "url", @@ -1396,6 +1419,7 @@ dependencies = [ name = "update-format-crau" version = "0.1.0" dependencies = [ + "bzip2", "log", "protobuf", "rsa", diff --git a/Cargo.toml b/Cargo.toml index 3e9f907..2a80494 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,6 +17,8 @@ log = "0.4" argh = "0.1" globset = "0.4" protobuf = "3.2.0" +bzip2 = "0.4.4" +tempfile = "3.8.1" [dependencies.hard-xml] path = "vendor/hard-xml" diff --git a/omaha/src/hash_types.rs b/omaha/src/hash_types.rs index 35f807f..a0bdcc4 100644 --- a/omaha/src/hash_types.rs +++ b/omaha/src/hash_types.rs @@ -72,6 +72,14 @@ impl str::FromStr 
for Hash { } } +impl Into> for Hash { + fn into(self) -> Vec { + let mut vec = Vec::new(); + vec.append(&mut self.0.as_ref().to_vec()); + vec + } +} + impl Hash { #[inline] fn decode(hash: &str) -> Result { diff --git a/src/bin/download_sysext.rs b/src/bin/download_sysext.rs index 2bd7680..ed1dd85 100644 --- a/src/bin/download_sysext.rs +++ b/src/bin/download_sysext.rs @@ -1,9 +1,11 @@ use std::error::Error; use std::borrow::Cow; -use std::path::Path; +use std::path::{Path, PathBuf}; use std::fs::File; use std::fs; use std::io; +use std::io::{Read, Seek, SeekFrom}; +use std::io::BufReader; #[macro_use] extern crate log; @@ -36,13 +38,49 @@ struct Package<'a> { impl<'a> Package<'a> { #[rustfmt::skip] - fn hash_on_disk(&mut self, path: &Path) -> Result, Box> { + // Return Sha256 hash of data in the given path. + // If maxlen is None, a simple read to the end of the file. + // If maxlen is Some, read only until the given length. + fn hash_on_disk(&mut self, path: &Path, maxlen: Option) -> Result, Box> { use sha2::{Sha256, Digest}; - let mut file = File::open(path)?; + let file = File::open(path)?; let mut hasher = Sha256::new(); - io::copy(&mut file, &mut hasher)?; + let filelen = file.metadata().unwrap().len() as usize; + + let mut maxlen_to_read: usize = match maxlen { + Some(len) => { + if filelen < len { + filelen + } else { + len + } + } + None => filelen, + }; + + const CHUNKLEN: usize = 10485760; // 10M + + let mut freader = BufReader::new(file); + let mut chunklen: usize; + + freader.seek(SeekFrom::Start(0))?; + while maxlen_to_read > 0 { + if maxlen_to_read < CHUNKLEN { + chunklen = maxlen_to_read; + } else { + chunklen = CHUNKLEN; + } + + let mut databuf = vec![0u8; chunklen]; + + freader.read_exact(&mut databuf)?; + + maxlen_to_read -= chunklen; + + hasher.update(&databuf); + } Ok(omaha::Hash::from_bytes( hasher.finalize().into() @@ -75,7 +113,7 @@ impl<'a> Package<'a> { if size_on_disk == expected_size { info!("{}: download complete, checking hash...", 
path.display()); - let hash = self.hash_on_disk(&path)?; + let hash = self.hash_on_disk(&path, None)?; if self.verify_checksum(hash) { info!("{}: good hash, will continue without re-download", path.display()); } else { @@ -120,30 +158,57 @@ impl<'a> Package<'a> { } } - fn verify_signature_on_disk(&mut self, from_path: &Path, pubkey_path: &str) -> Result<(), Box> { + fn verify_signature_on_disk(&mut self, from_path: &Path, pubkey_path: &str) -> Result> { let upfile = File::open(from_path)?; + // create a BufReader to pass down to parsing functions. + let upfreader = &mut BufReader::new(upfile); + // Read update payload from file, read delta update header from the payload. - let res_data = fs::read_to_string(from_path); + let header = delta_update::read_delta_update_header(upfreader)?; - let header = delta_update::read_delta_update_header(&upfile)?; + let mut delta_archive_manifest = delta_update::get_manifest_bytes(upfreader, &header)?; // Extract signature from header. - let sigbytes = delta_update::get_signatures_bytes(&upfile, &header)?; + let sigbytes = delta_update::get_signatures_bytes(upfreader, &header, &mut delta_archive_manifest)?; + + // tmp dir == "/var/tmp/outdir/.tmp" + let tmpdirpathbuf = from_path.parent().unwrap().parent().unwrap().join(".tmp"); + let tmpdir = tmpdirpathbuf.as_path(); + let datablobspath = tmpdir.join("ue_data_blobs"); + + // Get length of header and data, including header and manifest. + let header_data_length = delta_update::get_header_data_length(&header, &delta_archive_manifest); + let hdhash = self.hash_on_disk(from_path, Some(header_data_length))?; + let hdhashvec: Vec = hdhash.into(); + + // Extract data blobs into a file, datablobspath. + delta_update::get_data_blobs(upfreader, &header, &delta_archive_manifest, datablobspath.as_path())?; - // Parse signature data from the signature containing data, version, special fields. 
- let _sigdata = match delta_update::parse_signature_data(res_data.unwrap().as_bytes(), &sigbytes, pubkey_path) { - Some(data) => data, + // Check for hash of data blobs with new_partition_info hash. + let pinfo_hash = match &delta_archive_manifest.new_partition_info.hash { + Some(hash) => hash, + None => return Err("unable to parse signature data".into()), + }; + + let datahash = self.hash_on_disk(datablobspath.as_path(), None)?; + if datahash != omaha::Hash::from_bytes(pinfo_hash.as_slice()[..].try_into().unwrap_or_default()) { + return Err("data hash mismatch with new_partition_info hash".into()); + } + + // Parse signature data from sig blobs, data blobs, public key, and verify. + match delta_update::parse_signature_data(&sigbytes, hdhashvec.as_slice(), pubkey_path) { + Some(_) => (), _ => { self.status = PackageStatus::BadSignature; - return Err("unable to parse signature data".into()); + return Err("unable to parse and verify signature data".into()); } }; println!("Parsed and verified signature data from file {:?}", from_path); self.status = PackageStatus::Verified; - Ok(()) + Ok(datablobspath) } } @@ -249,7 +314,9 @@ async fn main() -> Result<(), Box> { } let unverified_dir = output_dir.join(".unverified"); + let temp_dir = output_dir.join(".tmp"); fs::create_dir_all(&unverified_dir)?; + fs::create_dir_all(&temp_dir)?; //// // parse response @@ -271,17 +338,23 @@ async fn main() -> Result<(), Box> { pkg.download(&unverified_dir, &client).await?; + // Unverified payload is stored in e.g. "output_dir/.unverified/oem.gz". + // Verified payload is stored in e.g. "output_dir/oem.raw". 
let pkg_unverified = unverified_dir.join(&*pkg.name); - let pkg_verified = output_dir.join(&*pkg.name); + let pkg_verified = output_dir.join(pkg_unverified.with_extension("raw").file_name().unwrap_or_default()); match pkg.verify_signature_on_disk(&pkg_unverified, &args.pubkey_file) { - Ok(_) => { - // move the verified file back from unverified_dir to output_dir - fs::rename(&pkg_unverified, &pkg_verified)?; + Ok(datablobspath) => { + // write extracted data into the final data. + fs::rename(datablobspath, pkg_verified.clone())?; + debug!("data blobs written into file {:?}", pkg_verified); } _ => return Err(format!("unable to verify signature \"{}\"", pkg.name).into()), }; } + // clean up data + fs::remove_dir_all(temp_dir)?; + Ok(()) } diff --git a/test/crau_verify.rs b/test/crau_verify.rs index 28d8a3c..2c4fbca 100644 --- a/test/crau_verify.rs +++ b/test/crau_verify.rs @@ -1,8 +1,12 @@ -use std::io::Write; +use std::io; +use std::io::{BufReader, Write}; use std::error::Error; use std::fs; +use std::fs::File; +use std::path::Path; +use tempfile; -use update_format_crau::delta_update; +use update_format_crau::{delta_update, proto}; use argh::FromArgs; @@ -20,6 +24,17 @@ struct Args { sig_path: String, } +fn hash_on_disk(path: &Path) -> Result, Box> { + use sha2::{Sha256, Digest}; + + let mut file = File::open(path)?; + let mut hasher = Sha256::new(); + + io::copy(&mut file, &mut hasher)?; + + Ok(omaha::Hash::from_bytes(hasher.finalize().into())) +} + fn main() -> Result<(), Box> { let args: Args = argh::from_env(); @@ -28,15 +43,32 @@ fn main() -> Result<(), Box> { // Read update payload from srcpath, read delta update header from the payload. 
let upfile = fs::File::open(srcpath.clone())?; - let header = delta_update::read_delta_update_header(&upfile)?; + + let freader = &mut BufReader::new(upfile); + let header = delta_update::read_delta_update_header(freader)?; + + let mut delta_archive_manifest: proto::DeltaArchiveManifest = Default::default(); // Extract signature from header. - let sigbytes = delta_update::get_signatures_bytes(&upfile, &header)?; + let sigbytes = delta_update::get_signatures_bytes(freader, &header, &mut delta_archive_manifest)?; + + // Parse signature data from the signature containing data, version, special fields. + let tmpdir = tempfile::tempdir()?.into_path(); + fs::create_dir_all(tmpdir.clone())?; + + let headerdatapath = tmpdir.join("ue_header_data"); + + let hdhash = hash_on_disk(headerdatapath.as_path())?; + let hdhashvec: Vec = hdhash.into(); + + // Get length of header and data + let datablobspath = tmpdir.join("ue_data_blobs"); - const TESTDATA: &str = "test data for verifying signature"; + // Extract data blobs into file path. + delta_update::get_data_blobs(freader, &header, &delta_archive_manifest, datablobspath.as_path())?; // Parse signature data from the signature containing data, version, special fields. 
- let sigdata = match delta_update::parse_signature_data(TESTDATA.as_bytes(), &sigbytes, PUBKEY_FILE) { + let sigdata = match delta_update::parse_signature_data(&sigbytes, hdhashvec.as_slice(), PUBKEY_FILE) { Some(data) => Box::leak(data), _ => return Err("unable to parse signature data".into()), }; diff --git a/update-format-crau/Cargo.toml b/update-format-crau/Cargo.toml index b27198f..a6585e7 100644 --- a/update-format-crau/Cargo.toml +++ b/update-format-crau/Cargo.toml @@ -6,6 +6,7 @@ edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +bzip2 = "0.4.4" log = "0.4.19" protobuf = "3" rsa = { version = "0.9.2", features = ["sha2"] } diff --git a/update-format-crau/src/delta_update.rs b/update-format-crau/src/delta_update.rs index 4a95809..8fa5c37 100644 --- a/update-format-crau/src/delta_update.rs +++ b/update-format-crau/src/delta_update.rs @@ -1,7 +1,10 @@ -use std::io::{Read, Seek, SeekFrom}; +use std::io::{BufReader, Read, Seek, SeekFrom, Write}; use std::error::Error; +use std::fs; use std::fs::File; +use std::path::Path; use log::{error, debug}; +use bzip2::read::BzDecoder; use protobuf::Message; @@ -29,7 +32,7 @@ impl DeltaUpdateFileHeader { } // Read delta update header from the given file, return DeltaUpdateFileHeader. -pub fn read_delta_update_header(mut f: &File) -> Result> { +pub fn read_delta_update_header(f: &mut BufReader) -> Result> { let mut header = DeltaUpdateFileHeader { magic: [0; 4], file_format_version: 0, @@ -54,17 +57,23 @@ pub fn read_delta_update_header(mut f: &File) -> Result(mut f: &'a File, header: &'a DeltaUpdateFileHeader) -> Result, Box> { +// Take a buffer stream and DeltaUpdateFileHeader, +// return DeltaArchiveManifest that contains manifest. 
+pub fn get_manifest_bytes(f: &mut BufReader, header: &DeltaUpdateFileHeader) -> Result> { let manifest_bytes = { let mut buf = vec![0u8; header.manifest_size as usize]; f.read_exact(&mut buf)?; buf.into_boxed_slice() }; - let manifest = proto::DeltaArchiveManifest::parse_from_bytes(&manifest_bytes)?; + let delta_archive_manifest = proto::DeltaArchiveManifest::parse_from_bytes(&manifest_bytes)?; + + Ok(delta_archive_manifest) +} +// Take a buffer stream and DeltaUpdateFileHeader, +// return a bytes slice of the actual signature data as well as its length. +pub fn get_signatures_bytes<'a>(f: &'a mut BufReader, header: &'a DeltaUpdateFileHeader, manifest: &mut proto::DeltaArchiveManifest) -> Result, Box> { // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! // !!! signature offsets are from the END of the manifest !!! // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! @@ -85,10 +94,58 @@ pub fn get_signatures_bytes<'a>(mut f: &'a File, header: &'a DeltaUpdateFileHead Ok(signatures_bytes.unwrap()) } +// Return data length, including header and manifest. +pub fn get_header_data_length(header: &DeltaUpdateFileHeader, manifest: &proto::DeltaArchiveManifest) -> usize { + // Read from the beginning of the stream, which means the whole buffer including + // delta update header as well as manifest. That is because data that must be verified + // with signatures start from the beginning. + // + // Payload data structure: + // | header | manifest | data blobs | signatures | + + header.translate_offset(manifest.signatures_offset.unwrap()) as usize +} + +// Take a buffer reader, delta file header, manifest as input. +// Return path to data blobs, without header, manifest, or signatures. 
+pub fn get_data_blobs<'a>(f: &'a mut BufReader, header: &'a DeltaUpdateFileHeader, manifest: &proto::DeltaArchiveManifest, tmppath: &Path) -> Result> { + fs::create_dir_all(tmppath.parent().unwrap())?; + let mut outfile = File::create(tmppath)?; + + // Read from the beginning of header, which means buffer including only data blobs. + // It means it is necessary to call header.translate_offset(), in contrast to + // get_header_data_length. + // Iterate each partition_operations to get data offset and data length. + for pop in &manifest.partition_operations { + let data_offset = pop.data_offset.unwrap(); + let data_length = pop.data_length.unwrap(); + + let mut partdata = vec![0u8; data_length as usize]; + + f.seek(SeekFrom::Start(header.translate_offset(data_offset.into())))?; + f.read_exact(&mut partdata)?; + + // In case of bzip2-compressed chunks, extract. + if pop.type_.unwrap() == proto::install_operation::Type::REPLACE_BZ.into() { + let mut bzdecoder = BzDecoder::new(&partdata[..]); + let mut partdata_unpacked = Vec::new(); + bzdecoder.read_to_end(&mut partdata_unpacked)?; + + outfile.write_all(&partdata_unpacked)?; + } else { + outfile.write_all(&partdata)?; + } + outfile.flush()?; + } + + Ok(outfile) +} + #[rustfmt::skip] -// parse_signature_data takes a bytes slice for signature and public key file path. -// Return only actual data, without version and special fields. -pub fn parse_signature_data(testdata: &[u8], sigbytes: &[u8], pubkeyfile: &str) -> Option> { +// parse_signature_data takes bytes slices for signature and digest of data blobs, +// and path to public key, to parse and verify the signature. +// Return only actual signature data, without version and special fields. +pub fn parse_signature_data(sigbytes: &[u8], digest: &[u8], pubkeyfile: &str) -> Option> { // Signatures has a container of the fields, i.e. version, data, and // special fields. 
let sigmessage = match proto::Signatures::parse_from_bytes(sigbytes) { @@ -102,12 +159,13 @@ pub fn parse_signature_data(testdata: &[u8], sigbytes: &[u8], pubkeyfile: &str) // Return the first valid signature, iterate into the next slot if invalid. sigmessage.signatures.iter() .find_map(|sig| - verify_sig_pubkey(testdata, sig, pubkeyfile) + verify_sig_pubkey(digest, sig, pubkeyfile) .map(Vec::into_boxed_slice)) } -// Verify signature with public key -pub fn verify_sig_pubkey(testdata: &[u8], sig: &Signature, pubkeyfile: &str) -> Option> { +// verify_sig_pubkey verifies signature with the given digest and the public key. +// Return the verified signature data. +pub fn verify_sig_pubkey(digest: &[u8], sig: &Signature, pubkeyfile: &str) -> Option> { // The signature version is actually a numeration of the present signatures, // with the index starting at 2 if only one signature is present. // The Flatcar dev payload has only one signature but @@ -121,11 +179,12 @@ pub fn verify_sig_pubkey(testdata: &[u8], sig: &Signature, pubkeyfile: &str) -> _ => None, }; + debug!("digest: {:?}", digest); debug!("data: {:?}", sig.data()); debug!("special_fields: {:?}", sig.special_fields()); // verify signature with pubkey - let res_verify = verify_sig::verify_rsa_pkcs(testdata, sig.data(), get_public_key_pkcs_pem(pubkeyfile, KeyTypePkcs8)); + let res_verify = verify_sig::verify_rsa_pkcs_prehash(&digest, sig.data(), get_public_key_pkcs_pem(pubkeyfile, KeyTypePkcs8)); match res_verify { Ok(res_verify) => res_verify, Err(err) => { diff --git a/update-format-crau/src/verify_sig.rs b/update-format-crau/src/verify_sig.rs index c64e134..7a6ac16 100644 --- a/update-format-crau/src/verify_sig.rs +++ b/update-format-crau/src/verify_sig.rs @@ -3,6 +3,7 @@ use rsa::pkcs1::{DecodeRsaPrivateKey, DecodeRsaPublicKey}; use rsa::pkcs8::{DecodePrivateKey, DecodePublicKey}; use rsa::pkcs1v15; use rsa::signature::{SignatureEncoding, Signer, Verifier}; +use rsa::signature::hazmat::PrehashVerifier; use 
rsa::sha2::Sha256; use std::{fs, str}; use std::error::Error; @@ -30,7 +31,9 @@ pub fn sign_rsa_pkcs(databuf: &[u8], private_key: RsaPrivateKey) -> Result Result<(), Box> { +// As databuf is an in-memory buffer, the function has a limitation of max size +// of the input data, like a few GiB. Going over that, it could result in OOM. +pub fn verify_rsa_pkcs_buf(databuf: &[u8], signature: &[u8], public_key: RsaPublicKey) -> Result<(), Box> { // Equivalent of: // openssl rsautl -verify -pubin -key |public_key_path| // - in |sig_data| -out |out_hash_data| @@ -40,6 +43,18 @@ pub fn verify_rsa_pkcs(databuf: &[u8], signature: &[u8], public_key: RsaPublicKe Ok(verifying_key.verify(databuf, &pkcs1v15::Signature::try_from(signature).unwrap())?) } +// Takes a data buffer, signature and a public key, to verify the data +// with the public key. +// In contrast to verify_rsa_pkcs_buf, the function takes a digest of an input +// buffer, so it does not have a limitation of max size of input data. +// It relies on RSA PrehashVerifier. +// TODO: consider migrating to RSA DigestVerifier. +pub fn verify_rsa_pkcs_prehash(digestbuf: &[u8], signature: &[u8], public_key: RsaPublicKey) -> Result<(), Box> { + let verifying_key = pkcs1v15::VerifyingKey::::new(public_key); + + Ok(verifying_key.verify_prehash(digestbuf, &pkcs1v15::Signature::try_from(signature).unwrap())?) 
+} + pub fn get_private_key_pkcs_pem(private_key_path: &str, key_type: KeyType) -> RsaPrivateKey { let private_key_buf = fs::read_to_string(private_key_path).unwrap(); let out_key = match key_type { @@ -92,7 +107,7 @@ mod tests { panic!("failed to sign data: {:?}", error); }); - _ = verify_rsa_pkcs( + _ = verify_rsa_pkcs_buf( TESTDATA.as_bytes(), signature.as_slice(), get_public_key_pkcs_pem(PUBKEY_PKCS1_PATH, KeyTypePkcs1), @@ -106,7 +121,7 @@ mod tests { panic!("failed to sign data: {:?}", error); }); - _ = verify_rsa_pkcs( + _ = verify_rsa_pkcs_buf( TESTDATA.as_bytes(), signature.as_slice(), get_public_key_pkcs_pem(PUBKEY_PKCS8_PATH, KeyTypePkcs8),