nardl: unpack
commit af8b7b2384
parent 279dea8167
@@ -4,11 +4,12 @@ use ed25519_dalek::{Signature, VerifyingKey};
 use sha2::Digest;
 use std::{
     collections::{HashMap, HashSet},
+    io::{BufReader, Read, Seek},
     os::unix::fs::MetadataExt,
     path::Path,
     pin::Pin,
 };
-use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeekExt};
+use tokio::io::AsyncRead;
 use tokio_stream::StreamExt;
 use tokio_util::io::StreamReader;
 
@@ -22,7 +23,7 @@ const KEYS: [&str; 2] = [
 
 const OUTPUT: &str = "/nix/store/n50jk09x9hshwx1lh6k3qaiygc7yxbv9-lix-2.90.0-rc1";
 
-const TEST_PREFIX: &str = "/tmp/nardl";
+const TEST_PREFIX: &str = "/tmp/nardl/out";
 
 #[tokio::main]
 async fn main() -> eyre::Result<()> {
@@ -63,7 +64,8 @@ async fn main() -> eyre::Result<()> {
         })
         .collect::<eyre::Result<HashMap<&str, VerifyingKey>>>()?;
 
-    let temp_dir = async_tempfile::TempDir::new().await?;
+    // let temp_dir = async_tempfile::TempDir::new().await?;
+    let temp_dir = Path::new("/tmp/nardl");
 
     let client = reqwest::Client::new();
 
@@ -102,7 +104,12 @@ async fn main() -> eyre::Result<()> {
         let Some(output) = outputs_remaining.pop() else {
             break;
         };
-        outputs_done.insert(output.clone());
+        // If two outputs refer to the same reference before that reference is done,
+        // then we can end up with it in the list twice, even with outputs_done filtering.
+        if !outputs_done.insert(output.clone()) {
+            continue;
+        }
 
         log::debug!("Requesting output {}", output);
 
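A note on the dedup change in this hunk: std's HashSet::insert returns true only when the value was not already present, so the single if !outputs_done.insert(output.clone()) guard both records the output as done and skips duplicates in one call. A minimal standalone sketch of that behaviour (names are illustrative, not from this repo):

    use std::collections::HashSet;

    fn main() {
        let mut outputs_done: HashSet<String> = HashSet::new();
        // First insert: the value is new, so insert returns true.
        assert!(outputs_done.insert("abc123-foo".to_string()));
        // Inserting the same value again returns false, i.e. "already done, skip it".
        assert!(!outputs_done.insert("abc123-foo".to_string()));
    }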
@@ -115,13 +122,16 @@ async fn main() -> eyre::Result<()> {
             get_narinfo(client.clone(), cache_base_urls.as_slice(), fingerprint)
                 .await
                 .wrap_err_with(|| format!("While processing {}", output))?;
-        let narinfo_parsed = narinfo::NarInfo::parse(&narinfo_text).unwrap();
+        // fuck it, i'm too tired to do it properly
+        let narinfo_parsed =
+            narinfo::NarInfo::parse(Box::leak(narinfo_text.into_boxed_str())).unwrap();
 
         verify_signature(&narinfo_parsed, &trusted_keys, store_dir, &output)
             .wrap_err_with(|| format!("While processing {}", output))?;
 
         for reference in narinfo_parsed.references.iter() {
             if reference.is_empty() || outputs_done.contains(reference.as_ref()) {
+                log::trace!("skipping reference from {} to {}", output, reference);
                 continue;
             }
             outputs_remaining.push(reference.to_string());
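On the Box::leak change above: leaking the downloaded narinfo text turns the owned String into a &'static str, so narinfo_parsed no longer borrows from a local that is dropped at the end of the loop iteration, and it can be moved into the spawn_blocking closure introduced further down, which requires 'static captures. The price is that each narinfo stays allocated until the process exits, presumably acceptable for a short-lived downloader. A minimal sketch of the pattern; the helper name is hypothetical:

    /// Leak an owned String to get a &'static str back.
    /// The allocation is never freed, which is fine for a short-lived CLI
    /// but would be a leak per call in a long-running service.
    fn leak_to_static(s: String) -> &'static str {
        Box::leak(s.into_boxed_str())
    }

    fn main() {
        let narinfo_text = String::from("StorePath: /nix/store/...-example\n");
        let static_text: &'static str = leak_to_static(narinfo_text);
        // static_text can now be handed to a parser that wants a 'static borrow.
        println!("{} bytes leaked on purpose", static_text.len());
    }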
@@ -129,7 +139,6 @@ async fn main() -> eyre::Result<()> {
 
         // Download nar
         let nar_url = cache_base_url.join(narinfo_parsed.url)?;
-        log::trace!("Found nar url {}", nar_url);
 
         let response = client_no_compression
             .get(nar_url)
@@ -165,41 +174,52 @@ async fn main() -> eyre::Result<()> {
             .await?;
         tokio::io::copy(&mut decompressed_stream, &mut out_file).await?;
 
-        out_file.seek(std::io::SeekFrom::Start(0)).await?;
-
-        // Verify nar
-        let found_size = out_file.metadata().await?.size();
-        if found_size != narinfo_parsed.nar_size as u64 {
-            eyre::bail!("Wrong nar size for {}", output);
-        }
-
-        let (hash_algorithm, hash_expected) = narinfo_parsed
-            .nar_hash
-            .as_ref()
-            .split_once(':')
-            .ok_or_eyre("Invalid hash in nar")?;
-
-        if hash_algorithm != "sha256" {
-            eyre::bail!("who is using hashes other than sha256????");
-        }
-
-        log::trace!("expected hash: {}", hash_expected);
-
-        let mut buf = [0u8; 1024];
-        let mut hasher = sha2::Sha256::new();
-        loop {
-            let num_read = out_file.read(&mut buf).await?;
-            if num_read == 0 {
-                break;
-            }
-            hasher.update(&buf[0..num_read]);
-        }
-
-        let hash_found = hasher.finalize();
-
-        if nix_base32::to_nix_base32(hash_found.as_ref()) != hash_expected {
-            eyre::bail!("Incorrect hash when downloading {}", output)
-        }
+        let mut out_file_blocking = out_file.into_std().await;
+
+        tokio::task::spawn_blocking(move || {
+            // Verify nar
+            let found_size = out_file_blocking.metadata()?.size();
+            if found_size != narinfo_parsed.nar_size as u64 {
+                eyre::bail!("Wrong nar size for {}", output);
+            }
+
+            let (hash_algorithm, hash_expected) = narinfo_parsed
+                .nar_hash
+                .as_ref()
+                .split_once(':')
+                .ok_or_eyre("Invalid hash in nar")?;
+
+            if hash_algorithm != "sha256" {
+                eyre::bail!("who is using hashes other than sha256????");
+            }
+
+            out_file_blocking.seek(std::io::SeekFrom::Start(0))?;
+            let mut buf = [0u8; 1024];
+            let mut hasher = sha2::Sha256::new();
+            loop {
+                let num_read = out_file_blocking.read(&mut buf)?;
+                if num_read == 0 {
+                    break;
+                }
+                hasher.update(&buf[0..num_read]);
+            }
+
+            let hash_found = hasher.finalize();
+
+            if nix_base32::to_nix_base32(hash_found.as_ref()) != hash_expected {
+                eyre::bail!("Incorrect hash when downloading {}", output)
+            }
+
+            // Unpack nar
+            out_file_blocking.seek(std::io::SeekFrom::Start(0))?;
+            let reader = BufReader::new(out_file_blocking);
+            let decoder = nix_nar::Decoder::new(reader)?;
+
+            decoder.unpack(Path::new(TEST_PREFIX).join(output))?;
+
+            Ok(())
+        })
+        .await??;
     }
 
     Ok(())
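The shape of the change above: verification and unpacking are synchronous, blocking work (sha2 hashing over the file, nix_nar::Decoder::unpack writing to disk), so the tokio file is converted into a std::fs::File with into_std() and the whole block runs inside tokio::task::spawn_blocking, keeping it off the async worker threads. The trailing .await?? then propagates two layers of failure: the outer ? covers the JoinError if the blocking task panics or is cancelled, the inner ? covers the closure's own eyre::Result. A reduced sketch of that pattern, assuming the same tokio + eyre + sha2 dependencies as this crate (function and variable names are illustrative):

    async fn sha256_file_blocking(path: std::path::PathBuf) -> eyre::Result<Vec<u8>> {
        let digest = tokio::task::spawn_blocking(move || -> eyre::Result<Vec<u8>> {
            use sha2::Digest;
            use std::io::Read;

            let mut file = std::fs::File::open(path)?;
            let mut hasher = sha2::Sha256::new();
            let mut buf = [0u8; 1024];
            loop {
                let num_read = file.read(&mut buf)?;
                if num_read == 0 {
                    break;
                }
                hasher.update(&buf[0..num_read]);
            }
            Ok(hasher.finalize().as_slice().to_vec())
        })
        // Outer ?: the blocking task failed to run (panic/cancel). Inner ?: the closure's own error.
        .await??;
        Ok(digest)
    }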
@@ -279,7 +299,6 @@ fn verify_signature(
             .collect::<Vec<String>>()
             .join(",")
     );
-    log::trace!("narinfo fingerprint: `{}`", fingerprint);
 
     key.verify_strict(fingerprint.as_bytes(), &signature)
         .wrap_err("Invalid signature")?;
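For reference, the verification call kept in this hunk is plain ed25519-dalek: the binary cache signs the narinfo fingerprint string, and verify_strict checks that detached signature against one of the trusted public keys. A minimal sketch against the ed25519-dalek 2.x API (the byte inputs are placeholders):

    use ed25519_dalek::{Signature, VerifyingKey};

    fn check_signature(
        key_bytes: &[u8; 32],
        sig_bytes: &[u8; 64],
        fingerprint: &str,
    ) -> eyre::Result<()> {
        let key = VerifyingKey::from_bytes(key_bytes)?;
        let signature = Signature::from_bytes(sig_bytes);
        // verify_strict applies stricter checks than plain verify
        // (for example it rejects small-order/weak public keys).
        key.verify_strict(fingerprint.as_bytes(), &signature)?;
        Ok(())
    }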