From d4ae0a6d05d6bb583c78b3e3f96b410f81fe76c3 Mon Sep 17 00:00:00 2001 From: Matt Briggs Date: Wed, 24 Jun 2020 09:40:51 -0700 Subject: [PATCH 1/2] migrator: rename main.rs to run.rs --- sources/api/migration/migrator/src/{main.rs => run.rs} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename sources/api/migration/migrator/src/{main.rs => run.rs} (100%) diff --git a/sources/api/migration/migrator/src/main.rs b/sources/api/migration/migrator/src/run.rs similarity index 100% rename from sources/api/migration/migrator/src/main.rs rename to sources/api/migration/migrator/src/run.rs From 9fcfb47bc79762c1cd63f86289bd81fef5fb3a9c Mon Sep 17 00:00:00 2001 From: Matt Briggs Date: Wed, 24 Jun 2020 09:42:22 -0700 Subject: [PATCH 2/2] migrator: end-to-end test --- sources/Cargo.lock | 8 +- sources/api/migration/migrator/Cargo.toml | 6 +- .../migrator/src/{run.rs => main.rs} | 6 +- sources/api/migration/migrator/src/test.rs | 248 ++++++++++++++++++ .../migrator/tests/data/expired-root.json | 50 ++++ .../migrator/tests/data/snakeoil.pem | 28 ++ sources/api/storewolf/Cargo.toml | 8 + sources/api/storewolf/src/lib.rs | 110 ++++++++ sources/api/storewolf/src/main.rs | 87 +----- 9 files changed, 462 insertions(+), 89 deletions(-) rename sources/api/migration/migrator/src/{run.rs => main.rs} (99%) create mode 100644 sources/api/migration/migrator/src/test.rs create mode 100644 sources/api/migration/migrator/tests/data/expired-root.json create mode 100644 sources/api/migration/migrator/tests/data/snakeoil.pem create mode 100644 sources/api/storewolf/src/lib.rs diff --git a/sources/Cargo.lock b/sources/Cargo.lock index eb75848ac01..8316f264918 100644 --- a/sources/Cargo.lock +++ b/sources/Cargo.lock @@ -1433,16 +1433,18 @@ version = "0.1.0" dependencies = [ "bottlerocket-release 0.1.0", "cargo-readme 3.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "chrono 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "lz4 1.23.1 (registry+https://github.com/rust-lang/crates.io-index)", "nix 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)", - "pentacle 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "pentacle 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", "regex 1.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "simplelog 0.7.5 (registry+https://github.com/rust-lang/crates.io-index)", "snafu 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)", + "storewolf 0.1.0", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "tough 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", "update_metadata 0.1.0", @@ -1732,7 +1734,7 @@ dependencies = [ [[package]] name = "pentacle" -version = "0.1.1" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "libc 0.2.70 (registry+https://github.com/rust-lang/crates.io-index)", @@ -3227,7 +3229,7 @@ dependencies = [ "checksum parking_lot 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "92e98c49ab0b7ce5b222f2cc9193fc4efe11c6d0bd4f648e374684a6857b1cfc" "checksum parking_lot_core 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"7582838484df45743c8434fbff785e8edf260c28748353d44bc0da32e0ceabf1" "checksum pem 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a1581760c757a756a41f0ee3ff01256227bdf64cb752839779b95ffb01c59793" -"checksum pentacle 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a68bcd5f6f87e5afcae276a85b759446e3dddaceedde1583b85f1a3e32dbe638" +"checksum pentacle 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "be3c5f1aadd60af94ddf9b279079a0a1fbec1e59781e23ce31671c02a4230f86" "checksum percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" "checksum pest 2.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53" "checksum pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "833d1ae558dc601e9a60366421196a8d94bc0ac980476d0b67e1d0988d72b2d0" diff --git a/sources/api/migration/migrator/Cargo.toml b/sources/api/migration/migrator/Cargo.toml index 3665b293c9a..c3557a0c772 100644 --- a/sources/api/migration/migrator/Cargo.toml +++ b/sources/api/migration/migrator/Cargo.toml @@ -13,7 +13,7 @@ lazy_static = "1.2" log = "0.4" lz4 = "1.23.1" nix = "0.17" -pentacle = "0.1.1" +pentacle = "0.2.0" rand = { version = "0.7", default-features = false, features = ["std"] } regex = "1.1" semver = "0.9" @@ -27,6 +27,10 @@ url = "2.1.1" [build-dependencies] cargo-readme = "3.1" +[dev-dependencies] +chrono = "0.4.11" +storewolf = { path = "../../storewolf" } + [[bin]] name = "migrator" path = "src/main.rs" diff --git a/sources/api/migration/migrator/src/run.rs b/sources/api/migration/migrator/src/main.rs similarity index 99% rename from sources/api/migration/migrator/src/run.rs rename to sources/api/migration/migrator/src/main.rs index 7ebb3302d8f..9abec9ebc72 100644 --- a/sources/api/migration/migrator/src/run.rs +++ b/sources/api/migration/migrator/src/main.rs @@ -46,6 +46,8 @@ use update_metadata::{load_manifest, MIGRATION_FILENAME_RE}; mod args; mod direction; mod error; +#[cfg(test)] +mod test; lazy_static! { /// This is the last version of Bottlerocket that supports *only* unsigned migrations. @@ -131,7 +133,7 @@ where Version::parse(version_str).context(error::InvalidDataStoreVersion { path: &patch }) } -fn run(args: &Args) -> Result<()> { +pub(crate) fn run(args: &Args) -> Result<()> { // Get the directory we're working in. let datastore_dir = args .datastore_path @@ -782,7 +784,7 @@ where // =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= =^..^= #[cfg(test)] -mod test { +mod main_test { use super::*; #[test] diff --git a/sources/api/migration/migrator/src/test.rs b/sources/api/migration/migrator/src/test.rs new file mode 100644 index 00000000000..ecfc7edfb8a --- /dev/null +++ b/sources/api/migration/migrator/src/test.rs @@ -0,0 +1,248 @@ +//! Provides an end-to-end test of `migrator` via the `run` function. This module is conditionally +//! compiled for cfg(test) only. +use crate::args::Args; +use crate::run; +use chrono::{DateTime, Utc}; +use semver::Version; +use std::fs; +use std::fs::File; +use std::io::Write; +use std::path::{Path, PathBuf}; +use tempfile::TempDir; + +/// Provides the path to a folder where test data files reside. +fn test_data() -> PathBuf { + let mut p = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + p.pop(); + p.join("migrator").join("tests").join("data") +} + +/// Returns the filepath to a `root.json` file stored in tree for testing. 
This file declares
+/// an expiration date of `1970-01-01` to ensure that migrator succeeds even with an expired TUF
+/// repository.
+fn root() -> PathBuf {
+    test_data()
+        .join("expired-root.json")
+        .canonicalize()
+        .unwrap()
+}
+
+/// Returns the filepath to a private key, stored in tree and used only for testing.
+fn pem() -> PathBuf {
+    test_data().join("snakeoil.pem").canonicalize().unwrap()
+}
+
+/// The name of the first test migration. The prefix `b-` helps verify that migrations run in
+/// their listed order rather than in alphabetical order.
+const FIRST_MIGRATION: &str = "b-first-migration";
+
+/// The name of the second test migration. The prefix `a-` helps verify that migrations run in
+/// their listed order rather than in alphabetical order.
+const SECOND_MIGRATION: &str = "a-second-migration";
+
+/// Creates a script that will serve as a migration during testing. The script appends its
+/// migration name and arguments to a file named `result.txt` in the parent directory of the
+/// datastore. `pentacle` does not retain the name of the executing binary or script, so we take
+/// the `migration_name` as input and 'hardcode' it into the script.
+fn create_test_migration<S: AsRef<str>>(migration_name: S) -> String {
+    format!(
+        r#"#!/usr/bin/env bash
+set -eo pipefail
+migration_name="{}"
+datastore_parent_dir="$(dirname "${{3}}")"
+outfile="${{datastore_parent_dir}}/result.txt"
+echo "${{migration_name}}:" "${{@}}" >> "${{outfile}}"
+"#,
+        migration_name.as_ref()
+    )
+}
+
+/// Holds the lifetime of a `TempDir` inside which a datastore directory and links are held for
+/// testing.
+struct TestDatastore {
+    tmp: TempDir,
+    datastore: PathBuf,
+}
+
+impl TestDatastore {
+    /// Creates a `TempDir`, sets up the datastore links needed to represent the `from_version`,
+    /// and returns a `TestDatastore` populated with this information.
+    fn new(from_version: Version) -> Self {
+        let tmp = TempDir::new().unwrap();
+        let datastore = storewolf::create_new_datastore(tmp.path(), Some(from_version)).unwrap();
+        TestDatastore { tmp, datastore }
+    }
+}
+
+/// Represents a TUF repository, which is held in a tempdir.
+struct TestRepo {
+    /// This field preserves the lifetime of the TempDir even though we never read it. When
+    /// `TestRepo` goes out of scope, `TempDir` will remove the temporary directory.
+    _tuf_dir: TempDir,
+    metadata_path: PathBuf,
+    targets_path: PathBuf,
+}
+
+/// LZ4-compresses `source` bytes to a new file at `destination`.
+fn compress(source: &[u8], destination: &Path) {
+    let output_file = File::create(destination).unwrap();
+    let mut encoder = lz4::EncoderBuilder::new()
+        .level(4)
+        .build(output_file)
+        .unwrap();
+    encoder.write_all(source).unwrap();
+    let (_output, result) = encoder.finish();
+    result.unwrap()
+}
+
+/// Creates a test repository with a couple of versions defined in the manifest and a couple of
+/// migrations. See the test description for more info.
+fn create_test_repo() -> TestRepo {
+    // This is where the signed TUF repo will exist when we are done. It is the
+    // root directory of the `TestRepo` that we will return.
+    let test_repo_dir = TempDir::new().unwrap();
+    let metadata_path = test_repo_dir.path().join("metadata");
+    let targets_path = test_repo_dir.path().join("targets");
+
+    // This is where we will stage the TUF repository targets prior to signing them. We are using
+    // symlinks between `tuf_indir` and the output `targets` directory, so we keep both in the
+    // same `TempDir`.
+    let tuf_indir = test_repo_dir.path();
+
+    // Create a Manifest and save it to the `tuf_indir` for signing.
+    let mut manifest = update_metadata::Manifest::default();
+    // Insert the following migrations into the manifest. Note that the first migration sorts
+    // after the second migration alphabetically; this helps ensure that migrations run in their
+    // listed order (rather than in sorted order, as in previous implementations).
+    manifest.migrations.insert(
+        (Version::new(0, 99, 0), Version::new(0, 99, 1)),
+        vec![FIRST_MIGRATION.into(), SECOND_MIGRATION.into()],
+    );
+    update_metadata::write_file(tuf_indir.join("manifest.json").as_path(), &manifest).unwrap();
+
+    // Create a script that we can use as the 'migration' that migrator will run. This script will
+    // write its name and arguments to a file named result.txt in the directory that is the parent
+    // of --source-datastore. result.txt can then be used to see which migrations ran, and in what
+    // order. Note that the tests are sensitive to the order and number of arguments passed. If
+    // --source-datastore is given at a different position, the tests will fail and the script
+    // will need to be updated.
+    let migration_a = create_test_migration(FIRST_MIGRATION);
+    let migration_b = create_test_migration(SECOND_MIGRATION);
+
+    // Save lz4-compressed copies of the migration script into the `tuf_indir`.
+    compress(migration_a.as_bytes(), &tuf_indir.join(FIRST_MIGRATION));
+    compress(migration_b.as_bytes(), &tuf_indir.join(SECOND_MIGRATION));
+
+    // Create and sign the TUF repository.
+    let mut editor = tough::editor::RepositoryEditor::new(root()).unwrap();
+    let long_ago: DateTime<Utc> = DateTime::parse_from_rfc3339("1970-01-01T00:00:00Z")
+        .unwrap()
+        .into();
+    let one = std::num::NonZeroU64::new(1).unwrap();
+    editor
+        .targets_version(one)
+        .targets_expires(long_ago)
+        .snapshot_version(one)
+        .snapshot_expires(long_ago)
+        .timestamp_version(one)
+        .timestamp_expires(long_ago);
+
+    fs::read_dir(tuf_indir)
+        .unwrap()
+        .filter(|dir_entry_result| {
+            if let Ok(dir_entry) = dir_entry_result {
+                return dir_entry.path().is_file();
+            }
+            false
+        })
+        .for_each(|dir_entry_result| {
+            let dir_entry = dir_entry_result.unwrap();
+            editor.add_target(
+                dir_entry.file_name().to_str().unwrap().into(),
+                tough::schema::Target::from_path(dir_entry.path()).unwrap(),
+            );
+        });
+    let signed_repo = editor
+        .sign(&[Box::new(tough::key_source::LocalKeySource { path: pem() })])
+        .unwrap();
+    signed_repo.link_targets(tuf_indir, &targets_path).unwrap();
+    signed_repo.write(&metadata_path).unwrap();
+
+    TestRepo {
+        _tuf_dir: test_repo_dir,
+        metadata_path,
+        targets_path,
+    }
+}
+
+/// Tests the migrator program end-to-end using the `run` function. Creates a TUF repo in a
+/// tempdir which includes a `manifest.json` with a couple of migrations:
+/// ```
+/// "(0.99.0, 0.99.1)": [
+///    "b-first-migration",
+///    "a-second-migration"
+/// ]
+/// ```
+/// 
+/// The two 'migrations' are instances of the same bash script (see `create_test_repo`), which
+/// writes its name (i.e. the migration name) and its arguments to a file at `./result.txt`
+/// (since migrations run in the context of the datastore directory, `result.txt` is written one
+/// directory above the datastore). We can then inspect the contents of `result.txt`
+/// to see that the expected migrations ran in the correct order.
+#[test] +fn migrate_forward() { + let from_version = Version::parse("0.99.0").unwrap(); + let to_version = Version::parse("0.99.1").unwrap(); + let test_datastore = TestDatastore::new(from_version); + let test_repo = create_test_repo(); + let args = Args { + datastore_path: test_datastore.datastore.clone(), + log_level: log::LevelFilter::Info, + migration_directory: test_repo.targets_path.clone(), + migrate_to_version: to_version, + root_path: root(), + metadata_directory: test_repo.metadata_path.clone(), + }; + run(&args).unwrap(); + // the migrations should write to a file named result.txt. + let output_file = test_datastore.tmp.path().join("result.txt"); + let contents = std::fs::read_to_string(&output_file).unwrap(); + let lines: Vec<&str> = contents.split('\n').collect(); + assert_eq!(lines.len(), 3); + let first_line = *lines.get(0).unwrap(); + let want = format!("{}: --forward", FIRST_MIGRATION); + let got: String = first_line.chars().take(want.len()).collect(); + assert_eq!(got, want); + let second_line = *lines.get(1).unwrap(); + let want = format!("{}: --forward", SECOND_MIGRATION); + let got: String = second_line.chars().take(want.len()).collect(); + assert_eq!(got, want); +} + +/// This test ensures that migrations run when migrating from a newer to an older version. +/// See `migrate_forward` for a description of how these tests work. +#[test] +fn migrate_backward() { + let from_version = Version::parse("0.99.1").unwrap(); + let to_version = Version::parse("0.99.0").unwrap(); + let test_datastore = TestDatastore::new(from_version); + let test_repo = create_test_repo(); + let args = Args { + datastore_path: test_datastore.datastore.clone(), + log_level: log::LevelFilter::Info, + migration_directory: test_repo.targets_path.clone(), + migrate_to_version: to_version, + root_path: root(), + metadata_directory: test_repo.metadata_path.clone(), + }; + run(&args).unwrap(); + let output_file = test_datastore.tmp.path().join("result.txt"); + let contents = std::fs::read_to_string(&output_file).unwrap(); + let lines: Vec<&str> = contents.split('\n').collect(); + assert_eq!(lines.len(), 3); + let first_line = *lines.get(0).unwrap(); + let want = format!("{}: --backward", SECOND_MIGRATION); + let got: String = first_line.chars().take(want.len()).collect(); + assert_eq!(got, want); + let second_line = *lines.get(1).unwrap(); + let want = format!("{}: --backward", FIRST_MIGRATION); + let got: String = second_line.chars().take(want.len()).collect(); + assert_eq!(got, want); +} diff --git a/sources/api/migration/migrator/tests/data/expired-root.json b/sources/api/migration/migrator/tests/data/expired-root.json new file mode 100644 index 00000000000..3d2b3d788c8 --- /dev/null +++ b/sources/api/migration/migrator/tests/data/expired-root.json @@ -0,0 +1,50 @@ +{ + "signed": { + "_type": "root", + "spec_version": "1.0.0", + "consistent_snapshot": true, + "version": 1, + "expires": "1970-01-01T00:00:00Z", + "keys": { + "febb06e5853878c3b2447c5100d327ebcf0807832c942f5e93ab28e0e4644684": { + "keytype": "rsa", + "keyval": { + "public": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA7LU2gyhDDc7jglt2h3+q\n3+pHUprpe5hX2W4yE8NlM3U7EQRjiyd9doyXGAanBMd8IyqS3Q2ehuo2TZ5aVUFh\n+s/ZboEj+VMNPwPYhRv4QnNT79/kFsA5z0jMDFxCr3+IT2NFJv9GV+83PFVrvTZX\nNeqIZiAT/EJDENn7wS8p8G+eC/XkUcyA5kWxHXDdBgs+Xd+nXkh2v/8/lFKDJ+A4\nZlF9cIuAiWB7vNRMg29bhsLreD3F73O7iJCaFfg3I9EpofVUWWNZg4VM6Mmjksav\nFneTgjXTN9wPnNTjCBrUGwChLklBtInm+9C5iIfEoysqKZSwjeF9gchOANBlu7PD\nxwIDAQAB\n-----END PUBLIC KEY-----" + }, + 
"scheme": "rsassa-pss-sha256" + } + }, + "roles": { + "targets": { + "keyids": [ + "febb06e5853878c3b2447c5100d327ebcf0807832c942f5e93ab28e0e4644684" + ], + "threshold": 1 + }, + "snapshot": { + "keyids": [ + "febb06e5853878c3b2447c5100d327ebcf0807832c942f5e93ab28e0e4644684" + ], + "threshold": 1 + }, + "timestamp": { + "keyids": [ + "febb06e5853878c3b2447c5100d327ebcf0807832c942f5e93ab28e0e4644684" + ], + "threshold": 1 + }, + "root": { + "keyids": [ + "febb06e5853878c3b2447c5100d327ebcf0807832c942f5e93ab28e0e4644684" + ], + "threshold": 1 + } + } + }, + "signatures": [ + { + "keyid": "febb06e5853878c3b2447c5100d327ebcf0807832c942f5e93ab28e0e4644684", + "sig": "4ed06d6bd1b8cc145c2a872e6705f37038f52534c01d4f52c7bd0e520aa46bfb83ee1987fdbbeb415b3d42ed6f85abed640d9cb4e403a20f56a3d6661b00a174411b927cb064e214632f0bb5d7b1b2319d8064cedb58ae1abc68908ad8e6ce2c451b0d3aafbff3700d6cd74517ccf10f5f00ee0eb16eb4272afc3a9021ff9be8b4e00a69b24607039a8230803eb537293ce6b244d77cd58db512af7ee0a976612a7498f1b31c7e5918925ca3846e5d7f419e9d5825af16290a36eb1b8465de73b8bc1bbaf2e1ae0f7eeb6999fa06f09bee19cd30d8c6848c08d33970e66d8f49704e41c4f2c933be3a77a8a949309cdcdbd7c2262ca89243aff0b5e450e45d64" + } + ] +} diff --git a/sources/api/migration/migrator/tests/data/snakeoil.pem b/sources/api/migration/migrator/tests/data/snakeoil.pem new file mode 100644 index 00000000000..859872e3ba9 --- /dev/null +++ b/sources/api/migration/migrator/tests/data/snakeoil.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDstTaDKEMNzuOC +W3aHf6rf6kdSmul7mFfZbjITw2UzdTsRBGOLJ312jJcYBqcEx3wjKpLdDZ6G6jZN +nlpVQWH6z9lugSP5Uw0/A9iFG/hCc1Pv3+QWwDnPSMwMXEKvf4hPY0Um/0ZX7zc8 +VWu9Nlc16ohmIBP8QkMQ2fvBLynwb54L9eRRzIDmRbEdcN0GCz5d36deSHa//z+U +UoMn4DhmUX1wi4CJYHu81EyDb1uGwut4PcXvc7uIkJoV+Dcj0Smh9VRZY1mDhUzo +yaOSxq8Wd5OCNdM33A+c1OMIGtQbAKEuSUG0ieb70LmIh8SjKyoplLCN4X2ByE4A +0GW7s8PHAgMBAAECggEAMy/Mpfgi5+2BTUMyEoMNFHa5DafqLYv4E1C9C4EJdWM4 +bqmgTQsxGl20nsFxhJsOwRwHvj2ftlUXgRlpN154LaCcyESs3tGS37mZLM+rk9MU +p56m2DShS63WUiXxVboiY+h3n8M8Fu/n5egi8Du8CDC7ZjoxmC9MCWzaACCO1Ssj +XPLGRX2MUkuxfkKSID45yeOHYvQ1qJuan9rw/FwihRIAfvXaMWeU4rZE9GLDpkP/ +nbD+ZCW1l47D6vELwqCwYN1NIMvhS/pDq+a1iDqqLrQmj83Mb7lL4prFEktygv3+ +4rq+UBLmLero2osn/AUsfyvEIIR/3VwqvGrkVGM+IQKBgQD/U6DR2GgxuKcEEk2H +cH0UWfv8eqLUhEroFkK/qTh4kmZPDKsyU5HQr3rnzfajoRPeH8wQf0yOEkR63yIZ +EmD8yYvD87uBYjJFSOdaG+QdxyvnLOXRTOQ98vQxe8kycshicjkHGqSK3YkWCZLq +PUbMAM7vF9yJgha/zsZbP9QGEwKBgQDtVQPfGJK8k0hVuVgS14SW+VD3b1LReSu4 +nPVgFZN3QwmlBAYAeiDs6EwtPb/2FxzYkZZEsw86znEyw7dutfR+CDcFjpspa3dO +5rkl3J3htHugs3RY9TJszBQlIxsi8yOeUKmuFhSlcUgFqhe06Zefnb+y7r8TUu5R +/xHnPIOR/QKBgQDUsJRZ2fzJ1nkCUkDVHyaCiob3hi0X/O/tL4YA5HUPM10YYoRF +0wm3nkYyzQa0vdyv/yrdXz2lZ6A1/qQH4KB8U2QjImjC2cronldiX7OB/sUauPLk +jDnhRP6YLfanGKjGIUJSIaaJqwukGOuSRuMv00iOMQFcZsqOky/3GU+EkwKBgEeR +kohNTbqmI3MOYFvWN/qbFmp3XJ8kjiWu12PxPbYCNj9X1kOAkLtAVfBypZedYqqA +RLNU1+vg40vurzsojaEP8x+u4GV+Lc1SUFT1kXrFxV4xn41P67PZWt3UF1PTxFLy +eiiwKeu2HITwxRNeOcX9eRiJXH+ES6/YXlq/6eE1AoGBANbX5R1Rq4GbKbheCYwT +nvqtRinblPNbVJI/Fnnfk00fwPglDBBqcd+mEoZhhk4LO/HCaP/NxmyA/WdMDjtD +QO/i//ZpJb9O2xyf6AFID8DcAfsqZC3bxGlgNvlB+4ooIZZ9HQjDV2pKXXB/iYLh +P/am5DyfozybPqkQtj+czcIX +-----END PRIVATE KEY----- diff --git a/sources/api/storewolf/Cargo.toml b/sources/api/storewolf/Cargo.toml index 30e85ef76ca..8df1fc455e6 100644 --- a/sources/api/storewolf/Cargo.toml +++ b/sources/api/storewolf/Cargo.toml @@ -20,3 +20,11 @@ toml = "0.5" [build-dependencies] cargo-readme = "3.1" + +[[bin]] +name = "storewolf" +path = "src/main.rs" + +[lib] +name = "storewolf" 
+path = "src/lib.rs" diff --git a/sources/api/storewolf/src/lib.rs b/sources/api/storewolf/src/lib.rs new file mode 100644 index 00000000000..67dfbf9d733 --- /dev/null +++ b/sources/api/storewolf/src/lib.rs @@ -0,0 +1,110 @@ +#![deny(rust_2018_idioms)] + +use bottlerocket_release::BottlerocketRelease; +use rand::distributions::Alphanumeric; +use rand::{thread_rng, Rng}; +use semver::Version; +use snafu::ResultExt; +use std::fs; +use std::os::unix::fs::symlink; +use std::path::{Path, PathBuf}; + +pub mod error { + use std::io; + use std::path::PathBuf; + + use snafu::Snafu; + + /// Public error type for `libstorewolf` + #[derive(Debug, Snafu)] + #[snafu(visibility = "pub")] + pub enum Error { + #[snafu(display("Unable to create directory at '{}': {}", path.display(), source))] + DirectoryCreation { path: PathBuf, source: io::Error }, + + #[snafu(display("Failed to create symlink at '{}': {}", path.display(), source))] + LinkCreate { path: PathBuf, source: io::Error }, + + #[snafu(display("Unable to get OS version: {}", source))] + ReleaseVersion { source: bottlerocket_release::Error }, + } +} + +type Result = std::result::Result; + +/// Given a base path, create a brand new datastore with the appropriate +/// symlink structure for the desired datastore version. +/// +/// If `version` is given, uses it, otherwise pulls version from bottlerocket-release. +/// +/// An example setup for theoretical version 1.5: +/// /path/to/datastore/current +/// -> /path/to/datastore/v1 +/// -> /path/to/datastore/v1.5 +/// -> /path/to/datastore/v1.5.2 +/// -> /path/to/datastore/v1.5.2_0123456789abcdef +/// +/// Returns the path to the datastore (i.e. the last path in the above example). +pub fn create_new_datastore>( + base_path: P, + version: Option, +) -> Result { + let version = match version { + Some(v) => v, + None => { + let br = BottlerocketRelease::new().context(error::ReleaseVersion)?; + br.version_id + } + }; + + // Create random string to append to the end of the new datastore path + let random_id: String = thread_rng().sample_iter(&Alphanumeric).take(16).collect(); + + // Build the various paths to which we'll symlink + + // /path/to/datastore/v1 + let major_version_filename = format!("v{}", version.major); + let major_version_path = base_path.as_ref().join(&major_version_filename); + + // /path/to/datastore/v1.5 + let minor_version_filename = format!("v{}.{}", version.major, version.minor); + let minor_version_path = base_path.as_ref().join(&minor_version_filename); + + // /path/to/datastore/v1.5.2 + let patch_version_filename = format!("v{}.{}.{}", version.major, version.minor, version.patch); + let patch_version_path = base_path.as_ref().join(&patch_version_filename); + + // /path/to/datastore/v1.5_0123456789abcdef + let data_store_filename = format!( + "v{}.{}.{}_{}", + version.major, version.minor, version.patch, random_id + ); + let data_store_path = base_path.as_ref().join(&data_store_filename); + + // /path/to/datastore/current + let current_path = base_path.as_ref().join("current"); + + // Create the path to the datastore, i.e /path/to/datastore/v1.5_0123456789abcdef + fs::create_dir_all(&data_store_path).context(error::DirectoryCreation { + path: &base_path.as_ref(), + })?; + + // Build our symlink chain (See example in docstring above) + // /path/to/datastore/v1.5.2 -> v1.5.2_0123456789abcdef + symlink(&data_store_filename, &patch_version_path).context(error::LinkCreate { + path: &patch_version_path, + })?; + // /path/to/datastore/v1.5 -> v1.5.2 + symlink(&patch_version_filename, 
&minor_version_path).context(error::LinkCreate { + path: &minor_version_path, + })?; + // /path/to/datastore/v1 -> v1.5 + symlink(&minor_version_filename, &major_version_path).context(error::LinkCreate { + path: &major_version_path, + })?; + // /path/to/datastore/current -> v1 + symlink(&major_version_filename, ¤t_path).context(error::LinkCreate { + path: ¤t_path, + })?; + Ok(data_store_path) +} diff --git a/sources/api/storewolf/src/main.rs b/sources/api/storewolf/src/main.rs index 184ab9eb30e..9d5b20f81ec 100644 --- a/sources/api/storewolf/src/main.rs +++ b/sources/api/storewolf/src/main.rs @@ -11,14 +11,12 @@ settings given in the defaults.toml file, unless they already exist. #[macro_use] extern crate log; -use rand::{distributions::Alphanumeric, thread_rng, Rng}; use semver::Version; use simplelog::{Config as LogConfig, LevelFilter, TermLogger, TerminalMode}; use snafu::{ensure, OptionExt, ResultExt}; use std::collections::{HashMap, HashSet}; use std::convert::TryFrom; use std::io; -use std::os::unix::fs::symlink; use std::path::Path; use std::str::FromStr; use std::{env, fs, process}; @@ -27,7 +25,6 @@ use toml::{map::Entry, Value}; use apiserver::datastore::key::{Key, KeyType}; use apiserver::datastore::serialization::{to_pairs, to_pairs_with_prefix}; use apiserver::datastore::{self, DataStore, FilesystemDataStore, ScalarError}; -use bottlerocket_release::BottlerocketRelease; use model::modeled_types::SingleLineString; // FIXME Get these from configuration in the future @@ -50,13 +47,8 @@ mod error { #[snafu(display("Unable to clear pending transactions: {}", source))] DeletePending { source: io::Error }, - #[snafu(display("Unable to create datastore at '{}': {}", path.display(), source))] - DatastoreCreation { path: PathBuf, source: io::Error }, - - #[snafu(display("Unable to get OS version: {}", source))] - ReleaseVersion { - source: bottlerocket_release::Error, - }, + #[snafu(display("Unable to create datastore: {}", source))] + DatastoreCreation { source: storewolf::error::Error }, #[snafu(display("{} is not valid TOML: {}", file, source))] DefaultsFormatting { @@ -107,9 +99,6 @@ mod error { #[snafu(display("Unable to write metadata to the datastore: {}", source))] WriteMetadata { source: datastore::Error }, - #[snafu(display("Failed to create symlink at '{}': {}", path.display(), source))] - LinkCreate { path: PathBuf, source: io::Error }, - #[snafu(display("Data store link '{}' points to /", path.display()))] DataStoreLinkToRoot { path: PathBuf }, @@ -125,78 +114,10 @@ mod error { } use error::StorewolfError; +use storewolf::create_new_datastore; type Result = std::result::Result; -/// Given a base path, create a brand new datastore with the appropriate -/// symlink structure for the desired datastore version. -/// -/// If `version` is given, uses it, otherwise pulls version from bottlerocket-release. 
-/// -/// An example setup for theoretical version 1.5: -/// /path/to/datastore/current -/// -> /path/to/datastore/v1 -/// -> /path/to/datastore/v1.5 -/// -> /path/to/datastore/v1.5.2 -/// -> /path/to/datastore/v1.5.2_0123456789abcdef -fn create_new_datastore>(base_path: P, version: Option) -> Result<()> { - let version = match version { - Some(v) => v, - None => { - let br = BottlerocketRelease::new().context(error::ReleaseVersion)?; - br.version_id - }, - }; - - // Create random string to append to the end of the new datastore path - let random_id: String = thread_rng().sample_iter(&Alphanumeric).take(16).collect(); - - // Build the various paths to which we'll symlink - - // /path/to/datastore/v1 - let major_version_filename = format!("v{}", version.major); - let major_version_path = base_path.as_ref().join(&major_version_filename); - - // /path/to/datastore/v1.5 - let minor_version_filename = format!("v{}.{}", version.major, version.minor); - let minor_version_path = base_path.as_ref().join(&minor_version_filename); - - // /path/to/datastore/v1.5.2 - let patch_version_filename = format!("v{}.{}.{}", version.major, version.minor, version.patch); - let patch_version_path = base_path.as_ref().join(&patch_version_filename); - - // /path/to/datastore/v1.5_0123456789abcdef - let data_store_filename = format!("v{}.{}.{}_{}", version.major, version.minor, version.patch, random_id); - let data_store_path = base_path.as_ref().join(&data_store_filename); - - // /path/to/datastore/current - let current_path = base_path.as_ref().join("current"); - - // Create the path to the datastore, i.e /path/to/datastore/v1.5_0123456789abcdef - fs::create_dir_all(&data_store_path).context(error::DatastoreCreation { - path: &base_path.as_ref(), - })?; - - // Build our symlink chain (See example in docstring above) - // /path/to/datastore/v1.5.2 -> v1.5.2_0123456789abcdef - symlink(&data_store_filename, &patch_version_path).context(error::LinkCreate { - path: &patch_version_path, - })?; - // /path/to/datastore/v1.5 -> v1.5.2 - symlink(&patch_version_filename, &minor_version_path).context(error::LinkCreate { - path: &minor_version_path, - })?; - // /path/to/datastore/v1 -> v1.5 - symlink(&minor_version_filename, &major_version_path).context(error::LinkCreate { - path: &major_version_path, - })?; - // /path/to/datastore/current -> v1 - symlink(&major_version_filename, ¤t_path).context(error::LinkCreate { - path: ¤t_path, - })?; - Ok(()) -} - /// Convert the generic toml::Value representing metadata into a /// Vec that can be used to write the metadata to the datastore. // The input to this function is a toml::Value that represents the metadata @@ -373,7 +294,7 @@ fn populate_default_datastore>( .context(error::QueryData)?; } else { info!("Creating datastore at: {}", &live_path.display()); - create_new_datastore(&base_path, version)?; + create_new_datastore(&base_path, version).context(error::DatastoreCreation)?; } // Read and parse shared defaults