parent 1fe66e91d0
commit 8b1bc9894e
@@ -60,11 +60,17 @@ fn postprocess_useradd(rootfs_dfd: &openat::Dir) -> Fallible<()> {
 // enabled; let's just do this rather than trying to propagate the
 // preset everywhere.
 fn postprocess_presets(rootfs_dfd: &openat::Dir) -> Fallible<()> {
-    let mut o = rootfs_dfd.write_file("usr/lib/systemd/system-preset/40-rpm-ostree-auto.preset", 0o644)?;
-    o.write(r###"# Written by rpm-ostree compose tree
+    let mut o = rootfs_dfd.write_file(
+        "usr/lib/systemd/system-preset/40-rpm-ostree-auto.preset",
+        0o644,
+    )?;
+    o.write(
+        r###"# Written by rpm-ostree compose tree
 enable ostree-remount.service
 enable ostree-finalize-staged.path
-"###.as_bytes())?;
+"###
+        .as_bytes(),
+    )?;
     o.flush()?;
     Ok(())
 }
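For reference, here is a standalone sketch of the pattern being reformatted above: open a file relative to an openat::Dir and write a raw-string preset into it. The path, service name, and use of plain std::io::Error (instead of the crate's Fallible alias) are illustrative choices, not code from rpm-ostree.

extern crate openat;

use std::io::Write;

// Write a systemd preset file beneath an already-opened directory fd;
// 0o644 matches the mode used in the hunk above.
fn write_auto_preset(rootfs: &openat::Dir) -> std::io::Result<()> {
    let mut f = rootfs.write_file("usr/lib/systemd/system-preset/40-example.preset", 0o644)?;
    f.write_all(
        r###"# Example preset
enable example.service
"###
        .as_bytes(),
    )?;
    f.flush()?;
    Ok(())
}

fn main() -> std::io::Result<()> {
    // Assumes the target rootfs and its usr/lib/systemd/system-preset
    // directory already exist.
    let rootfs = openat::Dir::open("/tmp/rootfs")?;
    write_auto_preset(&rootfs)
}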
@@ -101,7 +107,11 @@ fn postprocess_subs_dist(rootfs_dfd: &openat::Dir) -> Fallible<()> {
 // This function is called from rpmostree_postprocess_final(); think of
 // it as the bits of that function that we've chosen to implement in Rust.
 fn compose_postprocess_final(rootfs_dfd: &openat::Dir) -> Fallible<()> {
-    let tasks = [postprocess_useradd, postprocess_presets, postprocess_subs_dist];
+    let tasks = [
+        postprocess_useradd,
+        postprocess_presets,
+        postprocess_subs_dist,
+    ];
     tasks.par_iter().try_for_each(|f| f(rootfs_dfd))
 }

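The compose_postprocess_final change above is pure formatting, but the function itself is a nice illustration of fanning fallible tasks out over rayon. A minimal self-contained sketch of that shape, with placeholder task functions:

extern crate rayon;

use rayon::prelude::*;
use std::io;

// Placeholder tasks standing in for the postprocess_* functions; each takes a
// shared input and may fail.
fn task_a(_input: &str) -> io::Result<()> { Ok(()) }
fn task_b(_input: &str) -> io::Result<()> { Ok(()) }

fn run_all(input: &str) -> io::Result<()> {
    // An array of fn pointers, iterated in parallel; try_for_each stops at the
    // first error and returns it.
    let tasks: [fn(&str) -> io::Result<()>; 2] = [task_a, task_b];
    tasks.par_iter().try_for_each(|f| f(input))
}

fn main() {
    run_all("rootfs").expect("postprocessing failed");
}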
@@ -19,11 +19,11 @@
 use gio_sys;
 use glib_sys;
 use libc;
-use std::ffi::{CStr, OsStr};
 use std::ffi::CString;
+use std::ffi::{CStr, OsStr};
 use std::fmt::Display;
-use std::os::unix::io::{FromRawFd, IntoRawFd};
 use std::os::unix::ffi::OsStrExt;
+use std::os::unix::io::{FromRawFd, IntoRawFd};
 use std::ptr;

 use openat;
@@ -18,9 +18,9 @@

 extern crate systemd;

-use failure::Fallible;
 use self::systemd::id128::Id128;
 use self::systemd::journal;
+use failure::Fallible;

 static OSTREE_FINALIZE_STAGED_SERVICE: &'static str = "ostree-finalize-staged.service";
 static OSTREE_DEPLOYMENT_FINALIZING_MSG_ID: &'static str = "e8646cd63dff4625b77909a8e7a40994";
@@ -27,8 +27,8 @@ extern crate glib_sys;
 extern crate indicatif;
 extern crate libc;
 extern crate openat;
-extern crate tempfile;
 extern crate rayon;
+extern crate tempfile;

 #[macro_use]
 extern crate lazy_static;
@@ -16,8 +16,8 @@
  * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
  */

-use std::{fs, io};
 use openat;
+use std::{fs, io};

 /// Helper functions for openat::Dir
 pub(crate) trait OpenatDirExt {
@@ -54,4 +54,3 @@ impl OpenatDirExt for openat::Dir {
         }
     }
 }
-
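OpenatDirExt, whose impl tail is trimmed above, is an extension trait layered over openat::Dir. The sketch below shows the general shape of such a trait with a hypothetical exists() helper; it is not the trait's actual method set in rpm-ostree.

extern crate openat;

use std::io;

// Hypothetical helper method layered onto openat::Dir via an extension trait.
pub(crate) trait DirExtSketch {
    /// Returns Ok(true) if `path` exists under this directory fd,
    /// Ok(false) if it does not, and Err for any other I/O failure.
    fn exists(&self, path: &str) -> io::Result<bool>;
}

impl DirExtSketch for openat::Dir {
    fn exists(&self, path: &str) -> io::Result<bool> {
        match self.metadata(path) {
            Ok(_) => Ok(true),
            Err(ref e) if e.kind() == io::ErrorKind::NotFound => Ok(false),
            Err(e) => Err(e),
        }
    }
}

fn main() -> io::Result<()> {
    let d = openat::Dir::open("/tmp")?;
    println!("exists: {}", d.exists("some-file")?);
    Ok(())
}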
@@ -21,6 +21,8 @@
  * */

 use c_utf8::CUtf8Buf;
+use failure::Fallible;
+use failure::ResultExt;
 use openat;
 use serde_json;
 use serde_yaml;
@@ -29,8 +31,6 @@ use std::io::prelude::*;
 use std::path::Path;
 use std::{collections, fs, io};
 use utils;
-use failure::Fallible;
-use failure::ResultExt;

 const INCLUDE_MAXDEPTH: u32 = 50;

@@ -101,9 +101,12 @@ fn treefile_parse_stream<R: io::Read>(
             if treearch != arch {
                 return Err(io::Error::new(
                     io::ErrorKind::InvalidInput,
-                    format!("Invalid basearch {} on {}: cross-composes are not supported",
-                            treearch, arch),
-                ).into())
+                    format!(
+                        "Invalid basearch {} on {}: cross-composes are not supported",
+                        treearch, arch
+                    ),
+                )
+                .into());
             } else {
                 Some(treearch)
             }
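Every error site rustfmt touches in this file follows the same idiom: construct a std::io::Error with a formatted message and convert it into failure's Error via .into(). A compilable sketch of that idiom; check_arch is a made-up wrapper around the same message, not the real parser.

extern crate failure;

use failure::Fallible;
use std::io;

// Illustrative check in the same style: reject a mismatched architecture by
// wrapping an io::Error into failure::Error.
fn check_arch(treearch: &str, arch: &str) -> Fallible<()> {
    if treearch != arch {
        return Err(io::Error::new(
            io::ErrorKind::InvalidInput,
            format!(
                "Invalid basearch {} on {}: cross-composes are not supported",
                treearch, arch
            ),
        )
        .into());
    }
    Ok(())
}

fn main() {
    assert!(check_arch("x86_64", "x86_64").is_ok());
    assert!(check_arch("aarch64", "x86_64").is_err());
}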
@@ -121,7 +124,8 @@ fn treefile_parse_stream<R: io::Read>(
         return Err(io::Error::new(
             io::ErrorKind::InvalidInput,
             format!("Unknown fields: {}", keys.join(", ")),
-        ).into());
+        )
+        .into());
     }

     // Substitute ${basearch}
@@ -162,39 +166,52 @@ fn treefile_parse_stream<R: io::Read>(
 /// matching the current basearch.
 fn take_archful_pkgs(
     basearch: Option<&str>,
-    treefile: &mut TreeComposeConfig
+    treefile: &mut TreeComposeConfig,
 ) -> Fallible<Option<Vec<String>>> {
     let mut archful_pkgs: Option<Vec<String>> = None;

     for key in treefile.extra.keys().filter(|k| k.starts_with("packages-")) {
-        if !treefile.extra[key].is_array() ||
-            treefile.extra[key].as_array().unwrap().iter().any(|v| !v.is_string()) {
+        if !treefile.extra[key].is_array()
+            || treefile.extra[key]
+                .as_array()
+                .unwrap()
+                .iter()
+                .any(|v| !v.is_string())
+        {
             return Err(io::Error::new(
                 io::ErrorKind::InvalidInput,
                 format!("Invalid field {}: expected array of strings", key),
-            ).into());
+            )
+            .into());
         }

         if let Some(basearch) = basearch {
             if basearch == &key["packages-".len()..] {
                 assert!(archful_pkgs == None);
-                archful_pkgs = Some(treefile.extra[key].as_array().unwrap().iter().map(|v| {
-                    v.as_str().unwrap().into()
-                }).collect());
+                archful_pkgs = Some(
+                    treefile.extra[key]
+                        .as_array()
+                        .unwrap()
+                        .iter()
+                        .map(|v| v.as_str().unwrap().into())
+                        .collect(),
+                );
             }
         }
     }

     // and drop it from the map
-    treefile.extra.retain(|ref k, _| !k.starts_with("packages-"));
+    treefile
+        .extra
+        .retain(|ref k, _| !k.starts_with("packages-"));

     Ok(archful_pkgs)
 }

 /// Open file and provide context containing filename on failures.
 fn open_file<P: AsRef<Path>>(filename: P) -> Fallible<fs::File> {
-    return Ok(fs::File::open(filename.as_ref()).with_context(
-        |e| format!("Can't open file {:?}: {}", filename.as_ref().display(), e))?);
+    return Ok(fs::File::open(filename.as_ref())
+        .with_context(|e| format!("Can't open file {:?}: {}", filename.as_ref().display(), e))?);
 }

 // If a passwd/group file is provided explicitly, load it as a fd
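open_file above shows the other error idiom in this file: failure's ResultExt::with_context attaches the filename to whatever error fs::File::open produced. A standalone version of the same pattern; only the function name differs from the original.

extern crate failure;

use failure::{Fallible, ResultExt};
use std::fs;
use std::path::Path;

// Open a file, attaching the path to any error so callers see which file failed.
fn open_with_filename_context<P: AsRef<Path>>(filename: P) -> Fallible<fs::File> {
    let f = fs::File::open(filename.as_ref())
        .with_context(|e| format!("Can't open file {:?}: {}", filename.as_ref().display(), e))?;
    Ok(f)
}

fn main() {
    match open_with_filename_context("/definitely/missing.yaml") {
        Ok(_) => println!("opened"),
        // The context string now leads the error display.
        Err(e) => println!("error: {}", e),
    }
}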
@@ -364,7 +381,8 @@ fn treefile_parse_recurse<P: AsRef<Path>>(
         return Err(io::Error::new(
             io::ErrorKind::InvalidInput,
             format!("Reached maximum include depth {}", INCLUDE_MAXDEPTH),
-        ).into());
+        )
+        .into());
     }
     let parent = filename.parent().unwrap();
     let include_path = parent.join(include_path);
@@ -426,7 +444,8 @@ impl Treefile {
                 return Err(io::Error::new(
                     io::ErrorKind::InvalidInput,
                     format!("Unsupported path in add-files: {}", dest),
-                ).into());
+                )
+                .into());
             }
         }
     }
@@ -683,10 +702,11 @@ impl TreeComposeConfig {
                 return Err(io::Error::new(
                     io::ErrorKind::InvalidInput,
                     format!("Cannot use new and legacy forms of {}", stringify!($field)),
-                ).into());
+                )
+                .into());
             }
             self.$field = self.$field.or(self.legacy_fields.$field.take());
-        }}
+        }};
     };

     migrate_field!(gpg_key);
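The `}}` to `}};` change lands inside a macro that migrates legacy snake_case treefile fields onto their current counterparts, erroring when both spellings are set. Below is a self-contained sketch of that pattern; the Config/LegacyFields types, the single field, and the plain io::Error return are stand-ins rather than the real TreeComposeConfig.

use std::io;

#[derive(Default)]
struct LegacyFields {
    gpg_key: Option<String>,
}

#[derive(Default)]
struct Config {
    gpg_key: Option<String>,
    legacy_fields: LegacyFields,
}

impl Config {
    fn migrate_legacy_fields(&mut self) -> Result<(), io::Error> {
        // Same shape as the hunk above: a local macro, one invocation per field;
        // note the `}};` ending rustfmt settles on for the macro arm.
        macro_rules! migrate_field {
            ($field:ident) => {{
                if self.$field.is_some() && self.legacy_fields.$field.is_some() {
                    return Err(io::Error::new(
                        io::ErrorKind::InvalidInput,
                        format!("Cannot use new and legacy forms of {}", stringify!($field)),
                    ));
                }
                self.$field = self.$field.take().or(self.legacy_fields.$field.take());
            }};
        }

        migrate_field!(gpg_key);
        Ok(())
    }
}

fn main() {
    let mut cfg = Config::default();
    cfg.legacy_fields.gpg_key = Some("foo".to_string());
    cfg.migrate_legacy_fields().unwrap();
    assert_eq!(cfg.gpg_key.as_deref(), Some("foo"));
}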
@@ -787,24 +807,24 @@ remove-files:
         let buf = VALID_PRELUDE.to_string() + data;
         let mut input = io::BufReader::new(buf.as_bytes());
         match treefile_parse_stream(InputFormat::YAML, &mut input, Some(ARCH_X86_64)) {
-            Err(ref e) => {
-                match e.downcast_ref::<io::Error>() {
-                    Some(ref ioe) if ioe.kind() == io::ErrorKind::InvalidInput => {},
-                    _ => panic!("Expected invalid treefile, not {}", e.to_string()),
-                }
-            }
+            Err(ref e) => match e.downcast_ref::<io::Error>() {
+                Some(ref ioe) if ioe.kind() == io::ErrorKind::InvalidInput => {}
+                _ => panic!("Expected invalid treefile, not {}", e.to_string()),
+            },
             Ok(_) => panic!("Expected invalid treefile"),
         }
     }

     #[test]
     fn basic_valid_legacy() {
-        let treefile = append_and_parse("
+        let treefile = append_and_parse(
+            "
 gpg_key: foo
 boot_location: both
 default_target: bar
 automatic_version_prefix: baz
-");
+",
+        );
         assert!(treefile.gpg_key.unwrap() == "foo");
         assert!(treefile.boot_location.unwrap() == BootLocation::Both);
         assert!(treefile.default_target.unwrap() == "bar");
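The reshaped test arm leans on failure::Error::downcast_ref to recover the underlying io::Error and assert on its kind. The same check in a freestanding form, with a made-up parse_something standing in for treefile_parse_stream:

extern crate failure;

use failure::Fallible;
use std::io;

// Made-up fallible helper that always reports invalid input.
fn parse_something(_input: &str) -> Fallible<()> {
    Err(io::Error::new(io::ErrorKind::InvalidInput, "Unknown fields: foo").into())
}

fn main() {
    match parse_something("gpg-key: foo") {
        Err(ref e) => match e.downcast_ref::<io::Error>() {
            // The error chain bottoms out in an io::Error with the expected kind.
            Some(ref ioe) if ioe.kind() == io::ErrorKind::InvalidInput => {}
            _ => panic!("Expected invalid input, not {}", e),
        },
        Ok(_) => panic!("Expected an error"),
    }
}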
@@ -813,12 +833,14 @@ automatic_version_prefix: baz

     #[test]
     fn basic_valid_legacy_new() {
-        let treefile = append_and_parse("
+        let treefile = append_and_parse(
+            "
 gpg-key: foo
 boot-location: both
 default-target: bar
 automatic-version-prefix: baz
-");
+",
+        );
         assert!(treefile.gpg_key.unwrap() == "foo");
         assert!(treefile.boot_location.unwrap() == BootLocation::Both);
         assert!(treefile.default_target.unwrap() == "bar");
@@ -827,22 +849,30 @@ automatic-version-prefix: baz

     #[test]
     fn basic_invalid_legacy_both() {
-        test_invalid("
+        test_invalid(
+            "
 gpg-key: foo
 gpg_key: bar
-");
-        test_invalid("
+",
+        );
+        test_invalid(
+            "
 boot-location: new
 boot_location: both
-");
-        test_invalid("
+",
+        );
+        test_invalid(
+            "
 default-target: foo
 default_target: bar
-");
-        test_invalid("
+",
+        );
+        test_invalid(
+            "
 automatic-version-prefix: foo
 automatic_version_prefix: bar
-");
+",
+        );
     }

     #[test]
@@ -887,8 +917,11 @@ automatic_version_prefix: bar
             let mut tf_stream = io::BufWriter::new(fs::File::create(&tf_path)?);
             tf_stream.write_all(contents.as_bytes())?;
         }
-        let tf =
-            Treefile::new_boxed(tf_path.as_path(), basearch, openat::Dir::open(workdir.path())?)?;
+        let tf = Treefile::new_boxed(
+            tf_path.as_path(),
+            basearch,
+            openat::Dir::open(workdir.path())?,
+        )?;
         Ok(TreefileTest { tf, workdir })
     }
 }
@@ -931,7 +964,8 @@ rojig:
             r###"
 packages:
   - some layered packages
-"###.as_bytes(),
+"###
+            .as_bytes(),
         );
         let mut mid = treefile_parse_stream(InputFormat::YAML, &mut mid_input, basearch).unwrap();
         let mut top_input = io::BufReader::new(ROJIG_YAML.as_bytes());
@@ -948,8 +982,9 @@ packages:
     fn test_open_file_nonexistent() {
         let path = "/usr/share/empty/manifest.yaml";
         match treefile_parse(path, None) {
-            Err(ref e) => assert!(e.to_string().starts_with(
-                format!("Can't open file {:?}:", path).as_str())),
+            Err(ref e) => assert!(e
+                .to_string()
+                .starts_with(format!("Can't open file {:?}:", path).as_str())),
             Ok(_) => panic!("Expected nonexistent treefile error for {}", path),
         }
     }
@@ -104,7 +104,8 @@ mod tests {
         let r = varsubst(
             "ostree/${osvendor}/${basearch}/blah/${basearch}/whee",
             &subs,
-        ).unwrap();
+        )
+        .unwrap();
         assert_eq!(r, "ostree/fedora/ppc64le/blah/ppc64le/whee");
         let r = varsubst("${osvendor}${basearch}", &subs).unwrap();
         assert_eq!(r, "fedorappc64le");
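For context, the varsubst being tested expands ${name} references from a substitution map, which is what the assertions above exercise. The function below is a naive re-derivation of that idea for illustration only; it is not rpm-ostree's varsubst and skips its error handling (unknown or unterminated references are simply passed through).

use std::collections::HashMap;

// Naive ${name} expansion over a substitution map.
fn simple_varsubst(input: &str, subs: &HashMap<String, String>) -> String {
    let mut out = String::new();
    let mut rest = input;
    while let Some(start) = rest.find("${") {
        out.push_str(&rest[..start]);
        match rest[start + 2..].find('}') {
            Some(end) => {
                let name = &rest[start + 2..start + 2 + end];
                match subs.get(name) {
                    Some(val) => out.push_str(val),
                    // Unknown variable: keep the reference as-is.
                    None => out.push_str(&rest[start..start + 2 + end + 1]),
                }
                rest = &rest[start + 2 + end + 1..];
            }
            None => {
                // Unterminated reference: emit the remainder untouched.
                out.push_str(&rest[start..]);
                rest = "";
            }
        }
    }
    out.push_str(rest);
    out
}

fn main() {
    let mut subs = HashMap::new();
    subs.insert("osvendor".to_string(), "fedora".to_string());
    subs.insert("basearch".to_string(), "ppc64le".to_string());
    let r = simple_varsubst("ostree/${osvendor}/${basearch}/blah/${basearch}/whee", &subs);
    assert_eq!(r, "ostree/fedora/ppc64le/blah/ppc64le/whee");
    println!("{}", r);
}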