author     notgne2  2020-11-22 21:56:43 -0700
committer  notgne2  2020-11-22 21:56:43 -0700
commit     551dd1c3042743c373bf10cb8d14fac52bf26351 (patch)
tree       e8081150f443ae5be6063b6cb66867531763dae9 /src
parent     819eca679ee4038b8b9fc1fbb2e662bcbe9f2d44 (diff)
Remove ref/cows from DeployDefs, add interactive flag (resolves #4)
Diffstat (limited to 'src')
-rw-r--r--  src/main.rs          83
-rw-r--r--  src/utils/deploy.rs   2
-rw-r--r--  src/utils/mod.rs     34
-rw-r--r--  src/utils/push.rs     2
4 files changed, 67 insertions, 54 deletions
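
The two user-visible changes land in src/main.rs below: Opts gains an -i/--interactive flag, and the confirmation prompt is reworked to run once over a pre-built list of deployments. A minimal sketch of the flag, mirroring only the fields visible in the first hunk and assuming the clap derive API in use at the time (the Clap derive was later renamed Parser); the real Opts struct has more fields:

    use clap::Clap; // renamed to `Parser` in later clap releases

    #[derive(Clap)]
    struct Opts {
        /// Check signatures when using `nix copy`
        #[clap(short, long)]
        checksigs: bool,
        /// Use the interactive prompt before deployment
        #[clap(short, long)]
        interactive: bool,
        /// Extra arguments to be passed to nix build
        extra_build_args: Vec<String>,
    }

    fn main() {
        let opts = Opts::parse();
        // `opts.interactive` is threaded through to run_deploy, which only
        // calls prompt_deployment when the flag is set.
        println!("interactive = {}", opts.interactive);
    }
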
diff --git a/src/main.rs b/src/main.rs
index 5b597e9..d72051f 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -33,6 +33,9 @@ struct Opts {
/// Check signatures when using `nix copy`
#[clap(short, long)]
checksigs: bool,
+ /// Use the interactive prompt before deployment
+ #[clap(short, long)]
+ interactive: bool,
/// Extra arguments to be passed to nix build
extra_build_args: Vec<String>,
@@ -216,21 +219,9 @@ struct PromptPart<'a> {
ssh_opts: &'a [String],
}
-#[derive(Error, Debug)]
-enum PromptChangesError {
- #[error("Failed to make printable TOML of deployment: {0}")]
- TomlFormat(#[from] toml::ser::Error),
- #[error("Failed to flush stdout prior to query: {0}")]
- StdoutFlush(std::io::Error),
- #[error("Failed to read line from stdin: {0}")]
- StdinRead(std::io::Error),
- #[error("User cancelled deployment")]
- Cancelled,
-}
-
-fn prompt_changes(
- parts: Vec<(&utils::DeployData, &utils::DeployDefs)>,
-) -> Result<(), PromptChangesError> {
+fn print_deployment(
+ parts: &[(utils::DeployData, utils::DeployDefs)],
+) -> Result<(), toml::ser::Error> {
let mut part_map: HashMap<String, HashMap<String, PromptPart>> = HashMap::new();
for (data, defs) in parts {
@@ -253,30 +244,53 @@ fn prompt_changes(
warn!("The following profiles are going to be deployed:\n{}", toml);
+ Ok(())
+}
+#[derive(Error, Debug)]
+enum PromptDeploymentError {
+ #[error("Failed to make printable TOML of deployment: {0}")]
+ TomlFormat(#[from] toml::ser::Error),
+ #[error("Failed to flush stdout prior to query: {0}")]
+ StdoutFlush(std::io::Error),
+ #[error("Failed to read line from stdin: {0}")]
+ StdinRead(std::io::Error),
+ #[error("User cancelled deployment")]
+ Cancelled,
+}
+
+fn prompt_deployment(
+ parts: &[(utils::DeployData, utils::DeployDefs)],
+) -> Result<(), PromptDeploymentError> {
+ print_deployment(parts)?;
+
info!("Are you sure you want to deploy these profiles?");
print!("> ");
- stdout().flush().map_err(PromptChangesError::StdoutFlush)?;
+ stdout()
+ .flush()
+ .map_err(PromptDeploymentError::StdoutFlush)?;
let mut s = String::new();
stdin()
.read_line(&mut s)
- .map_err(PromptChangesError::StdinRead)?;
+ .map_err(PromptDeploymentError::StdinRead)?;
if !yn::yes(&s) {
if yn::is_somewhat_yes(&s) {
info!("Sounds like you might want to continue, to be more clear please just say \"yes\". Do you want to deploy these profiles?");
print!("> ");
- stdout().flush().map_err(PromptChangesError::StdoutFlush)?;
+ stdout()
+ .flush()
+ .map_err(PromptDeploymentError::StdoutFlush)?;
let mut s = String::new();
stdin()
.read_line(&mut s)
- .map_err(PromptChangesError::StdinRead)?;
+ .map_err(PromptDeploymentError::StdinRead)?;
if !yn::yes(&s) {
- return Err(PromptChangesError::Cancelled);
+ return Err(PromptDeploymentError::Cancelled);
}
} else {
if !yn::no(&s) {
@@ -285,7 +299,7 @@ fn prompt_changes(
);
}
- return Err(PromptChangesError::Cancelled);
+ return Err(PromptDeploymentError::Cancelled);
}
}
@@ -307,7 +321,7 @@ enum RunDeployError {
#[error("Error processing deployment definitions: {0}")]
DeployDataDefsError(#[from] utils::DeployDataDefsError),
#[error("{0}")]
- PromptChangesError(#[from] PromptChangesError),
+ PromptDeploymentError(#[from] PromptDeploymentError),
}
async fn run_deploy(
@@ -315,6 +329,7 @@ async fn run_deploy(
data: utils::data::Data,
supports_flakes: bool,
check_sigs: bool,
+ interactive: bool,
cmd_overrides: utils::CmdOverrides,
keep_result: bool,
result_path: Option<&str>,
@@ -405,6 +420,8 @@ async fn run_deploy(
(None, Some(_)) => return Err(RunDeployError::ProfileWithoutNode),
};
+ let mut parts: Vec<(utils::DeployData, utils::DeployDefs)> = Vec::new();
+
for ((node_name, node), (profile_name, profile)) in &to_deploy {
let deploy_data = utils::make_deploy_data(
&data.generic_settings,
@@ -417,6 +434,14 @@ async fn run_deploy(
let deploy_defs = deploy_data.defs()?;
+ parts.push((deploy_data, deploy_defs));
+ }
+
+ if interactive {
+ prompt_deployment(&parts[..])?;
+ }
+
+ for (deploy_data, deploy_defs) in &parts {
utils::push::push_profile(
supports_flakes,
check_sigs,
@@ -430,18 +455,7 @@ async fn run_deploy(
.await?;
}
- for ((node_name, node), (profile_name, profile)) in &to_deploy {
- let deploy_data = utils::make_deploy_data(
- &data.generic_settings,
- node,
- node_name,
- profile,
- profile_name,
- &cmd_overrides,
- );
-
- let deploy_defs = deploy_data.defs()?;
-
+ for (deploy_data, deploy_defs) in &parts {
utils::deploy::deploy_profile(&deploy_data, &deploy_defs).await?;
}
@@ -509,6 +523,7 @@ async fn run() -> Result<(), RunError> {
data,
supports_flakes,
opts.checksigs,
+ opts.interactive,
cmd_overrides,
opts.keep_result,
result_path,
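
Taken together, the run_deploy hunks above change the ordering: every (DeployData, DeployDefs) pair is built first, the confirmation prompt (when -i/--interactive is set) runs once over the whole set, and only then do the push and activation loops execute. A condensed, non-async sketch of that flow, with stand-in types and the remote calls reduced to comments:

    // Stand-ins for utils::DeployData / utils::DeployDefs, kept empty here to
    // focus on control flow; the real types carry node and profile settings.
    struct DeployData;
    struct DeployDefs;

    #[derive(Debug)]
    enum RunDeployError {
        Cancelled,
    }

    fn prompt_deployment(_parts: &[(DeployData, DeployDefs)]) -> Result<(), RunDeployError> {
        // The real function prints a TOML summary of the parts, reads stdin,
        // and returns Cancelled unless the answer is a clear "yes".
        Ok(())
    }

    fn run_deploy(to_deploy: Vec<DeployData>, interactive: bool) -> Result<(), RunDeployError> {
        // 1. Build every (data, defs) pair up front.
        let mut parts: Vec<(DeployData, DeployDefs)> = Vec::new();
        for deploy_data in to_deploy {
            let deploy_defs = DeployDefs; // `deploy_data.defs()?` in the real code
            parts.push((deploy_data, deploy_defs));
        }

        // 2. One confirmation prompt for the whole deployment, only when requested.
        if interactive {
            prompt_deployment(&parts[..])?;
        }

        // 3. Push all profiles, then activate all profiles, reusing the same pairs.
        for (_deploy_data, _deploy_defs) in &parts {
            // utils::push::push_profile(...).await? in the real code
        }
        for (_deploy_data, _deploy_defs) in &parts {
            // utils::deploy::deploy_profile(...).await? in the real code
        }
        Ok(())
    }

    fn main() {
        run_deploy(vec![DeployData, DeployData], false).unwrap();
    }
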
diff --git a/src/utils/deploy.rs b/src/utils/deploy.rs
index adcde64..a82fa6c 100644
--- a/src/utils/deploy.rs
+++ b/src/utils/deploy.rs
@@ -82,7 +82,7 @@ pub enum DeployProfileError {
pub async fn deploy_profile(
deploy_data: &super::DeployData<'_>,
- deploy_defs: &super::DeployDefs<'_>,
+ deploy_defs: &super::DeployDefs,
) -> Result<(), DeployProfileError> {
info!(
"Activating profile `{}` for node `{}`",
diff --git a/src/utils/mod.rs b/src/utils/mod.rs
index 76d638d..2c114f5 100644
--- a/src/utils/mod.rs
+++ b/src/utils/mod.rs
@@ -2,7 +2,6 @@
//
// SPDX-License-Identifier: MPL-2.0
-use std::borrow::Cow;
use std::path::PathBuf;
use merge::Merge;
@@ -96,7 +95,7 @@ fn test_parse_flake() {
);
}
-#[derive(Debug)]
+#[derive(Debug, Clone)]
pub struct DeployData<'a> {
pub node_name: &'a str,
pub node: &'a data::Node,
@@ -109,10 +108,10 @@ pub struct DeployData<'a> {
}
#[derive(Debug)]
-pub struct DeployDefs<'a> {
- pub ssh_user: Cow<'a, str>,
- pub profile_user: Cow<'a, str>,
- pub profile_path: Cow<'a, str>,
+pub struct DeployDefs {
+ pub ssh_user: String,
+ pub profile_user: String,
+ pub profile_path: String,
pub current_exe: PathBuf,
pub sudo: Option<String>,
}
@@ -128,16 +127,16 @@ pub enum DeployDataDefsError {
}
impl<'a> DeployData<'a> {
- pub fn defs(&'a self) -> Result<DeployDefs<'a>, DeployDataDefsError> {
- let ssh_user: Cow<str> = match self.merged_settings.ssh_user {
- Some(ref u) => u.into(),
- None => whoami::username().into(),
+ pub fn defs(&'a self) -> Result<DeployDefs, DeployDataDefsError> {
+ let ssh_user = match self.merged_settings.ssh_user {
+ Some(ref u) => u.clone(),
+ None => whoami::username(),
};
- let profile_user: Cow<str> = match self.merged_settings.user {
- Some(ref x) => x.into(),
+ let profile_user = match self.merged_settings.user {
+ Some(ref x) => x.clone(),
None => match self.merged_settings.ssh_user {
- Some(ref x) => x.into(),
+ Some(ref x) => x.clone(),
None => {
return Err(DeployDataDefsError::NoProfileUser(
self.profile_name.to_owned(),
@@ -147,16 +146,15 @@ impl<'a> DeployData<'a> {
},
};
- let profile_path: Cow<str> = match self.profile.profile_settings.profile_path {
+ let profile_path = match self.profile.profile_settings.profile_path {
None => match &profile_user[..] {
- "root" => format!("/nix/var/nix/profiles/{}", self.profile_name).into(),
+ "root" => format!("/nix/var/nix/profiles/{}", self.profile_name),
_ => format!(
"/nix/var/nix/profiles/per-user/{}/{}",
profile_user, self.profile_name
- )
- .into(),
+ ),
},
- Some(ref x) => x.into(),
+ Some(ref x) => x.clone(),
};
let sudo: Option<String> = match self.merged_settings.user {
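
The DeployDefs rewrite above is what allows main.rs to collect the pairs into a Vec and pass them around by slice: the struct now owns plain Strings instead of Cow<'a, str> borrows, and defs() clones or computes each value. A reduced sketch of the same pattern, using hypothetical stand-in settings fields rather than the project's real data model (the optional profile_path override is omitted):

    // Hypothetical, trimmed-down settings struct; the real code merges these
    // from per-node and per-profile settings.
    struct MergedSettings {
        ssh_user: Option<String>,
        user: Option<String>,
    }

    // After this commit DeployDefs owns its strings, so it carries no lifetime.
    #[derive(Debug)]
    struct DeployDefs {
        ssh_user: String,
        profile_user: String,
        profile_path: String,
    }

    fn defs(settings: &MergedSettings, profile_name: &str) -> Option<DeployDefs> {
        // Clone from the settings or fall back to a computed owned value.
        let ssh_user = match settings.ssh_user {
            Some(ref u) => u.clone(),
            None => whoami::username(),
        };

        let profile_user = match settings.user {
            Some(ref u) => u.clone(),
            // the real code returns DeployDataDefsError::NoProfileUser here
            None => settings.ssh_user.clone()?,
        };

        let profile_path = match &profile_user[..] {
            "root" => format!("/nix/var/nix/profiles/{}", profile_name),
            _ => format!(
                "/nix/var/nix/profiles/per-user/{}/{}",
                profile_user, profile_name
            ),
        };

        Some(DeployDefs { ssh_user, profile_user, profile_path })
    }

    fn main() {
        let settings = MergedSettings { ssh_user: None, user: Some("root".to_string()) };
        println!("{:?}", defs(&settings, "system"));
    }
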
diff --git a/src/utils/push.rs b/src/utils/push.rs
index 2ea259a..18b97b5 100644
--- a/src/utils/push.rs
+++ b/src/utils/push.rs
@@ -30,7 +30,7 @@ pub async fn push_profile(
check_sigs: bool,
repo: &str,
deploy_data: &super::DeployData<'_>,
- deploy_defs: &super::DeployDefs<'_>,
+ deploy_defs: &super::DeployDefs,
keep_result: bool,
result_path: Option<&str>,
extra_build_args: &[String],