diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9dd59cc1..2669045f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -28,6 +28,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) a
 - `script`: Allow flist formats to use `only-sources`/`only-includes`/`only-defines` flags.
 - Add check to ensure files referenced in all manifests exist.
 - Add warnings for unknown fields in manifest.
+- Add support for referencing and reading in external file lists in the manifest.
+- Enumerate warnings and add a `--suppress` flag to hide specific warnings.
 
 ### Changed
 - `update`: Clean up alignment of manual resolution output.
diff --git a/README.md b/README.md
index d28ade17..d825ffa2 100644
--- a/README.md
+++ b/README.md
@@ -150,6 +150,11 @@ sources:
   # Source files can use glob patterns to include all matching files:
   - src/more_stuff/**/*.sv
 
+  # A file list kept in another external file; plain file names, `+define+`, and `+incdir+` entries are supported.
+  - external_flists:
+      - other_file_list.f
+    files: []
+
   # A list of include directories which should implicitly be added to source
   # file groups of packages that have the current package as a dependency.
   # Optional.
diff --git a/src/cli.rs b/src/cli.rs
index 7dd6a7c8..53d7d3ec 100644
--- a/src/cli.rs
+++ b/src/cli.rs
@@ -63,6 +63,15 @@ pub fn main() -> Result<()> {
                 .value_parser(value_parser!(usize))
                 .help("Sets the maximum number of concurrent git operations"),
         )
+        .arg(
+            Arg::new("suppress")
+                .long("suppress")
+                .global(true)
+                .num_args(1)
+                .action(ArgAction::Append)
+                .help("Suppresses specific warnings. Use `all` to suppress all warnings.")
+                .value_parser(value_parser!(String)),
+        )
         .subcommand(cmd::update::new())
         .subcommand(cmd::path::new())
         .subcommand(cmd::parents::new())
@@ -95,6 +104,19 @@ pub fn main() -> Result<()> {
     // Parse the arguments.
     let matches = app.clone().get_matches();
 
+    let suppressed_warnings: IndexSet<String> = matches
+        .get_many::<String>("suppress")
+        .unwrap_or_default()
+        .map(|s| s.to_owned())
+        .collect();
+
+    let suppressed_warnings: IndexSet<String> =
+        if suppressed_warnings.contains("all") || suppressed_warnings.contains("Wall") {
+            (1..=22).map(|i| format!("W{:02}", i)).collect()
+        } else {
+            suppressed_warnings
+        };
+
     // Enable debug outputs if needed.
     if matches.contains_id("debug") && matches.get_flag("debug") {
         ENABLE_DEBUG.store(true, std::sync::atomic::Ordering::Relaxed);
@@ -111,7 +133,7 @@ pub fn main() -> Result<()> {
 
     let mut force_fetch = false;
     if let Some(("update", intern_matches)) = matches.subcommand() {
-        force_fetch = cmd::update::setup(intern_matches)?;
+        force_fetch = cmd::update::setup(intern_matches, &suppressed_warnings)?;
     }
 
     // Determine the root working directory, which has either been provided via
@@ -128,13 +150,14 @@ pub fn main() -> Result<()> {
 
     // Parse the manifest file of the package.
     let manifest_path = root_dir.join("Bender.yml");
-    let manifest = read_manifest(&manifest_path)?;
+    let manifest = read_manifest(&manifest_path, &suppressed_warnings)?;
     debugln!("main: {:#?}", manifest);
 
     // Gather and parse the tool configuration.
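
Editor's note: `--suppress` is declared `global(true)` with `ArgAction::Append`, so codes can be stacked on any subcommand, e.g. `bender --suppress W03 --suppress W14 update`. A minimal standalone sketch of the `all`/`Wall` fan-out implemented in `main` above (the function name is hypothetical, the logic mirrors the diff):

```rust
use indexmap::IndexSet;

/// Expand the raw `--suppress` values: `all` (or `Wall`) fans out to the
/// full numbered range W01..=W22; anything else is passed through verbatim.
fn expand_suppressed(raw: IndexSet<String>) -> IndexSet<String> {
    if raw.contains("all") || raw.contains("Wall") {
        (1..=22).map(|i| format!("W{:02}", i)).collect()
    } else {
        raw
    }
}
```
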
     let config = load_config(
         &root_dir,
-        matches!(matches.subcommand(), Some(("update", _))),
+        matches!(matches.subcommand(), Some(("update", _))) && !suppressed_warnings.contains("W02"),
+        &suppressed_warnings,
     )?;
     debugln!("main: {:#?}", config);
 
@@ -156,6 +179,7 @@ pub fn main() -> Result<()> {
         matches.get_flag("local"),
         force_fetch,
         git_throttle,
+        suppressed_warnings,
     );
 
     if let Some(("clean", intern_matches)) = matches.subcommand() {
@@ -223,11 +247,13 @@ pub fn main() -> Result<()> {
                 )
             })?;
             if !meta.file_type().is_symlink() {
-                warnln!(
-                    "Skipping link to package {} at {:?} since there is something there",
-                    pkg_name,
-                    path
-                );
+                if !sess.suppress_warnings.contains("W01") {
+                    warnln!(
+                        "[W01] Skipping link to package {} at {:?} since there is something there",
+                        pkg_name,
+                        path
+                    );
+                }
                 continue;
             }
             if path.read_link().map(|d| d != pkg_path).unwrap_or(true) {
@@ -358,7 +384,7 @@ fn find_package_root(from: &Path) -> Result<PathBuf> {
 }
 
 /// Read a package manifest from a file.
-pub fn read_manifest(path: &Path) -> Result<Manifest> {
+pub fn read_manifest(path: &Path, suppress_warnings: &IndexSet<String>) -> Result<Manifest> {
     use crate::config::PartialManifest;
     use std::fs::File;
     debugln!("read_manifest: {:?}", path);
@@ -369,12 +395,16 @@ pub fn read_manifest(path: &Path) -> Result<Manifest> {
     partial
         .prefix_paths(path.parent().unwrap())
         .map_err(|cause| Error::chain(format!("Error in manifest prefixing {:?}.", path), cause))?
-        .validate("", false)
+        .validate("", false, suppress_warnings)
         .map_err(|cause| Error::chain(format!("Error in manifest {:?}.", path), cause))
 }
 
 /// Load a configuration by traversing a directory hierarchy upwards.
-fn load_config(from: &Path, warn_config_loaded: bool) -> Result<Config> {
+fn load_config(
+    from: &Path,
+    warn_config_loaded: bool,
+    suppress_warnings: &IndexSet<String>,
+) -> Result<Config> {
     #[cfg(unix)]
     use std::os::unix::fs::MetadataExt;
 
@@ -447,7 +477,7 @@ fn load_config(from: &Path, warn_config_loaded: bool) -> Result<Config> {
 
     // Validate the configuration.
     let mut out = out
-        .validate("", false)
+        .validate("", false, suppress_warnings)
         .map_err(|cause| Error::chain("Invalid configuration:", cause))?;
 
     out.overrides = out
@@ -471,7 +501,7 @@ fn maybe_load_config(path: &Path, warn_config_loaded: bool) -> Result<Option<PartialConfig>> {
diff --git a/src/cmd/clone.rs b/src/cmd/clone.rs
--- a/src/cmd/clone.rs
+++ b/src/cmd/clone.rs
@@ pub fn run(sess: &Session, path: &Path, matches: &ArgMatches) -> Result<()> {
             {
                 Err(Error::new("git fetch failed".to_string()))?;
             }
-        } else {
-            warnln!("fetch not performed due to --local argument.");
+        } else if !sess.suppress_warnings.contains("W14") {
+            warnln!("[W14] fetch not performed due to --local argument.");
         }
 
         println!(
@@ -263,7 +263,7 @@ pub fn run(sess: &Session, path: &Path, matches: &ArgMatches) -> Result<()> {
         })?;
         if !meta.file_type().is_symlink() {
             warnln!(
-                "Skipping link to package {} at {:?} since there is something there",
+                "[W15] Skipping link to package {} at {:?} since there is something there",
                 pkg_name,
                 link_path
             );
diff --git a/src/cmd/fusesoc.rs b/src/cmd/fusesoc.rs
index 82e9c91a..62e1889d 100644
--- a/src/cmd/fusesoc.rs
+++ b/src/cmd/fusesoc.rs
@@ -155,8 +155,8 @@ pub fn run_single(sess: &Session, matches: &ArgMatches) -> Result<()> {
             Error::chain(format!("Unable to write corefile for {:?}.", &name), cause)
         })?;
 
-    if fuse_depend_string.len() > 1 {
-        warnln!("Depend strings may be wrong for the included dependencies!");
+    if fuse_depend_string.len() > 1 && !sess.suppress_warnings.contains("W16") {
+        warnln!("[W16] Depend strings may be wrong for the included dependencies!");
     }
 
     Ok(())
diff --git a/src/cmd/parents.rs b/src/cmd/parents.rs
index 5acc69ee..4479261b 100644
--- a/src/cmd/parents.rs
+++ b/src/cmd/parents.rs
@@ -59,7 +59,9 @@ pub fn run(sess: &Session, matches: &ArgMatches) -> Result<()> {
             .unwrap();
         // Filter out dependencies without a manifest
         if dep_manifest.is_none() {
-            warnln!("{} is shown to include dependency, but manifest does not have this information.", pkg_name.to_string());
+            if !sess.suppress_warnings.contains("W17") {
+                warnln!("[W17] {} is shown to include dependency, but manifest does not have this information.", pkg_name.to_string());
+            }
             continue;
         }
         let dep_manifest = dep_manifest.unwrap();
@@ -77,9 +79,9 @@ pub fn run(sess: &Session, matches: &ArgMatches) -> Result<()> {
                     ),
                 ],
             );
-        } else {
+        } else if !sess.suppress_warnings.contains("W17") {
             // Filter out dependencies with mismatching manifest
-            warnln!("{} is shown to include dependency, but manifest does not have this information.", pkg_name.to_string());
+            warnln!("[W17] {} is shown to include dependency, but manifest does not have this information.", pkg_name.to_string());
         }
     }
 }
@@ -130,9 +132,9 @@ pub fn run(sess: &Session, matches: &ArgMatches) -> Result<()> {
         }
     );
 
-    if sess.config.overrides.contains_key(dep) {
+    if sess.config.overrides.contains_key(dep) && !sess.suppress_warnings.contains("W18") {
         warnln!(
-            "An override is configured for {} to {:?}",
+            "[W18] An override is configured for {} to {:?}",
             dep,
             sess.config.overrides[dep]
         )
diff --git a/src/cmd/script.rs b/src/cmd/script.rs
index ef8a9f6b..19b0154e 100644
--- a/src/cmd/script.rs
+++ b/src/cmd/script.rs
@@ -297,7 +297,7 @@ pub fn run(sess: &Session, matches: &ArgMatches) -> Result<()> {
     let srcs = srcs
         .flatten()
         .into_iter()
-        .map(|f| f.validate("", false))
+        .map(|f| f.validate("", false, &sess.suppress_warnings))
         .collect::<Result<Vec<_>>>()?;
 
     // Validate format-specific options.
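
The `if !suppress_warnings.contains("Wxx") { warnln!("[Wxx] ...") }` pattern recurs at every warning site in this diff. A hypothetical helper macro (not part of this change; `warnln!` is bender's existing logging macro) could keep the tag in the message and the code in the check from drifting apart:

```rust
/// Hypothetical convenience wrapper; `$fmt` must be a string literal so
/// `concat!` can prepend the warning tag at compile time.
macro_rules! warn_unless_suppressed {
    ($suppress:expr, $code:literal, $fmt:literal $(, $arg:expr)* $(,)?) => {
        if !$suppress.contains($code) {
            warnln!(concat!("[", $code, "] ", $fmt) $(, $arg)*);
        }
    };
}

// Usage sketch, equivalent to the expanded form used throughout the diff:
// warn_unless_suppressed!(sess.suppress_warnings, "W14",
//     "fetch not performed due to --local argument.");
```
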
diff --git a/src/cmd/update.rs b/src/cmd/update.rs
index 67fd895b..9b010e1f 100644
--- a/src/cmd/update.rs
+++ b/src/cmd/update.rs
@@ -62,10 +62,13 @@ pub fn new() -> Command {
 }
 
 /// Execute the `update` subcommand.
-pub fn setup(matches: &ArgMatches) -> Result<bool> {
+pub fn setup(matches: &ArgMatches, suppress_warnings: &IndexSet<String>) -> Result<bool> {
     let force_fetch = matches.get_flag("fetch");
-    if matches.get_flag("local") && matches.get_flag("fetch") {
-        warnln!("As --local argument is set for bender command, no fetching will be performed.");
+    if matches.get_flag("local") && matches.get_flag("fetch") && !suppress_warnings.contains("W14")
+    {
+        warnln!(
+            "[W14] As --local argument is set for bender command, no fetching will be performed."
+        );
     }
     Ok(force_fetch)
 }
diff --git a/src/cmd/vendor.rs b/src/cmd/vendor.rs
index 29a0faa0..8b1944c5 100644
--- a/src/cmd/vendor.rs
+++ b/src/cmd/vendor.rs
@@ -99,9 +99,9 @@ pub fn run(sess: &Session, matches: &ArgMatches) -> Result<()> {
         git.clone().spawn_with(|c| c.arg("clone").arg(url).arg("."))
             .map_err(move |cause| {
                 if url.contains("git@") {
-                    warnln!("Please ensure your public ssh key is added to the git server.");
+                    warnln!("[W07] Please ensure your public ssh key is added to the git server.");
                 }
-                warnln!("Please ensure the url is correct and you have access to the repository.");
+                warnln!("[W07] Please ensure the url is correct and you have access to the repository.");
                 Error::chain(
                     format!("Failed to initialize git database in {:?}.", tmp_path),
                     cause,
@@ -280,7 +280,7 @@ pub fn run(sess: &Session, matches: &ArgMatches) -> Result<()> {
                 }
             },
             None => {
-                warnln!("No patch directory specified for package {}, mapping {} => {}. Skipping patch generation.", vendor_package.name.clone(), patch_link.from_prefix.to_str().unwrap(), patch_link.to_prefix.to_str().unwrap());
+                warnln!("[W15] No patch directory specified for package {}, mapping {} => {}. Skipping patch generation.", vendor_package.name.clone(), patch_link.from_prefix.to_str().unwrap(), patch_link.to_prefix.to_str().unwrap());
                 Ok(())
             },
         }
@@ -330,7 +330,7 @@ pub fn init(
     // Check if includes exist
     for path in vendor_package.include_from_upstream.clone() {
         if !PathBuf::from(extend_paths(&[path.clone()], dep_path, true)?[0].clone()).exists() {
-            warnln!("{} not found in upstream, continuing.", path);
+            warnln!("[W16] {} not found in upstream, continuing.", path);
         }
     }
 
@@ -361,7 +361,7 @@ pub fn init(
             })?;
         } else {
             warnln!(
-                "{} not found in upstream, continuing.",
+                "[W16] {} not found in upstream, continuing.",
                 link_from.to_str().unwrap()
             );
         }
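
For reference, the `.f` syntax understood by the new `external_flists` support in src/config.rs below is deliberately small: `+incdir+DIR`, `+define+NAME[=VALUE]`, and plain file paths, one entry per line; any other `+` directive is dropped. A hypothetical file matching the README's `other_file_list.f` example name:

```rust
/// Hypothetical contents for `other_file_list.f`; file and include paths are
/// interpreted relative to the directory containing the flist file.
const EXAMPLE_FLIST: &str = "\
+incdir+include
+define+TARGET_SYNTHESIS
+define+DATA_WIDTH=32
src/my_pkg.sv
src/my_top.sv
";
```
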
diff --git a/src/config.rs b/src/config.rs
index d41544ec..acaa5d7f 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -11,13 +11,14 @@
 use std;
 use std::collections::{BTreeMap, BTreeSet, HashMap};
 use std::fmt;
+use std::fs::File;
 use std::hash::Hash;
+use std::io::{BufRead, BufReader};
 use std::path::{Path, PathBuf};
 use std::str::FromStr;
 
 use glob::glob;
-
-use indexmap::IndexMap;
+use indexmap::{IndexMap, IndexSet};
 use semver;
 use serde::de::{Deserialize, Deserializer};
 use serde::ser::{Serialize, Serializer};
@@ -225,6 +226,7 @@ pub trait Validate {
         self,
         package_name: &str,
         pre_output: bool,
+        suppress_warnings: &IndexSet<String>,
     ) -> std::result::Result<Self::Output, Self::Error>;
 }
 
@@ -240,12 +242,15 @@ where
         self,
         package_name: &str,
         pre_output: bool,
+        suppress_warnings: &IndexSet<String>,
     ) -> std::result::Result<Self::Output, Self::Error> {
         self.into_iter()
-            .map(|(k, v)| match v.validate(package_name, pre_output) {
-                Ok(v) => Ok((k, v)),
-                Err(e) => Err((k, e)),
-            })
+            .map(
+                |(k, v)| match v.validate(package_name, pre_output, suppress_warnings) {
+                    Ok(v) => Ok((k, v)),
+                    Err(e) => Err((k, e)),
+                },
+            )
             .collect()
     }
 }
@@ -260,12 +265,15 @@ where
         self,
         package_name: &str,
         pre_output: bool,
+        suppress_warnings: &IndexSet<String>,
     ) -> std::result::Result<Self::Output, Self::Error> {
         self.into_iter()
-            .map(|v| match v.validate(package_name, pre_output) {
-                Ok(v) => Ok(v),
-                Err(e) => Err(e),
-            })
+            .map(
+                |v| match v.validate(package_name, pre_output, suppress_warnings) {
+                    Ok(v) => Ok(v),
+                    Err(e) => Err(e),
+                },
+            )
             .collect()
     }
 }
@@ -281,8 +289,9 @@ where
         self,
         package_name: &str,
         pre_output: bool,
+        suppress_warnings: &IndexSet<String>,
     ) -> std::result::Result<Self::Output, Self::Error> {
-        self.0.validate(package_name, pre_output)
+        self.0.validate(package_name, pre_output, suppress_warnings)
     }
 }
 
@@ -297,8 +306,9 @@ where
         self,
         package_name: &str,
         pre_output: bool,
+        suppress_warnings: &IndexSet<String>,
     ) -> std::result::Result<Self::Output, Self::Error> {
-        self.0.validate(package_name, pre_output)
+        self.0.validate(package_name, pre_output, suppress_warnings)
     }
 }
 
@@ -356,9 +366,10 @@ impl PartialManifest {
         mut self,
         package_name: &str,
        pre_output: bool,
+        suppress_warnings: &IndexSet<String>,
     ) -> Result<Manifest> {
         self.sources = Some(SeqOrStruct::new(PartialSources::new_empty()));
-        self.validate(package_name, pre_output)
+        self.validate(package_name, pre_output, suppress_warnings)
     }
 }
 
@@ -392,17 +403,24 @@ impl PrefixPaths for PartialManifest {
 impl Validate for PartialManifest {
     type Output = Manifest;
     type Error = Error;
-    fn validate(self, _package_name: &str, pre_output: bool) -> Result<Manifest> {
+    fn validate(
+        self,
+        _package_name: &str,
+        pre_output: bool,
+        suppress_warnings: &IndexSet<String>,
+    ) -> Result<Manifest> {
         let pkg = match self.package {
             Some(mut p) => {
                 p.name = p.name.to_lowercase();
                 if !pre_output {
                     p.extra.iter().for_each(|(k, _)| {
-                        warnln!(
-                            "Ignoring unknown field `{}` in manifest package for {}.",
-                            k,
-                            p.name
-                        );
+                        if !suppress_warnings.contains("W03") {
+                            warnln!(
+                                "[W03] Ignoring unknown field `{}` in manifest package for {}.",
+                                k,
+                                p.name
+                            );
+                        }
                     });
                 }
                 p
             }
@@ -414,7 +432,7 @@ impl Validate for PartialManifest {
                 .into_iter()
                 .map(|(k, v)| (k.to_lowercase(), v))
                 .collect::<IndexMap<_, _>>()
-                .validate(&pkg.name, pre_output)
+                .validate(&pkg.name, pre_output, suppress_warnings)
                 .map_err(|(key, cause)| {
                     Error::chain(
                         format!("In dependency `{}` of package `{}`:", key, pkg.name),
                         cause,
                     )
@@ -424,9 +442,12 @@ impl Validate for PartialManifest {
             None => IndexMap::new(),
         };
         let srcs = match self.sources {
-            Some(s) => Some(s.validate(&pkg.name, pre_output).map_err(|cause| {
-                Error::chain(format!("In source list of package `{}`:", pkg.name), cause)
-            })?),
+            Some(s) => Some(
+                s.validate(&pkg.name, pre_output, suppress_warnings)
+                    .map_err(|cause| {
+                        Error::chain(format!("In source list of package `{}`:", pkg.name), cause)
+                    })?,
+            ),
             None => None,
         };
         let exp_inc_dirs = self.export_include_dirs.unwrap_or_default();
@@ -440,23 +461,25 @@ impl Validate for PartialManifest {
         let frozen = self.frozen.unwrap_or(false);
         let workspace = match self.workspace {
             Some(w) => w
-                .validate(&pkg.name, pre_output)
+                .validate(&pkg.name, pre_output, suppress_warnings)
                 .map_err(|cause| Error::chain("In workspace configuration:", cause))?,
             None => Workspace::default(),
         };
         let vendor_package = match self.vendor_package {
             Some(vend) => vend
-                .validate(&pkg.name, pre_output)
+                .validate(&pkg.name, pre_output, suppress_warnings)
                 .map_err(|cause| Error::chain("Unable to parse vendor_package", cause))?,
             None => Vec::new(),
         };
         if !pre_output {
             self.extra.iter().for_each(|(k, _)| {
-                warnln!(
-                    "Ignoring unknown field `{}` in manifest for {}.",
-                    k,
-                    pkg.name
-                );
+                if !suppress_warnings.contains("W03") {
+                    warnln!(
+                        "[W03] Ignoring unknown field `{}` in manifest for {}.",
+                        k,
+                        pkg.name
+                    );
+                }
             });
         }
         Ok(Manifest {
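
A call-site sketch of the threaded-through API, assuming bender's crate-internal `read_manifest`, `Manifest`, and `Result` alias are in scope; W03 and W04 are the unknown-field and empty-source-group warnings shown above:

```rust
use std::path::Path;

use indexmap::IndexSet;

/// Read a manifest while silencing W03 (unknown fields) and W04 (empty
/// source groups). Sketch only; mirrors the call in `main` above.
fn read_quietly(root: &Path) -> Result<Manifest> {
    let suppress: IndexSet<String> =
        ["W03", "W04"].iter().map(|s| s.to_string()).collect();
    read_manifest(&root.join("Bender.yml"), &suppress)
}
```
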
@@ -465,7 +488,19 @@ impl Validate for PartialManifest {
             sources: srcs,
             export_include_dirs: exp_inc_dirs
                 .iter()
-                .map(|path| env_path_from_string(path.to_string()))
+                .filter_map(|path| match env_path_from_string(path.to_string()) {
+                    Ok(parsed_path) => Some(Ok(parsed_path)),
+                    Err(cause) => {
+                        if suppress_warnings.contains("E30") {
+                            if !suppress_warnings.contains("W30") {
+                                warnln!("[W30] File not added, ignoring: {}", cause);
+                            }
+                            None
+                        } else {
+                            Some(Err(Error::chain("[E30]", cause)))
+                        }
+                    }
+                })
                 .collect::<Result<Vec<_>>>()?,
             plugins,
             frozen,
@@ -528,7 +563,12 @@ impl PrefixPaths for PartialDependency {
 impl Validate for PartialDependency {
     type Output = Dependency;
     type Error = Error;
-    fn validate(self, package_name: &str, pre_output: bool) -> Result<Dependency> {
+    fn validate(
+        self,
+        package_name: &str,
+        pre_output: bool,
+        suppress_warnings: &IndexSet<String>,
+    ) -> Result<Dependency> {
         let version = match self.version {
             Some(v) => Some(semver::VersionReq::parse(&v).map_err(|cause| {
                 Error::chain(
@@ -545,11 +585,13 @@ impl Validate for PartialDependency {
         }
         if !pre_output {
             self.extra.iter().for_each(|(k, _)| {
-                warnln!(
-                    "Ignoring unknown field `{}` in a dependency in manifest for {}.",
-                    k,
-                    package_name
-                );
+                if !suppress_warnings.contains("W03") {
+                    warnln!(
+                        "[W03] Ignoring unknown field `{}` in a dependency in manifest for {}.",
+                        k,
+                        package_name
+                    );
+                }
             });
         }
         if let Some(path) = self.path {
@@ -600,6 +642,8 @@ pub struct PartialSources {
     pub defines: Option<IndexMap<String, Option<String>>>,
     /// The source file paths.
     pub files: Vec<PartialSourceFile>,
+    /// The list of external flists to include.
+    pub external_flists: Option<Vec<String>>,
     /// Unknown extra fields
     #[serde(flatten)]
     extra: HashMap<String, Value>,
@@ -614,6 +658,7 @@ impl PartialSources {
             include_dirs: None,
             defines: None,
             files: Vec::new(),
+            external_flists: None,
             extra: HashMap::new(),
         }
     }
@@ -626,6 +671,7 @@ impl PrefixPaths for PartialSources {
             include_dirs: self.include_dirs.prefix_paths(prefix)?,
             defines: self.defines,
             files: self.files.prefix_paths(prefix)?,
+            external_flists: self.external_flists.prefix_paths(prefix)?,
             extra: self.extra,
         })
     }
}
@@ -638,6 +684,7 @@ impl From<Vec<PartialSourceFile>> for PartialSources {
             include_dirs: None,
             defines: None,
             files: v,
+            external_flists: None,
             extra: HashMap::new(),
         }
     }
 }
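
Note the two-stage escape hatch introduced here: suppressing error E30 downgrades a path/environment expansion failure to warning W30 and drops the entry; suppressing W30 as well makes the drop silent. The pattern, factored out as a standalone sketch (assumes bender's `Error`, `Result` alias, and `warnln!` in scope; the function name is hypothetical):

```rust
use indexmap::IndexSet;

/// E30/W30 downgrade used for env/path expansion failures above. Designed
/// for use inside `filter_map`: returning `None` drops the entry entirely.
fn downgrade_e30<T>(res: Result<T>, suppress: &IndexSet<String>) -> Option<Result<T>> {
    match res {
        Ok(v) => Some(Ok(v)),
        Err(cause) if suppress.contains("E30") => {
            if !suppress.contains("W30") {
                warnln!("[W30] File not added, ignoring: {}", cause);
            }
            None // entry is dropped from the list
        }
        Err(cause) => Some(Err(Error::chain("[E30]", cause))),
    }
}
```
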
@@ -646,15 +693,133 @@ impl From<Vec<PartialSourceFile>> for PartialSources {
 impl Validate for PartialSources {
     type Output = Sources;
     type Error = Error;
-    fn validate(self, package_name: &str, pre_output: bool) -> Result<Sources> {
+    fn validate(
+        self,
+        package_name: &str,
+        pre_output: bool,
+        suppress_warnings: &IndexSet<String>,
+    ) -> Result<Sources> {
+        let external_flists: Result<Vec<PathBuf>> = self
+            .external_flists
+            .clone()
+            .unwrap_or_default()
+            .iter()
+            .filter_map(|path| match env_path_from_string(path.to_string()) {
+                Ok(p) => Some(Ok(p)),
+                Err(cause) => {
+                    if suppress_warnings.contains("E30") {
+                        if !suppress_warnings.contains("W30") {
+                            warnln!("[W30] File not added, ignoring: {}", cause);
+                        }
+                        None
+                    } else {
+                        Some(Err(Error::chain("[E30]", cause)))
+                    }
+                }
+            })
+            .collect();
+
+        let external_flist_list: Result<Vec<(PathBuf, Vec<String>)>> = external_flists?
+            .into_iter()
+            .map(|filename| {
+                let file = File::open(&filename).map_err(|cause| {
+                    Error::chain(
+                        format!("Unable to open external flist file {:?}", filename),
+                        cause,
+                    )
+                })?;
+                let reader = BufReader::new(file);
+                let lines: Vec<String> = reader
+                    .lines()
+                    .map(|line| {
+                        line.map_err(|cause| {
+                            Error::chain(
+                                format!("Error reading external flist file {:?}", filename),
+                                cause,
+                            )
+                        })
+                    })
+                    .collect::<Result<Vec<_>>>()?;
+                Ok((filename.parent().unwrap().to_path_buf(), lines))
+            })
+            .collect();
+
+        let external_flist_groups: Result<Vec<PartialSourceFile>> = external_flist_list?
+            .into_iter()
+            .map(|(flist_dir, flist)| {
+                Ok(PartialSourceFile::Group(Box::new(PartialSources {
+                    target: None,
+                    include_dirs: Some(
+                        flist
+                            .clone()
+                            .into_iter()
+                            .filter_map(|file| {
+                                if file.starts_with("+incdir+") {
+                                    Some(file.trim_start_matches("+incdir+").to_string())
+                                } else {
+                                    None
+                                }
+                            })
+                            .map(|dir| dir.prefix_paths(&flist_dir))
+                            .collect::<Result<_>>()?,
+                    ),
+                    defines: Some(
+                        flist
+                            .clone()
+                            .into_iter()
+                            .filter_map(|file| {
+                                if let Some(stripped) = file.strip_prefix("+define+") {
+                                    if let Some(eq_idx) = stripped.find("=") {
+                                        Some((
+                                            stripped[..eq_idx].to_string(),
+                                            Some(stripped[eq_idx + 1..].to_string()),
+                                        ))
+                                    } else {
+                                        Some((stripped.to_string(), None))
+                                    }
+                                } else {
+                                    None
+                                }
+                            })
+                            .collect(),
+                    ),
+                    files: flist
+                        .into_iter()
+                        .filter_map(|file| {
+                            if file.starts_with("+") {
+                                None
+                            } else {
+                                // prefix path
+                                Some(PartialSourceFile::File(file))
+                            }
+                        })
+                        .map(|file| file.prefix_paths(&flist_dir))
+                        .collect::<Result<Vec<_>>>()?,
+                    external_flists: None,
+                    extra: HashMap::new(),
+                })))
+            })
+            .collect();
+
         let post_env_files: Vec<PartialSourceFile> = self
             .files
             .into_iter()
-            .map(|file| match file {
-                PartialSourceFile::File(file) => {
-                    Ok(PartialSourceFile::File(env_string_from_string(file)?))
-                }
-                other => Ok(other),
+            .chain(external_flist_groups?.into_iter())
+            .filter_map(|file| match file {
+                PartialSourceFile::File(file) => match env_string_from_string(file) {
+                    Ok(p) => Some(Ok(PartialSourceFile::File(p))),
+                    Err(cause) => {
+                        if suppress_warnings.contains("E30") {
+                            if !suppress_warnings.contains("W30") {
+                                warnln!("[W30] File not added, ignoring: {}", cause);
+                            }
+                            None
+                        } else {
+                            Some(Err(Error::chain("[E30]", cause)))
+                        }
+                    }
+                },
+                other => Some(Ok(other)),
             })
             .collect::<Result<Vec<_>>>()?;
         let post_glob_files: Vec<PartialSourceFile> = post_env_files
@@ -662,7 +827,7 @@ impl Validate for PartialSources {
             .into_iter()
             .map(|pre_glob_file| {
                 if let PartialSourceFile::File(_) = pre_glob_file {
                     // PartialSources .files item is pointing to PartialSourceFiles::file so do glob extension
-                    pre_glob_file.glob_file()
+                    pre_glob_file.glob_file(suppress_warnings)
                 } else {
                     // PartialSources .files item is pointing to PartialSourceFiles::group so pass on for recursion
                     // to do glob extension in the groups.sources.files list of PartialSourceFiles::file
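
The `+define+` branch above splits each entry on the first `=`; restated as a standalone function (sketch, name hypothetical):

```rust
/// Parse one flist entry of the form `+define+NAME` or `+define+NAME=VALUE`
/// into a (name, optional value) pair; returns None for any other entry.
fn parse_define(entry: &str) -> Option<(String, Option<String>)> {
    let stripped = entry.strip_prefix("+define+")?;
    Some(match stripped.find('=') {
        Some(i) => (
            stripped[..i].to_string(),
            Some(stripped[i + 1..].to_string()),
        ),
        None => (stripped.to_string(), None),
    })
}
```
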
@@ -678,27 +843,43 @@ impl Validate for PartialSources {
             .include_dirs
             .unwrap_or_default()
             .iter()
-            .map(|path| env_path_from_string(path.to_string()))
+            .filter_map(|path| match env_path_from_string(path.to_string()) {
+                Ok(p) => Some(Ok(p)),
+                Err(cause) => {
+                    if suppress_warnings.contains("E30") {
+                        if !suppress_warnings.contains("W30") {
+                            warnln!("[W30] File not added, ignoring: {}", cause);
+                        }
+                        None
+                    } else {
+                        Some(Err(Error::chain("[E30]", cause)))
+                    }
+                }
+            })
             .collect();
+        let defines = self.defines.unwrap_or_default();
         let files: Result<Vec<_>> = post_glob_files
             .into_iter()
-            .map(|f| f.validate(package_name, pre_output))
+            .map(|f| f.validate(package_name, pre_output, suppress_warnings))
             .collect();
-        let files = files?;
-        if files.is_empty() && !pre_output {
+        let files: Vec<Option<SourceFile>> = files?;
+        let files: Vec<SourceFile> = files.into_iter().flatten().collect();
+        if files.is_empty() && !pre_output && !suppress_warnings.contains("W04") {
             warnln!(
-                "No source files specified in a sourcegroup in manifest for {}.",
+                "[W04] No source files specified in a sourcegroup in manifest for {}.",
                 package_name
             );
         }
         if !pre_output {
             self.extra.iter().for_each(|(k, _)| {
-                warnln!(
-                    "Ignoring unknown field `{}` in sources in manifest for {}.",
-                    k,
-                    package_name
-                );
+                if !suppress_warnings.contains("W03") {
+                    warnln!(
+                        "[W03] Ignoring unknown field `{}` in sources in manifest for {}.",
+                        k,
+                        package_name
+                    );
+                }
             });
         }
         Ok(Sources {
@@ -784,14 +965,19 @@ impl<'de> Deserialize<'de> for PartialSourceFile {
 }
 
 impl Validate for PartialSourceFile {
-    type Output = SourceFile;
+    type Output = Option<SourceFile>;
     type Error = Error;
-    fn validate(self, package_name: &str, pre_output: bool) -> Result<SourceFile> {
+    fn validate(
+        self,
+        package_name: &str,
+        pre_output: bool,
+        suppress_warnings: &IndexSet<String>,
+    ) -> Result<Option<SourceFile>> {
         match self {
-            PartialSourceFile::File(path) => Ok(SourceFile::File(PathBuf::from(path))),
-            PartialSourceFile::Group(srcs) => Ok(SourceFile::Group(Box::new(
-                srcs.validate(package_name, pre_output)?,
-            ))),
+            PartialSourceFile::File(path) => Ok(Some(SourceFile::File(PathBuf::from(path)))),
+            PartialSourceFile::Group(srcs) => Ok(Some(SourceFile::Group(Box::new(
+                srcs.validate(package_name, pre_output, suppress_warnings)?,
+            )))),
         }
     }
 }
@@ -803,14 +989,14 @@ pub trait GlobFile {
     /// The error type produced by validation.
     type Error;
     /// Validate self and convert to a full list of paths that exist
-    fn glob_file(self) -> Result<Self::Output, Self::Error>;
+    fn glob_file(self, suppress_warnings: &IndexSet<String>) -> Result<Self::Output, Self::Error>;
 }
 
 impl GlobFile for PartialSourceFile {
     type Output = Vec<PartialSourceFile>;
     type Error = Error;
 
-    fn glob_file(self) -> Result<Vec<PartialSourceFile>> {
+    fn glob_file(self, suppress_warnings: &IndexSet<String>) -> Result<Vec<PartialSourceFile>> {
         // let mut partial_source_files_vec: Vec<PartialSourceFile> = Vec::new();
 
         // Only operate on files, not groups
@@ -833,8 +1019,8 @@ impl GlobFile for PartialSourceFile {
                     ))
                 })
                 .collect::<Result<Vec<_>>>()?;
-            if out.is_empty() {
-                warnln!("No files found for glob pattern {:?}", path);
+            if out.is_empty() && !suppress_warnings.contains("W05") {
+                warnln!("[W05] No files found for glob pattern {:?}", path);
             }
             Ok(out)
         } else {
@@ -881,7 +1067,12 @@ impl PrefixPaths for PartialWorkspace {
 impl Validate for PartialWorkspace {
     type Output = Workspace;
     type Error = Error;
-    fn validate(self, package_name: &str, pre_output: bool) -> Result<Workspace> {
+    fn validate(
+        self,
+        package_name: &str,
+        pre_output: bool,
+        suppress_warnings: &IndexSet<String>,
+    ) -> Result<Workspace> {
         let package_links: Result<IndexMap<_, _>> = self
             .package_links
             .unwrap_or_default()
             .iter()
             .collect();
         if !pre_output {
             self.extra.iter().for_each(|(k, _)| {
-                warnln!(
-                    "Ignoring unknown field `{}` in workspace configuration in manifest for {}.",
-                    k,
-                    package_name
-                );
+                if !suppress_warnings.contains("W03") {
+                    warnln!(
+                        "[W03] Ignoring unknown field `{}` in workspace configuration in manifest for {}.",
+                        k,
+                        package_name
+                    );
+                }
             });
         }
         Ok(Workspace {
@@ -929,10 +1122,8 @@ impl PrefixPaths for PathBuf {
 
 impl PrefixPaths for String {
     fn prefix_paths(self, prefix: &Path) -> Result<Self> {
-        Ok(prefix
-            .join(env_path_from_string(self)?)
-            .display()
-            .to_string())
+        // env is resolved later, convert back to string here.
+        Ok(prefix.join(PathBuf::from(&self)).display().to_string())
     }
 }
 
@@ -1060,7 +1251,12 @@ impl Merge for PartialConfig {
 impl Validate for PartialConfig {
     type Output = Config;
     type Error = Error;
-    fn validate(self, package_name: &str, pre_output: bool) -> Result<Config> {
+    fn validate(
+        self,
+        package_name: &str,
+        pre_output: bool,
+        suppress_warnings: &IndexSet<String>,
+    ) -> Result<Config> {
         Ok(Config {
             database: match self.database {
                 Some(db) => env_path_from_string(db)?,
@@ -1072,7 +1268,7 @@ impl Validate for PartialConfig {
             },
             overrides: match self.overrides {
                 Some(d) => d
-                    .validate(package_name, pre_output)
+                    .validate(package_name, pre_output, suppress_warnings)
                     .map_err(|(key, cause)| {
                         Error::chain(format!("In override `{}`:", key), cause)
                     })?,
                 None => IndexMap::new(),
             },
             plugins: match self.plugins {
                 Some(d) => d
-                    .validate(package_name, pre_output)
+                    .validate(package_name, pre_output, suppress_warnings)
                     .map_err(|(key, cause)| Error::chain(format!("In plugin `{}`:", key), cause))?,
                 None => IndexMap::new(),
             },
@@ -1202,7 +1398,12 @@ impl PrefixPaths for PartialVendorPackage {
 impl Validate for PartialVendorPackage {
     type Output = VendorPackage;
     type Error = Error;
-    fn validate(self, package_name: &str, pre_output: bool) -> Result<VendorPackage> {
+    fn validate(
+        self,
+        package_name: &str,
+        pre_output: bool,
+        suppress_warnings: &IndexSet<String>,
+    ) -> Result<VendorPackage> {
         Ok(VendorPackage {
             name: match self.name {
                 Some(name) => name,
@@ -1214,7 +1415,7 @@ impl Validate for PartialVendorPackage {
             },
             upstream: match self.upstream {
                 Some(upstream) => upstream
-                    .validate(package_name, pre_output)
+                    .validate(package_name, pre_output, suppress_warnings)
                     .map_err(|cause| {
                         Error::chain("Unable to parse external import upstream", cause)
                     })?,
diff --git a/src/resolver.rs b/src/resolver.rs
index 4340d5ce..914ebd73 100644
--- a/src/resolver.rs
+++ b/src/resolver.rs
@@ -103,11 +103,13 @@ impl<'ctx> DependencyResolver<'ctx> {
         // - the dependency is not in a clean state (i.e., was modified)
         if !ignore_checkout {
             if !is_git_repo {
-                warnln!("Dependency `{}` in checkout_dir `{}` is not a git repository. Setting as path dependency.\n\
-                    \tPlease use `bender clone` to work on git dependencies.\n\
-                    \tRun `bender update --ignore-checkout-dir` to overwrite this at your own risk.",
-                    dir.as_ref().unwrap().path().file_name().unwrap().to_str().unwrap(),
-                    &checkout.display());
+                if !self.sess.suppress_warnings.contains("W06") {
+                    warnln!("[W06] Dependency `{}` in checkout_dir `{}` is not a git repository. Setting as path dependency.\n\
+                        \tPlease use `bender clone` to work on git dependencies.\n\
+                        \tRun `bender update --ignore-checkout-dir` to overwrite this at your own risk.",
+                        dir.as_ref().unwrap().path().file_name().unwrap().to_str().unwrap(),
+                        &checkout.display());
+                }
                 self.checked_out
                     .insert(depname, config::Dependency::Path(dir.unwrap().path()));
             } else if !(SysCommand::new(&self.sess.config.git) // If not in a clean state
@@ -118,10 +120,13 @@ impl<'ctx> DependencyResolver<'ctx> {
                 .stdout
                 .is_empty())
             {
-                warnln!("Dependency `{}` in checkout_dir `{}` is not in a clean state. Setting as path dependency.\n\
-                    \tRun `bender update --ignore-checkout-dir` to overwrite this at your own risk.",
-                    dir.as_ref().unwrap().path().file_name().unwrap().to_str().unwrap(),
-                    &checkout.display());
+                if !self.sess.suppress_warnings.contains("W06") {
+                    warnln!("[W06] Dependency `{}` in checkout_dir `{}` is not in a clean state. Setting as path dependency.\n\
+                        \tPlease use `bender clone` to work on git dependencies.\n\
+                        \tRun `bender update --ignore-checkout-dir` to overwrite this at your own risk.",
+                        dir.as_ref().unwrap().path().file_name().unwrap().to_str().unwrap(),
+                        &checkout.display());
+                }
                 self.checked_out
                     .insert(depname, config::Dependency::Path(dir.unwrap().path()));
             }
@@ -374,10 +379,12 @@ impl<'ctx> DependencyResolver<'ctx> {
                 match &locked_package.revision {
                     Some(r) => r.clone(),
                     None => {
-                        warnln!(
-                            "No revision found in lock file for git dependency `{}`",
-                            name
-                        );
+                        if !io.sess.suppress_warnings.contains("W21") {
+                            warnln!(
+                                "[W21] No revision found in lock file for git dependency `{}`",
+                                name
+                            );
+                        }
                         return None;
                     }
                 },
@@ -622,12 +629,14 @@ impl<'ctx> DependencyResolver<'ctx> {
                     if id == con_src {
                         return Err(e);
                     }
-                    warnln!(
-                        "Ignoring error for `{}` at `{}`: {}",
-                        name,
-                        self.sess.dependency_source(*con_src),
-                        e
-                    );
+                    if !self.sess.suppress_warnings.contains("W20") {
+                        warnln!(
+                            "[W20] Ignoring error for `{}` at `{}`: {}",
+                            name,
+                            self.sess.dependency_source(*con_src),
+                            e
+                        );
+                    }
                     Ok((*id, IndexSet::new()))
                 }
             }
diff --git a/src/sess.rs b/src/sess.rs
index a554ab3b..3dbd9c17 100644
--- a/src/sess.rs
+++ b/src/sess.rs
@@ -22,7 +22,7 @@ use std::fs::canonicalize;
 use dunce::canonicalize;
 
 use async_recursion::async_recursion;
-use futures::future::{self, join_all};
+use futures::future::join_all;
 use futures::TryFutureExt;
 use indexmap::{IndexMap, IndexSet};
 use semver::Version;
@@ -78,6 +78,8 @@ pub struct Session<'ctx> {
     pub git_throttle: Arc<Semaphore>,
     /// A toggle to disable remote fetches & clones
     pub local_only: bool,
+    /// A list of warnings to suppress.
+    pub suppress_warnings: IndexSet<String>,
 }
 
 impl<'sess, 'ctx: 'sess> Session<'ctx> {
@@ -91,6 +93,7 @@ impl<'sess, 'ctx: 'sess> Session<'ctx> {
         local_only: bool,
         force_fetch: bool,
         git_throttle: usize,
+        suppress_warnings: IndexSet<String>,
     ) -> Session<'ctx> {
         Session {
             root,
@@ -116,6 +119,7 @@ impl<'sess, 'ctx: 'sess> Session<'ctx> {
             cache: Default::default(),
             git_throttle: Arc::new(Semaphore::new(git_throttle)),
             local_only,
+            suppress_warnings,
         }
     }
 
@@ -137,15 +141,15 @@ impl<'sess, 'ctx: 'sess> Session<'ctx> {
             calling_package
         );
         let src = DependencySource::from(cfg);
-        self.deps
-            .lock()
-            .unwrap()
-            .add(self.intern_dependency_entry(DependencyEntry {
+        self.deps.lock().unwrap().add(
+            self.intern_dependency_entry(DependencyEntry {
                 name: name.into(),
                 source: src,
                 revision: None,
                 version: None,
-            }))
+            }),
+            &self.suppress_warnings,
+        )
     }
 
     /// Load a lock file.
@@ -172,6 +176,7 @@ impl<'sess, 'ctx: 'sess> Session<'ctx> {
                     .as_ref()
                     .map(|s| semver::Version::parse(s).unwrap()),
             }),
+            &self.suppress_warnings,
         );
         graph_names.insert(id, &pkg.dependencies);
         names.insert(name.clone(), id);
@@ -539,35 +544,38 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> {
             // Initialize.
             self.sess.stats.num_database_init.increment();
             // TODO MICHAERO: May need throttle
-            future::lazy(|_| {
-                stageln!("Cloning", "{} ({})", name2, url2);
-                Ok(())
-            })
-            .and_then(|_| git.clone().spawn_with(|c| c.arg("init").arg("--bare")))
-            .and_then(|_| {
-                git.clone()
-                    .spawn_with(|c| c.arg("remote").arg("add").arg("origin").arg(url))
-            })
-            .and_then(|_| git.clone().fetch("origin"))
-            .and_then(|_| async {
-                if let Some(reference) = fetch_ref {
-                    git.clone().fetch_ref("origin", reference).await
-                } else {
-                    Ok(())
-                }
-            })
-            .await
-            .map_err(move |cause| {
-                if url3.contains("git@") {
-                    warnln!("Please ensure your public ssh key is added to the git server.");
-                }
-                warnln!("Please ensure the url is correct and you have access to the repository.");
-                Error::chain(
-                    format!("Failed to initialize git database in {:?}.", db_dir),
-                    cause,
-                )
-            })
-            .map(move |_| git)
+            stageln!("Cloning", "{} ({})", name2, url2);
+            git.clone()
+                .spawn_with(|c| c.arg("init").arg("--bare"))
+                .await?;
+            git.clone()
+                .spawn_with(|c| c.arg("remote").arg("add").arg("origin").arg(url))
+                .await?;
+            git.clone()
+                .fetch("origin")
+                .and_then(|_| async {
+                    if let Some(reference) = fetch_ref {
+                        git.clone().fetch_ref("origin", reference).await
+                    } else {
+                        Ok(())
+                    }
+                })
+                .await
+                .map_err(move |cause| {
+                    if url3.contains("git@") && !self.sess.suppress_warnings.contains("W07") {
+                        warnln!("[W07] Please ensure your public ssh key is added to the git server.");
+                    }
+                    if !self.sess.suppress_warnings.contains("W07") {
+                        warnln!(
+                            "[W07] Please ensure the url is correct and you have access to the repository."
+                        );
+                    }
+                    Error::chain(
+                        format!("Failed to initialize git database in {:?}.", db_dir),
+                        cause,
+                    )
+                })
+                .map(move |_| git)
         } else {
             // Update if the manifest has been modified since the last fetch.
             let db_mtime = try_modification_time(db_dir.join("FETCH_HEAD"));
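
Beyond the warning gating, this hunk also replaces the `future::lazy(...).and_then(...)` combinator pipeline with straight-line `.await`s; the effects are the same, but the control flow is flatter and each step can use `?`. A reduced sketch of the pattern, with a hypothetical `Git` handle standing in for bender's git wrapper:

```rust
/// Reduced sketch of the refactor: sequential awaits instead of a lazy
/// combinator chain. `spawn_with` and `fetch` mirror the calls in the diff.
async fn init_database(git: Git, url: &str) -> Result<Git> {
    git.clone().spawn_with(|c| c.arg("init").arg("--bare")).await?;
    git.clone()
        .spawn_with(|c| c.arg("remote").arg("add").arg("origin").arg(url))
        .await?;
    git.clone().fetch("origin").await?;
    Ok(git)
}
```
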
@@ -577,30 +585,32 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> {
             }
             self.sess.stats.num_database_fetch.increment();
             // TODO MICHAERO: May need throttle
-            future::lazy(|_| {
-                stageln!("Fetching", "{} ({})", name2, url2);
-                Ok(())
-            })
-            .and_then(|_| git.clone().fetch("origin"))
-            .and_then(|_| async {
-                if let Some(reference) = fetch_ref {
-                    git.clone().fetch_ref("origin", reference).await
-                } else {
-                    Ok(())
-                }
-            })
-            .await
-            .map_err(move |cause| {
-                if url3.contains("git@") {
-                    warnln!("Please ensure your public ssh key is added to the git server.");
-                }
-                warnln!("Please ensure the url is correct and you have access to the repository.");
-                Error::chain(
-                    format!("Failed to update git database in {:?}.", db_dir),
-                    cause,
-                )
-            })
-            .map(move |_| git)
+            stageln!("Fetching", "{} ({})", name2, url2);
+            git.clone()
+                .fetch("origin")
+                .and_then(|_| async {
+                    if let Some(reference) = fetch_ref {
+                        git.clone().fetch_ref("origin", reference).await
+                    } else {
+                        Ok(())
+                    }
+                })
+                .await
+                .map_err(move |cause| {
+                    if url3.contains("git@") && !self.sess.suppress_warnings.contains("W07") {
+                        warnln!("[W07] Please ensure your public ssh key is added to the git server.");
+                    }
+                    if !self.sess.suppress_warnings.contains("W07") {
+                        warnln!(
+                            "[W07] Please ensure the url is correct and you have access to the repository."
+                        );
+                    }
+                    Error::chain(
+                        format!("Failed to update git database in {:?}.", db_dir),
+                        cause,
+                    )
+                })
+                .map(move |_| git)
         }
     }
 
@@ -873,21 +883,25 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> {
             {
                 CheckoutState::ToCheckout
             } else {
+                if !self.sess.suppress_warnings.contains("W19") {
+                    warnln!(
+                        "[W19] Workspace checkout directory set and has uncommitted changes, not updating {} at {}.\n\
+                        \tRun `bender checkout --force` to overwrite the dependency at your own risk.",
+                        name,
+                        path.display()
+                    );
+                }
+                CheckoutState::Clean
+            }
+        } else {
+            if !self.sess.suppress_warnings.contains("W19") {
                 warnln!(
-                    "Workspace checkout directory set and has uncommitted changes, not updating {} at {}.\n\
+                    "[W19] Workspace checkout directory set and remote url doesn't match, not updating {} at {}.\n\
                     \tRun `bender checkout --force` to overwrite the dependency at your own risk.",
                     name,
                     path.display()
                 );
-                CheckoutState::Clean
             }
-        } else {
-            warnln!(
-                "Workspace checkout directory set and remote url doesn't match, not updating {} at {}.\n\
-                \tRun `bender checkout --force` to overwrite the dependency at your own risk.",
-                name,
-                path.display()
-            );
             CheckoutState::Clean
         }
     } else {
@@ -942,7 +956,9 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> {
             .spawn_with(move |c| c.arg("fetch").arg("--all"))
             .await?;
         git.clone().spawn_with(move |c| c.arg("tag").arg(tag_name_1).arg(revision).arg("--force").arg("--no-sign")).map_err(|cause| {
-            warnln!("Please ensure the commits are available on the remote or run bender update");
+            if !self.sess.suppress_warnings.contains("W08") {
+                warnln!("[W08] Please ensure the commits are available on the remote or run bender update");
+            }
             Error::chain(
                 format!(
                     "Failed to checkout commit {} for {} given in Bender.lock.\n",
@@ -1000,7 +1016,10 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> {
         for dep in (dep_iter_mut).iter_mut() {
             if let (_, config::Dependency::Path(ref path)) = dep {
                 if !path.starts_with("/") {
-                    warnln!("Path dependencies ({:?}) in git dependencies ({:?}) currently not fully supported. Your mileage may vary.", dep.0, top_package_name);
+                    if !self.sess.suppress_warnings.contains("W09") {
+                        warnln!("[W09] Path dependencies ({:?}) in git dependencies ({:?}) currently not fully supported. \
+                            Your mileage may vary.", dep.0, top_package_name);
+                    }
 
                     let sub_entries = db
                         .clone()
@@ -1053,8 +1072,9 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> {
                             cause,
                         )
                     })?;
-                let mut full =
-                    partial.validate_ignore_sources("", true).map_err(|cause| {
+                let mut full = partial
+                    .validate_ignore_sources("", true, &self.sess.suppress_warnings)
+                    .map_err(|cause| {
                         Error::chain(
                             format!(
                                 "Error in manifest of dependency `{}` at revision \
@@ -1115,15 +1135,17 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> {
         use self::DependencyVersion as DepVer;
         match (&dep.source, version) {
             (DepSrc::Path(path), DepVer::Path) => {
-                if !path.starts_with("/") {
-                    warnln!("There may be issues in the path for {:?}.", dep.name);
+                if !path.starts_with("/") && !self.sess.suppress_warnings.contains("W10") {
+                    warnln!("[W10] There may be issues in the path for {:?}.", dep.name);
                 }
                 let manifest_path = path.join("Bender.yml");
                 if manifest_path.exists() {
-                    match read_manifest(&manifest_path) {
+                    match read_manifest(&manifest_path, &self.sess.suppress_warnings) {
                         Ok(m) => {
-                            if dep.name != m.package.name {
-                                warnln!("Dependency name and package name do not match for {:?} / {:?}, this can cause unwanted behavior",
+                            if dep.name != m.package.name
+                                && !self.sess.suppress_warnings.contains("W11")
+                            {
+                                warnln!("[W11] Dependency name and package name do not match for {:?} / {:?}, this can cause unwanted behavior",
                                     dep.name, m.package.name); // TODO: This should be an error
                             }
                             Ok(Some(self.sess.intern_manifest(m)))
@@ -1145,10 +1167,13 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> {
                             .join(".bender")
                             .join("tmp")
                             .join(format!("{}_manifest.yml", dep.name)),
+                        &self.sess.suppress_warnings,
                     ) {
                         Ok(m) => {
-                            if dep.name != m.package.name {
-                                warnln!("Dependency name and package name do not match for {:?} / {:?}, this can cause unwanted behavior",
+                            if dep.name != m.package.name
+                                && !self.sess.suppress_warnings.contains("W11")
+                            {
+                                warnln!("[W11] Dependency name and package name do not match for {:?} / {:?}, this can cause unwanted behavior",
                                     dep.name, m.package.name); // TODO: This should be an error
                             }
                             Ok(Some(self.sess.intern_manifest(m)))
                         }
                         Err(e) => Err(e),
                     }
                 } else {
-                    warnln!("Manifest not found for {:?} at {:?}", dep.name, dep.source);
+                    if !self.sess.suppress_warnings.contains("W12") {
+                        warnln!(
+                            "[W12] Manifest not found for {:?} at {:?}",
+                            dep.name,
+                            dep.source
+                        );
+                    }
                     Ok(None)
                 }
             }
@@ -1185,8 +1216,9 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> {
                             cause,
                         )
                     })?;
-                let mut full =
-                    partial.validate_ignore_sources("", true).map_err(|cause| {
+                let mut full = partial
+                    .validate_ignore_sources("", true, &self.sess.suppress_warnings)
+                    .map_err(|cause| {
                         Error::chain(
                             format!(
                                 "Error in manifest of dependency `{}` at revision \
@@ -1211,7 +1243,9 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> {
                 Ok(Some(self.sess.intern_manifest(full)))
             }
             None => {
-                warnln!("Manifest not found for {:?}", dep.name);
+                if !self.sess.suppress_warnings.contains("W12") {
+                    warnln!("[W12] Manifest not found for {:?}", dep.name);
+                }
                 Ok(None)
             }
         };
@@ -1227,8 +1261,9 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> {
                 Some(x) => &x.package.name,
                 None => "dead",
             }
+            && !self.sess.suppress_warnings.contains("W11")
         {
-            warnln!("Dependency name and package name do not match for {:?} / {:?}, this can cause unwanted behavior",
+            warnln!("[W11] Dependency name and package name do not match for {:?} / {:?}, this can cause unwanted behavior",
                 dep.name, match manifest {
                     Some(x) => &x.package.name,
                     None => "dead"
                 });
         }
@@ -1272,7 +1307,7 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> {
             .and_then(move |path| {
                 let manifest_path = path.join("Bender.yml");
                 if manifest_path.exists() {
-                    match read_manifest(&manifest_path) {
+                    match read_manifest(&manifest_path, &self.sess.suppress_warnings) {
                         Ok(m) => Ok(Some(self.sess.intern_manifest(m))),
                         Err(e) => Err(e),
                     }
                 } else {
@@ -1384,7 +1419,9 @@ impl<'io, 'sess: 'io, 'ctx: 'sess> SessionIo<'sess, 'ctx> {
         if !m.dependencies.is_empty() {
             for i in m.dependencies.keys() {
                 if !all_export_include_dirs.contains_key(i) {
-                    warnln!("Name issue with {:?}, `export_include_dirs` not handled\n\tCould relate to name mismatch, see `bender update`", i);
+                    if !self.sess.suppress_warnings.contains("W13") {
+                        warnln!("[W13] Name issue with {:?}, `export_include_dirs` not handled\n\tCould relate to name mismatch, see `bender update`", i);
+                    }
                     export_include_dirs.insert(i.clone(), IndexSet::new());
                 } else {
                     export_include_dirs.insert(
@@ -1612,7 +1649,7 @@ pub struct DependencyEntry {
 impl DependencyEntry {
     /// Obtain the dependency version for this entry.
-    pub fn version(&self) -> DependencyVersion {
+    pub fn version(&self) -> DependencyVersion<'_> {
         match self.source {
             DependencySource::Registry => unimplemented!(),
             DependencySource::Path(_) => DependencyVersion::Path,
@@ -1685,15 +1722,19 @@ impl<'ctx> DependencyTable<'ctx> {
     ///
     /// The reference with which the information can later be retrieved is
     /// returned.
-    pub fn add(&mut self, entry: &'ctx DependencyEntry) -> DependencyRef {
+    pub fn add(
+        &mut self,
+        entry: &'ctx DependencyEntry,
+        suppress_warnings: &IndexSet<String>,
+    ) -> DependencyRef {
         if let Some(&id) = self.ids.get(&entry) {
             debugln!("sess: reusing {:?}", id);
             id
         } else {
             if let DependencySource::Path(path) = &entry.source {
-                if !path.exists() {
+                if !path.exists() && !suppress_warnings.contains("W22") {
                     warnln!(
-                        "Dependency `{}` has source path `{}` which does not exist",
+                        "[W22] Dependency `{}` has source path `{}` which does not exist",
                         entry.name,
                         path.display()
                     );
diff --git a/src/src.rs b/src/src.rs
index e1cffbaa..45f8ad21 100644
--- a/src/src.rs
+++ b/src/src.rs
@@ -50,12 +50,13 @@ impl<'ctx> Validate for SourceGroup<'ctx> {
         self,
         package_name: &str,
         pre_output: bool,
+        suppress_warnings: &IndexSet<String>,
     ) -> crate::error::Result<SourceGroup<'ctx>> {
         Ok(SourceGroup {
             files: self
                 .files
                 .into_iter()
-                .map(|f| f.validate(package_name, pre_output))
+                .map(|f| f.validate(package_name, pre_output, suppress_warnings))
                 .collect::<Result<Vec<_>, Error>>()?,
             ..self
         })
@@ -340,24 +341,37 @@ impl<'ctx> From<&'ctx Path> for SourceFile<'ctx> {
 impl<'ctx> Validate for SourceFile<'ctx> {
     type Output = SourceFile<'ctx>;
     type Error = Error;
-    fn validate(self, package_name: &str, pre_output: bool) -> Result<SourceFile<'ctx>, Error> {
+    fn validate(
+        self,
+        package_name: &str,
+        pre_output: bool,
+        suppress_warnings: &IndexSet<String>,
+    ) -> Result<SourceFile<'ctx>, Error> {
         match self {
             SourceFile::File(path) => {
                 let env_path_buf =
                     crate::config::env_path_from_string(path.to_string_lossy().to_string())?;
                 let exists = env_path_buf.exists() && env_path_buf.is_file();
-                if exists {
+                if exists || suppress_warnings.contains("E31") {
+                    if !(exists || suppress_warnings.contains("W31")) {
+                        warnln!(
+                            "[W31] File {} doesn't exist.",
+                            env_path_buf.to_string_lossy()
+                        );
+                    }
                     Ok(SourceFile::File(path))
                 } else {
                     Err(Error::new(format!(
-                        "File {} doesn't exist",
+                        "[E31] File {} doesn't exist",
                         env_path_buf.to_string_lossy()
                     )))
                 }
             }
-            SourceFile::Group(srcs) => Ok(SourceFile::Group(Box::new(
-                srcs.validate(package_name, pre_output)?,
-            ))),
+            SourceFile::Group(srcs) => Ok(SourceFile::Group(Box::new(srcs.validate(
+                package_name,
+                pre_output,
+                suppress_warnings,
+            )?))),
         }
     }
 }
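
Note the asymmetry between the two suppressable errors: suppressing E30 drops the offending entry from the source list entirely, while suppressing E31 keeps the missing file in the list and merely downgrades the failure to W31 (which can itself be suppressed). The E31 gate from `SourceFile::validate` above, restated as a standalone sketch with a hypothetical function name:

```rust
use std::path::PathBuf;

use indexmap::IndexSet;

/// With E31 suppressed, a missing file is kept and reported as W31 (unless
/// W31 is suppressed too); otherwise it is a hard error tagged [E31].
fn check_exists(path: PathBuf, suppress: &IndexSet<String>) -> Result<PathBuf> {
    let exists = path.exists() && path.is_file();
    if exists || suppress.contains("E31") {
        if !exists && !suppress.contains("W31") {
            warnln!("[W31] File {} doesn't exist.", path.to_string_lossy());
        }
        Ok(path)
    } else {
        Err(Error::new(format!(
            "[E31] File {} doesn't exist",
            path.to_string_lossy()
        )))
    }
}
```
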