From d430b36d2293526267cd8389c3366298636a85a0 Mon Sep 17 00:00:00 2001
From: Bernhard Schuster
Date: Sat, 16 Jan 2021 11:16:01 +0100
Subject: [PATCH] allow dev comments to be enabled from cli, opt in (#138)

Closes #132
---
 README.md                    |   8 +-
 src/action/mod.rs            |  25 +++++-
 src/checker/mod.rs           |   3 +-
 src/config/mod.rs            |  16 ++++
 src/documentation/cluster.rs |  12 +--
 src/documentation/mod.rs     |  23 ++---
 src/documentation/tests.rs   |   9 +-
 src/main.rs                  | 162 +++++++++++++++++++++--------------
 src/reflow/tests.rs          |  20 ++---
 src/traverse/iter.rs         |  18 +++-
 src/traverse/mod.rs          |   5 +-
 11 files changed, 197 insertions(+), 104 deletions(-)

diff --git a/README.md b/README.md
index d93c52b0..28c4da1d 100644
--- a/README.md
+++ b/README.md
@@ -85,6 +85,12 @@ return code if mistakes are found instead of `0`.
 # Project settings where a Cargo.toml exists and is passed
 # ${CARGO_MANIFEST_DIR}/.config/spellcheck.toml
 
+# Also take into account developer comments
+dev_comments = false
+
+# Skip the README.md file as defined in the cargo manifest
+skip_readme = false
+
 # Fallback to per use configuration files:
 # Linux: /home/alice/.config/cargo_spellcheck/config.toml
 # Windows: C:\Users\Alice\AppData\Roaming\cargo_spellcheck\config.toml
@@ -104,7 +110,7 @@ lang = "en_US"
 # os specific search dirs, searched in order, defaults last
 # search_dirs = []
 
-# Adds additional dictionaries, can be specified as 
+# Adds additional dictionaries, can be specified as
 # absolute paths or relative in the search dirs (in this order).
 # Relative paths are resolved relative to the configuration file
 # which is used.
diff --git a/src/action/mod.rs b/src/action/mod.rs
index 5c0d851c..21e8c5e7 100644
--- a/src/action/mod.rs
+++ b/src/action/mod.rs
@@ -209,15 +209,33 @@ where
     Ok(())
 }
 
-/// Mode in which `cargo-spellcheck` operates
-#[derive(Debug, Clone, Copy, Eq, PartialEq)]
+/// Mode in which `cargo-spellcheck` operates.
+///
+/// Eventually to be used directly in parsing args.
+#[derive(Debug, Clone, Copy, Eq, PartialEq, Deserialize)]
 pub enum Action {
     /// Only show errors
+    #[serde(alias = "check")]
     Check,
+    /// Interactively choose from checker provided suggestions.
+    #[serde(alias = "fix")]
     Fix,
     /// Reflow doc comments so they adhere to a given maximum column width.
+    #[serde(alias = "reflow")]
     Reflow,
+
+    /// Print help and exit.
+    #[serde(alias = "help")]
+    Help,
+
+    /// Print the version info and exit.
+    #[serde(alias = "version")]
+    Version,
+
+    /// Print the config being in use, default config if none.
+    #[serde(alias = "config")]
+    Config,
 }
 
 impl Action {
@@ -356,6 +374,9 @@ impl Action {
                     Ok(Finish::MistakeCount(n))
                 }
             }
+            Self::Config | Self::Version | Self::Help => {
+                unreachable!("Should have been handled way earlier")
+            }
         }
     }
 }
diff --git a/src/checker/mod.rs b/src/checker/mod.rs
index a50352ce..5be20e6f 100644
--- a/src/checker/mod.rs
+++ b/src/checker/mod.rs
@@ -150,7 +150,8 @@ pub mod tests {
             .filter(None, log::LevelFilter::Trace)
             .is_test(true)
             .try_init();
-        let d = Documentation::from((ContentOrigin::TestEntityRust, content));
+        let dev_comments = false;
+        let d = Documentation::load_from_str(ContentOrigin::TestEntityRust, content, dev_comments);
 
         let suggestion_set =
            dummy::DummyChecker::check(&d, &()).expect("Dummy extraction must never fail");
diff --git a/src/config/mod.rs b/src/config/mod.rs
index a849effd..0eb8f292 100644
--- a/src/config/mod.rs
+++ b/src/config/mod.rs
@@ -28,6 +28,17 @@ use std::path::{Path, PathBuf};
 #[derive(Deserialize, Serialize, Debug, Clone)]
 #[serde(deny_unknown_fields)]
 pub struct Config {
+    // Options that modify the inputs being picked up.
+    #[serde(default)]
+    #[serde(alias = "dev-comments")]
+    #[serde(alias = "devcomments")]
+    pub dev_comments: bool,
+
+    #[serde(default)]
+    #[serde(alias = "skip-readme")]
+    #[serde(alias = "skipreadme")]
+    pub skip_readme: bool,
+
     #[serde(alias = "Hunspell")]
     pub hunspell: Option<HunspellConfig>,
     #[serde(alias = "LanguageTool")]
@@ -419,6 +430,8 @@ impl Config {
 impl Default for Config {
     fn default() -> Self {
         Self {
+            dev_comments: false,
+            skip_readme: false,
             hunspell: Some(HunspellConfig {
                 lang: Some("en_US".to_owned()),
                 search_dirs: SearchDirs::default(),
@@ -452,6 +465,9 @@ mod tests {
     fn all() {
         let _ = Config::parse(
             r#"
+dev_comments = true
+skip-readme = true
+
 [LanguageTool]
 url = "http://127.0.0.1:8010/"
 
diff --git a/src/documentation/cluster.rs b/src/documentation/cluster.rs
index 6cabf7fd..d02ce289 100644
--- a/src/documentation/cluster.rs
+++ b/src/documentation/cluster.rs
@@ -4,7 +4,7 @@ use super::{trace, LiteralSet, Spacing, TokenTree, TrimmedLiteral, TryInto};
 use crate::documentation::developer::extract_developer_comments;
 use crate::documentation::Range;
 use crate::Span;
-use anyhow::{anyhow, Error, Result};
+use anyhow::{anyhow, Result};
 use std::convert::TryFrom;
 
 /// Cluster literals for one file
@@ -109,18 +109,18 @@ impl Clusters {
     fn ensure_sorted(&mut self) {
         self.set.sort_by(|ls1, ls2| ls1.coverage.cmp(&ls2.coverage));
     }
-}
 
-impl TryFrom<&str> for Clusters {
-    type Error = Error;
-    fn try_from(source: &str) -> Result<Self> {
+    /// Load clusters from a `&str`. Optionally loads developer comments as well.
+    pub(crate) fn load_from_str(source: &str, dev_comments: bool) -> Result<Self> {
         let mut chunk = Self {
             set: Vec::with_capacity(64),
         };
         let stream = syn::parse_str::<proc_macro2::TokenStream>(source)
             .map_err(|e| anyhow!("Failed to parse content to stream").context(e))?;
         chunk.parse_token_tree(source, stream)?;
-        chunk.parse_developer_comments(source);
+        if dev_comments {
+            chunk.parse_developer_comments(source);
+        }
         chunk.ensure_sorted();
         Ok(chunk)
     }
diff --git a/src/documentation/mod.rs b/src/documentation/mod.rs
index d319bc90..ab4bec26 100644
--- a/src/documentation/mod.rs
+++ b/src/documentation/mod.rs
@@ -21,7 +21,7 @@ use log::trace;
 pub use proc_macro2::LineColumn;
 use proc_macro2::{Spacing, TokenTree};
 use rayon::prelude::*;
-use std::convert::{TryFrom, TryInto};
+use std::convert::TryInto;
 use std::path::PathBuf;
 
 /// Range based on `usize`, simplification.
@@ -117,8 +117,13 @@ impl Documentation {
     }
 
     /// Adds a rust content str to the documentation.
-    pub fn add_rust(&mut self, origin: ContentOrigin, content: &str) -> Result<()> {
-        let cluster = Clusters::try_from(content)?;
+    pub fn add_rust(
+        &mut self,
+        origin: ContentOrigin,
+        content: &str,
+        dev_comments: bool,
+    ) -> Result<()> {
+        let cluster = Clusters::load_from_str(content, dev_comments)?;
         let chunks = Vec::<CheckableChunk>::from(cluster);
         self.add_inner(origin, chunks);
 
@@ -166,26 +171,24 @@ impl Documentation {
     pub fn entry_count(&self) -> usize {
         self.index.len()
     }
-}
 
-/// only a shortcut to avoid duplicate code
-impl From<(ContentOrigin, &str)> for Documentation {
-    fn from((origin, content): (ContentOrigin, &str)) -> Self {
+    /// Load a document from a single string with a defined origin.
+    pub fn load_from_str(origin: ContentOrigin, content: &str, dev_comments: bool) -> Self {
         let mut docs = Documentation::new();
 
         match &origin {
             ContentOrigin::RustDocTest(_path, span) => {
                 if let Ok(excerpt) = load_span_from(&mut content.as_bytes(), span.clone()) {
-                    docs.add_rust(origin.clone(), excerpt.as_str())
+                    docs.add_rust(origin.clone(), excerpt.as_str(), dev_comments)
                 } else {
                     // TODO
                     Ok(())
                 }
             }
-            ContentOrigin::RustSourceFile(_path) => docs.add_rust(origin, content),
+            ContentOrigin::RustSourceFile(_path) => docs.add_rust(origin, content, dev_comments),
             ContentOrigin::CommonMarkFile(_path) => docs.add_commonmark(origin, content),
             #[cfg(test)]
-            ContentOrigin::TestEntityRust => docs.add_rust(origin, content),
+            ContentOrigin::TestEntityRust => docs.add_rust(origin, content, dev_comments),
             #[cfg(test)]
             ContentOrigin::TestEntityCommonMark => docs.add_commonmark(origin, content),
         }
diff --git a/src/documentation/tests.rs b/src/documentation/tests.rs
index b6ef9222..2f9cae4f 100644
--- a/src/documentation/tests.rs
+++ b/src/documentation/tests.rs
@@ -21,7 +21,7 @@ fn parse_and_construct() {
     const TEST_PLAIN: &str = r#"A very good test."#;
 
     let origin = ContentOrigin::TestEntityRust;
-    let docs = Documentation::from((origin.clone(), TEST_SOURCE));
+    let docs = Documentation::load_from_str(origin.clone(), TEST_SOURCE, false);
     assert_eq!(docs.index.len(), 1);
     let chunks = docs.index.get(&origin).expect("Must contain dummy path");
     assert_eq!(dbg!(chunks).len(), 1);
@@ -80,7 +80,7 @@ macro_rules! end2end {
            .try_init();

        let origin: ContentOrigin = $origin;
-        let docs = Documentation::from((origin.clone(), $test));
+        let docs = Documentation::load_from_str(origin.clone(), $test, false);
        assert_eq!(docs.index.len(), 1);
        let chunks = docs.index.get(&origin).expect("Must contain dummy path");
        assert_eq!(dbg!(chunks).len(), 1);
@@ -175,8 +175,7 @@ mod e2e {

            let origin: ContentOrigin = $origin;

-            let docs = Documentation::from((origin.clone(), $source));
-
+            let docs = Documentation::load_from_str(origin.clone(), $source, false);
            let suggestion_set =
                dbg!(DummyChecker::check(&docs, &())).expect("Dummy checker never fails. qed");

@@ -1111,6 +1110,8 @@ ff"#,
 }
 
 pub(crate) fn annotated_literals(source: &str) -> Vec<TrimmedLiteral> {
+    use std::convert::TryFrom;
+
     let stream = syn::parse_str::<proc_macro2::TokenStream>(source).expect("Must be valid rust");
     stream
         .into_iter()
diff --git a/src/main.rs b/src/main.rs
index 900651b1..66c52e1b 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -43,11 +43,10 @@ const USAGE: &str = r#"
 Spellcheck all your doc comments
 
 Usage:
-    cargo-spellcheck [(-v...|-q)] check [--cfg=<cfg>] [--code=<code>] [--skip-readme] [--checkers=<checkers>] [[--recursive] <paths>... ]
-    cargo-spellcheck [(-v...|-q)] fix [--cfg=<cfg>] [--code=<code>] [--skip-readme] [--checkers=<checkers>] [[--recursive] <paths>... ]
-    cargo-spellcheck [(-v...|-q)] reflow [--cfg=<cfg>] [--code=<code>] [--skip-readme] [[--recursive] <paths>... ]
+    cargo-spellcheck [(-v...|-q)] [check] [--fix] [--cfg=<cfg>] [--code=<code>] [--dev-comments] [--skip-readme] [--checkers=<checkers>] [[--recursive] <paths>... ]
+    cargo-spellcheck [(-v...|-q)] fix [--cfg=<cfg>] [--code=<code>] [--dev-comments] [--skip-readme] [--checkers=<checkers>] [[--recursive] <paths>... ]
+    cargo-spellcheck [(-v...|-q)] reflow [--cfg=<cfg>] [--code=<code>] [--dev-comments] [--skip-readme] [[--recursive] <paths>... ]
     cargo-spellcheck [(-v...|-q)] config (--user|--stdout|--cfg=<cfg>) [--force]
-    cargo-spellcheck [(-v...|-q)] [--cfg=<cfg>] [--fix] [--code=<code>] [--skip-readme] [--checkers=<checkers>] [[--recursive] <paths>... ]
     cargo-spellcheck --help
     cargo-spellcheck --version
 
@@ -70,6 +69,19 @@ Options:
   --skip-readme           Do not attempt to process README.md files listed in Cargo.toml manifests.
 "#;
 
+#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, Deserialize)]
+enum CheckerType {
+    #[serde(alias = "hunspell")]
+    Hunspell,
+    #[serde(alias = "languageTool")]
+    #[serde(alias = "Languagetool")]
+    #[serde(alias = "languagetool")]
+    LanguageTool,
+    #[serde(alias = "ReFlow")]
+    #[serde(alias = "reflow")]
+    Reflow,
+}
+
 /// A simple exit code representation.
 ///
 /// `Custom` can be specified by the user, others map to thei unix equivalents
@@ -105,11 +117,13 @@ struct Args {
     flag_quiet: bool,
     flag_version: bool,
     flag_help: bool,
-    flag_checkers: Option<String>,
+    flag_checkers: Option<Vec<CheckerType>>,
     flag_cfg: Option<PathBuf>,
     flag_force: bool,
     flag_user: bool,
-    flag_skip_readme: bool,
+    // with fallback from config, so it has to be tri-state
+    flag_skip_readme: Option<bool>,
+    flag_dev_comments: Option<bool>,
     flag_code: u8,
     flag_stdout: bool,
     cmd_fix: bool,
@@ -118,6 +132,27 @@ struct Args {
     cmd_config: bool,
 }
 
+impl Args {
+    fn action(&self) -> Action {
+        // extract operation mode
+        let action = if self.cmd_fix || self.flag_fix {
+            Action::Fix
+        } else if self.cmd_reflow {
+            Action::Reflow
+        } else if self.cmd_config {
+            Action::Config
+        } else if self.flag_help {
+            Action::Help
+        } else if self.cmd_check {
+            Action::Check
+        } else {
+            Action::Check
+        };
+        log::trace!("Derived action {:?} from flags/args/cmds", action);
+        action
+    }
+}
+
 /// Handle incoming signals.
 ///
 /// Only relevant for *-nix platforms.
@@ -199,73 +234,67 @@ fn run() -> anyhow::Result<ExitCode> {
         .filter_level(verbosity)
         .init();
 
-    if args.flag_version {
-        println!("cargo-spellcheck {}", env!("CARGO_PKG_VERSION"));
-        return Ok(ExitCode::Success);
-    }
-
-    if args.flag_help {
-        println!("{}", USAGE);
-        return Ok(ExitCode::Success);
-    }
-
-    #[cfg(not(target_os = "windows"))]
-    let _signalthread = std::thread::spawn(move || signal_handler());
-
     let checkers = |config: &mut Config| {
         // overwrite checkers
-        if let Some(checkers) = args.flag_checkers.clone() {
-            let checkers = checkers
-                .split(',')
-                .map(|checker| checker.to_lowercase())
-                .collect::<Vec<_>>();
-            if !checkers.contains(&"hunspell".to_owned()) {
+        if let Some(ref checkers) = args.flag_checkers {
+            if !checkers.contains(&CheckerType::Hunspell) {
                 if !config.hunspell.take().is_some() {
                     warn!("Hunspell was never configured.")
                 }
             }
-            if !checkers.contains(&"languagetool".to_owned()) {
+            if !checkers.contains(&CheckerType::LanguageTool) {
                 if !config.languagetool.take().is_some() {
                     warn!("Languagetool was never configured.")
                 }
             }
-            if !checkers.contains(&"reflow".to_owned()) {
+            if !checkers.contains(&CheckerType::Reflow) {
                 warn!("Reflow is a separate sub command.")
             }
         }
     };
 
-    // handle `config` sub command
-    if args.cmd_config {
-        trace!("Configuration chore");
-        let mut config = Config::full();
-        checkers(&mut config);
-
-        let config_path = match args.flag_cfg.as_ref() {
-            Some(path) => Some(path.to_owned()),
-            None if args.flag_user => Some(Config::default_path()?),
-            None => None,
-        };
-
-        if args.flag_stdout {
-            println!("{}", config.to_toml()?);
+    let action = match args.action() {
+        Action::Version => {
+            println!("cargo-spellcheck {}", env!("CARGO_PKG_VERSION"));
             return Ok(ExitCode::Success);
         }
+        Action::Help => {
+            println!("{}", USAGE);
+            return Ok(ExitCode::Success);
+        }
+        Action::Config => {
+            trace!("Configuration chore");
+            let mut config = Config::full();
+            checkers(&mut config);
+
+            let config_path = match args.flag_cfg.as_ref() {
+                Some(path) => Some(path.to_owned()),
+                None if args.flag_user => Some(Config::default_path()?),
+                None => None,
+            };
 
-        if let Some(path) = config_path {
-            if path.is_file() && !args.flag_force {
-                return Err(anyhow::anyhow!(
-                    "Attempting to overwrite {} requires `--force`.",
-                    path.display()
-                ));
+            if args.flag_stdout {
+                println!("{}", config.to_toml()?);
+                return Ok(ExitCode::Success);
             }
-            info!("Writing configuration file to {}", path.display());
-            config.write_values_to_path(path)?;
+
+            if let Some(path) = config_path {
+                if path.is_file() && !args.flag_force {
+                    return Err(anyhow::anyhow!(
+                        "Attempting to overwrite {} requires `--force`.",
+                        path.display()
+                    ));
+                }
+                info!("Writing configuration file to {}", path.display());
+                config.write_values_to_path(path)?;
+            }
+            return Ok(ExitCode::Success);
         }
-        return Ok(ExitCode::Success);
-    } else {
-        trace!("Not configuration sub command");
-    }
+        action => action,
+    };
+
+    #[cfg(not(target_os = "windows"))]
+    let _signalthread = std::thread::spawn(move || signal_handler());
 
     let (explicit_cfg, config_path) = match args.flag_cfg.as_ref() {
         Some(config_path) => {
@@ -322,7 +351,8 @@ fn run() -> anyhow::Result<ExitCode> {
         // otherwise it's a file and we do not care about it
     }
 
-    let config_path = config_path.with_file_name(""); //.expect("Found file ends in Cargo.toml and is abs. qed");
qed"); + // remove the file name + let config_path = config_path.with_file_name(""); let resolved_config_path = config::Config::project_config(&config_path) .or_else(|e| { @@ -361,29 +391,26 @@ fn run() -> anyhow::Result { checkers(&mut config); - // extract operation mode - let action = if args.cmd_fix || args.flag_fix { - Action::Fix - } else if args.cmd_reflow { - Action::Reflow - } else { - Action::Check - }; - debug!("Executing: {:?} with {:?}", action, &config); + let dev_comments = args.flag_dev_comments.unwrap_or(config.dev_comments); + let skip_readme = args.flag_skip_readme.unwrap_or(config.skip_readme); + let combined = traverse::extract( args.arg_paths, args.flag_recursive, - args.flag_skip_readme, + skip_readme, + dev_comments, &config, )?; + // TODO move this into action `fn run()` let suggestion_set = match action { Action::Reflow => { reflow::Reflow::check(&combined, &config.reflow.clone().unwrap_or_default())? } Action::Check | Action::Fix => checker::check(&combined, &config)?, + _ => unreachable!("Should never be reached, handled earlier"), }; let finish = action.run(suggestion_set, &config)?; @@ -430,7 +457,12 @@ mod tests { "cargo-spellcheck reflow", ]; for command in commands { - assert!(parse_args(commandline_to_iter(command)).is_ok()); + assert!(parse_args(commandline_to_iter(command)) + .map_err(|e| { + println!("Processing > {:?}", command); + e + }) + .is_ok()); } } } diff --git a/src/reflow/tests.rs b/src/reflow/tests.rs index ffc9dab1..7f44e1d9 100644 --- a/src/reflow/tests.rs +++ b/src/reflow/tests.rs @@ -12,7 +12,7 @@ macro_rules! verify_reflow_inner { .try_init(); const CONTENT: &'static str = fluff_up!($( $line ),+); - let docs = Documentation::from((ContentOrigin::TestEntityRust, CONTENT)); + let docs = Documentation::load_from_str(ContentOrigin::TestEntityRust, CONTENT, false); assert_eq!(docs.entry_count(), 1); let chunks = docs.get(&ContentOrigin::TestEntityRust).expect("Must contain dummy path"); assert_eq!(dbg!(chunks).len(), 1); @@ -86,7 +86,7 @@ macro_rules! reflow_content { .is_test(true) .try_init(); - let docs = Documentation::from(($content_type, $content)); + let docs = Documentation::load_from_str($content_type, $content, false); assert_eq!(docs.entry_count(), 1); let chunks = docs.get(&$content_type).expect("Contains test data. qed"); assert_eq!(dbg!(chunks).len(), 1); @@ -129,7 +129,7 @@ macro_rules! reflow_content { .is_test(true) .try_init(); - let docs = Documentation::from(($content_type, $content)); + let docs = Documentation::load_from_str($content_type, $content, false); assert_eq!(docs.entry_count(), 1); let chunks = docs.get(&$content_type).expect("Contains test data. qed"); assert_eq!(dbg!(chunks).len(), 1); @@ -154,7 +154,7 @@ macro_rules! reflow_content { .is_test(true) .try_init(); - let docs = Documentation::from(($content_type, $content)); + let docs = Documentation::load_from_str($content_type, $content, false); assert_eq!(docs.entry_count(), 1); let chunks = docs.get(&$content_type).expect("Contains test data. 
qed"); assert_eq!(dbg!(chunks).len(), 1); @@ -286,7 +286,7 @@ fn reflow_indentations() { max_line_length: 10, }; - let docs = Documentation::from((ContentOrigin::TestEntityRust, CONTENT)); + let docs = Documentation::load_from_str(ContentOrigin::TestEntityRust, CONTENT, false); assert_eq!(docs.entry_count(), 1); let chunks = docs .get(&ContentOrigin::TestEntityRust) @@ -324,7 +324,7 @@ fn reflow_doc_indentations() { #[doc = r#"that spans over two lines and"#] #[doc = r#"should be rewrapped."##; - let docs = Documentation::from((ContentOrigin::TestEntityRust, CONTENT)); + let docs = Documentation::load_from_str(ContentOrigin::TestEntityRust, CONTENT, false); assert_eq!(dbg!(&docs).entry_count(), 1); let chunks = docs .get(&ContentOrigin::TestEntityRust) @@ -397,7 +397,7 @@ fn reflow_markdown_two_paragraphs() { .is_test(true) .try_init(); - let docs = Documentation::from((ContentOrigin::TestEntityRust, CONTENT)); + let docs = Documentation::load_from_str(ContentOrigin::TestEntityRust, CONTENT, false); assert_eq!(docs.entry_count(), 1); let chunks = docs .get(&ContentOrigin::TestEntityRust) @@ -439,7 +439,7 @@ With a second part that is fine"# r#"With a second part that is fine"#, ]; - let docs = Documentation::from((ContentOrigin::TestEntityRust, chyrped)); + let docs = Documentation::load_from_str(ContentOrigin::TestEntityRust, chyrped, false); assert_eq!(docs.entry_count(), 1); let chunks = docs .get(&ContentOrigin::TestEntityRust) @@ -538,7 +538,7 @@ multiline. Fullstop."#, .is_test(true) .try_init(); - let docs = Documentation::from((ContentOrigin::TestEntityCommonMark, CONTENT)); + let docs = Documentation::load_from_str(ContentOrigin::TestEntityCommonMark, CONTENT, false); assert_eq!(docs.entry_count(), 1); let chunks = docs .get(&ContentOrigin::TestEntityCommonMark) @@ -607,7 +607,7 @@ struct Fff; end: LineColumn { line: 2, column: 8 }, }; - let docs = Documentation::from((ContentOrigin::TestEntityRust, CONTENT)); + let docs = Documentation::load_from_str(ContentOrigin::TestEntityRust, CONTENT, false); assert_eq!(docs.entry_count(), 1); let chunks = docs .get(&ContentOrigin::TestEntityRust) diff --git a/src/traverse/iter.rs b/src/traverse/iter.rs index 9ad1622d..fe935614 100644 --- a/src/traverse/iter.rs +++ b/src/traverse/iter.rs @@ -128,8 +128,13 @@ impl Iterator for TraverseModulesIter { /// traverse path with a depth limit, if the path is a directory all its children will be collected /// instead -pub(crate) fn traverse(path: &Path) -> Result> { - traverse_with_depth_limit(path, usize::MAX) +// TODO should not read the documentation, that is out of scope. +// TODO should not have knowledge of `dev_comments`. +pub(crate) fn traverse( + path: &Path, + dev_comments: bool, +) -> Result> { + traverse_with_depth_limit(path, usize::MAX, dev_comments) } /// traverse path with a depth limit, if the path is a directory all its children will be collected @@ -137,11 +142,16 @@ pub(crate) fn traverse(path: &Path) -> Result Result> { let it = TraverseModulesIter::with_depth_limit(path, max_depth)? 
-        .filter_map(|path: PathBuf| -> Option<Documentation> {
+        .filter_map(move |path: PathBuf| -> Option<Documentation> {
             fs::read_to_string(&path).ok().map(|content| {
-                Documentation::from((ContentOrigin::RustSourceFile(path), content.as_str()))
+                Documentation::load_from_str(
+                    ContentOrigin::RustSourceFile(path),
+                    content.as_str(),
+                    dev_comments,
+                )
             })
         })
         .filter(|documentation| !documentation.is_empty());
diff --git a/src/traverse/mod.rs b/src/traverse/mod.rs
index fab39090..46946ae0 100644
--- a/src/traverse/mod.rs
+++ b/src/traverse/mod.rs
@@ -298,6 +298,7 @@ pub(crate) fn extract(
     mut paths: Vec<PathBuf>,
     mut recurse: bool,
     skip_readme: bool,
+    dev_comments: bool,
     _config: &Config,
 ) -> Result<Documentation> {
     let cwd = cwd()?;
@@ -397,7 +398,7 @@ pub(crate) fn extract(
         match item {
             CheckEntity::Source(path, recurse) => {
                 if recurse {
-                    let iter = traverse(path.as_path())?;
+                    let iter = traverse(path.as_path(), dev_comments)?;
                     docs.extend(iter);
                 } else {
                     let content: String = fs::read_to_string(&path).map_err(|e| {
@@ -406,6 +407,7 @@ pub(crate) fn extract(
                     docs.add_rust(
                         ContentOrigin::RustSourceFile(path.to_owned()),
                         content.as_str(),
+                        dev_comments,
                     )
                     .unwrap_or_else(|_e| {
                         log::error!("BUG: Failed to create cluster for {}", path.display())
@@ -540,6 +542,7 @@ mod tests {
            ],
            $recurse,
            false,
+            true,
            &Config::default(),
        )
        .expect("Must be able to extract demo dir");
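
Usage note (illustrative sketch, not part of the commit itself): the new inputs are opt-in. `dev_comments` and `skip_readme` default to `false`, can be set per project in `.config/spellcheck.toml`, and the `--dev-comments` flag added to `check`, `fix` and `reflow` (like the existing `--skip-readme`) overrides the corresponding config value via `args.flag_dev_comments.unwrap_or(config.dev_comments)`. A minimal config fragment using only the keys and aliases introduced above:

    # .config/spellcheck.toml
    # opt in to spellchecking developer (non-doc) comments
    dev_comments = true    # aliases: dev-comments, devcomments
    # keep processing the README.md referenced from Cargo.toml
    skip_readme = false    # aliases: skip-readme, skipreadme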