Make clippy more strict

Nick Groenen 2024-08-02 18:46:17 +02:00
parent b216ac63aa
commit c8cf91c0c8
No known key found for this signature in database
GPG Key ID: 4F0AD019928AE098
9 changed files with 214 additions and 105 deletions

View File

@@ -76,3 +76,81 @@ windows-archive = ".zip"
 pr-run-mode = "plan"
 # Publish jobs to run in CI
 publish-jobs = ["./publish-crate"]
+
+[lints.rust]
+nonstandard_style = { level = "warn", priority = -1 }
+rust_2018_idioms = { level = "warn", priority = -1 }
+rust_2024_compatibility = { level = "warn", priority = -1 }
+noop_method_call = "warn"
+redundant-lifetimes = "warn"
+unsafe_op_in_unsafe_fn = "warn"
+unused_qualifications = "warn"
+
+[lints.clippy]
+pedantic = { level = "warn", priority = -1 }
+nursery = { level = "warn", priority = -1 }
+
+# Should probably change these back to warn in the future, but it's a
+# low-priority issue for me at the moment.
+missing_errors_doc = "allow"
+missing_panics_doc = "allow"
+
+# These lints from the pedantic group are actually too pedantic for my taste:
+match_bool = "allow"
+similar_names = "allow"
+string-add = "allow"
+
+# Enable select lints from the 'restriction' group (which is not meant to be
+# enabled as a whole)
+arithmetic_side_effects = "warn"
+as_conversions = "warn"
+assertions_on_result_states = "warn"
+clone_on_ref_ptr = "warn"
+dbg_macro = "warn"
+default_numeric_fallback = "warn"
+else_if_without_else = "warn"
+empty_enum_variants_with_brackets = "warn"
+error_impl_error = "warn"
+exhaustive_enums = "warn"
+exhaustive_structs = "warn"
+filetype_is_file = "warn"
+float_cmp_const = "warn"
+fn_to_numeric_cast_any = "warn"
+if_then_some_else_none = "warn"
+impl_trait_in_params = "warn"
+indexing_slicing = "warn"
+infinite_loop = "warn"
+integer_division = "warn"
+large_include_file = "warn"
+lossy_float_literal = "warn"
+map_err_ignore = "warn"
+mem_forget = "warn"
+multiple_inherent_impl = "warn"
+multiple_unsafe_ops_per_block = "warn"
+panic_in_result_fn = "warn"
+rc_buffer = "warn"
+rc_mutex = "warn"
+redundant_type_annotations = "warn"
+same_name_method = "warn"
+self_named_module_files = "warn"
+shadow_unrelated = "warn"
+str_to_string = "warn"
+string_add = "warn"
+string_slice = "warn"
+string_to_string = "warn"
+suspicious_xor_used_as_pow = "warn"
+todo = "warn"
+try_err = "warn"
+undocumented_unsafe_blocks = "warn"
+unneeded_field_pattern = "warn"
+unseparated_literal_suffix = "warn"
+vec_init_then_push = "warn"
+
+#expect_used = "warn"
+#missing_docs_in_private_items = "warn"
+#missing_inline_in_public_items = "warn"
+#pathbuf_init_then_push = "warn" # Rust 1.81.0+
+#renamed_function_params = "warn" # Rust 1.80.0+
+#unwrap_in_result = "warn"
+#unwrap_used = "warn"
+#wildcard_enum_match_arm = "warn"
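
To make the effect of the 'restriction' selections above concrete, here is a small sketch (not part of this commit; the function below is invented for the example) showing code the new lints would flag, rewritten in the form they prefer:

    // Illustrative only; this function does not exist in the crate.
    fn first_non_empty(items: &[String]) -> Option<String> {
        // `indexing_slicing` would flag `items[0]` because it can panic;
        // `.first()` returns an Option instead.
        let first = items.first()?;

        // `if_then_some_else_none` prefers `bool::then` over
        // `if cond { Some(..) } else { None }`, and `string_to_string`
        // prefers `.to_owned()`/`.clone()` over `.to_string()` on a String.
        (!first.is_empty()).then(|| first.to_owned())
    }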

View File

@@ -21,7 +21,7 @@ pub struct Context {
     pub destination: PathBuf,
     /// The [Frontmatter] for this note. Frontmatter may be modified in-place (see
-    /// [serde_yaml::Mapping] for available methods) or replaced entirely.
+    /// [`serde_yaml::Mapping`] for available methods) or replaced entirely.
     ///
     /// # Example
     ///
@@ -46,8 +46,10 @@ pub struct Context {
 impl Context {
     /// Create a new `Context`
-    pub fn new(src: PathBuf, dest: PathBuf) -> Context {
-        Context {
+    #[inline]
+    #[must_use]
+    pub fn new(src: PathBuf, dest: PathBuf) -> Self {
+        Self {
             file_tree: vec![src],
             destination: dest,
             frontmatter: Frontmatter::new(),
@@ -55,13 +57,17 @@ impl Context {
     }

     /// Create a new `Context` which inherits from a parent Context.
-    pub fn from_parent(context: &Context, child: &Path) -> Context {
+    #[inline]
+    #[must_use]
+    pub fn from_parent(context: &Self, child: &Path) -> Self {
         let mut context = context.clone();
         context.file_tree.push(child.to_path_buf());
         context
     }

     /// Return the path of the file currently being parsed.
+    #[inline]
+    #[must_use]
     pub fn current_file(&self) -> &PathBuf {
         self.file_tree
             .last()
@@ -72,6 +78,8 @@ impl Context {
     ///
     /// Typically this will yield the same element as `current_file`, but when a note is embedded
     /// within another note, this will return the outer-most note.
+    #[inline]
+    #[must_use]
     pub fn root_file(&self) -> &PathBuf {
         self.file_tree
             .first()
@@ -79,6 +87,8 @@ impl Context {
     }

     /// Return the note depth (nesting level) for this context.
+    #[inline]
+    #[must_use]
     pub fn note_depth(&self) -> usize {
         self.file_tree.len()
     }
@@ -87,6 +97,8 @@ impl Context {
     ///
     /// The first element corresponds to the root file, the final element corresponds to the file
     /// which is currently being processed (see also `current_file`).
+    #[inline]
+    #[must_use]
     pub fn file_tree(&self) -> Vec<PathBuf> {
         self.file_tree.clone()
     }
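
The `#[must_use]` attributes added here make rustc warn when a freshly constructed `Context` is discarded. A hypothetical caller, not taken from this repository and assuming the usual re-export of `Context` at the crate root, might look like:

    use obsidian_export::Context;
    use std::path::{Path, PathBuf};

    fn example(src: PathBuf, dest: PathBuf) {
        // Would warn under #[must_use]: the Context is built and immediately dropped.
        Context::new(src.clone(), dest.clone());

        // Fine: the value is bound and used.
        let context = Context::new(src, dest);
        let child = Context::from_parent(&context, Path::new("embedded.md"));
        assert_eq!(child.note_depth(), 2);
    }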

View File

@@ -2,7 +2,7 @@ use serde_yaml::Result;
 /// YAML front matter from an Obsidian note.
 ///
-/// This is essentially an alias of [serde_yaml::Mapping] so all the methods available on that type
+/// This is essentially an alias of [`serde_yaml::Mapping`] so all the methods available on that type
 /// are available with `Frontmatter` as well.
 ///
 /// # Examples
@@ -26,6 +26,8 @@ use serde_yaml::Result;
 /// ```
 pub type Frontmatter = serde_yaml::Mapping;

+// Would be nice to rename this to just from_str, but that would be a breaking change.
+#[allow(clippy::module_name_repetitions)]
 pub fn frontmatter_from_str(mut s: &str) -> Result<Frontmatter> {
     if s.is_empty() {
         s = "{}";
@@ -34,9 +36,11 @@ pub fn frontmatter_from_str(mut s: &str) -> Result<Frontmatter> {
     Ok(frontmatter)
 }

-pub fn frontmatter_to_str(frontmatter: Frontmatter) -> Result<String> {
+// Would be nice to rename this to just to_str, but that would be a breaking change.
+#[allow(clippy::module_name_repetitions)]
+pub fn frontmatter_to_str(frontmatter: &Frontmatter) -> Result<String> {
     if frontmatter.is_empty() {
-        return Ok("---\n---\n".to_string());
+        return Ok("---\n---\n".to_owned());
     }

     let mut buffer = String::new();
@@ -46,8 +50,11 @@ pub fn frontmatter_to_str(frontmatter: Frontmatter) -> Result<String> {
     Ok(buffer)
 }

-#[derive(Debug, Clone, Copy)]
 /// Available strategies for the inclusion of frontmatter in notes.
+#[derive(Debug, Clone, Copy)]
+// Would be nice to rename this to just Strategy, but that would be a breaking change.
+#[allow(clippy::module_name_repetitions)]
+#[non_exhaustive]
 pub enum FrontmatterStrategy {
     /// Copy frontmatter when a note has frontmatter defined.
     Auto,
@@ -66,16 +73,16 @@ mod tests {
     #[test]
     fn empty_string_should_yield_empty_frontmatter() {
-        assert_eq!(frontmatter_from_str("").unwrap(), Frontmatter::new())
+        assert_eq!(frontmatter_from_str("").unwrap(), Frontmatter::new());
     }

     #[test]
     fn empty_frontmatter_to_str() {
         let frontmatter = Frontmatter::new();
         assert_eq!(
-            frontmatter_to_str(frontmatter).unwrap(),
+            frontmatter_to_str(&frontmatter).unwrap(),
             format!("---\n---\n")
-        )
+        );
     }

     #[test]
@@ -86,8 +93,8 @@ mod tests {
             Value::String("bar".to_string()),
         );
         assert_eq!(
-            frontmatter_to_str(frontmatter).unwrap(),
+            frontmatter_to_str(&frontmatter).unwrap(),
             format!("---\nfoo: bar\n---\n")
-        )
+        );
     }
 }
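
Because `frontmatter_to_str` now borrows its argument, callers keep ownership of the mapping after serializing it. A sketch of an extra test as it could look in this module (not part of the commit):

    #[test]
    fn frontmatter_to_str_borrows_its_argument() {
        use serde_yaml::Value;

        let mut frontmatter = Frontmatter::new();
        frontmatter.insert(
            Value::String("title".to_owned()),
            Value::String("My note".to_owned()),
        );

        // Passing a reference: `frontmatter` is still usable after the call.
        let rendered = frontmatter_to_str(&frontmatter).unwrap();
        assert!(rendered.starts_with("---\n"));
        assert_eq!(frontmatter.len(), 1);
    }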

View File

@@ -1,5 +1,5 @@
-pub extern crate pulldown_cmark;
-pub extern crate serde_yaml;
+pub use pulldown_cmark;
+pub use serde_yaml;

 #[macro_use]
 extern crate lazy_static;
@@ -20,7 +20,7 @@ use percent_encoding::{utf8_percent_encode, AsciiSet, CONTROLS};
 use pulldown_cmark::{CodeBlockKind, CowStr, Event, HeadingLevel, Options, Parser, Tag};
 use pulldown_cmark_to_cmark::cmark_with_options;
 use rayon::prelude::*;
-use references::*;
+use references::{ObsidianNoteReference, RefParser, RefParserState, RefType};
 use slug::slugify;
 use snafu::{ResultExt, Snafu};
 use std::ffi::OsString;
@@ -38,14 +38,14 @@ pub type MarkdownEvents<'a> = Vec<Event<'a>>;
 /// A post-processing function that is to be called after an Obsidian note has been fully parsed and
 /// converted to regular markdown syntax.
 ///
-/// Postprocessors are called in the order they've been added through [Exporter::add_postprocessor]
+/// Postprocessors are called in the order they've been added through [`Exporter::add_postprocessor`]
 /// just before notes are written out to their final destination.
 /// They may be used to achieve the following:
 ///
-/// 1. Modify a note's [Context], for example to change the destination filename or update its [Frontmatter] (see [Context::frontmatter]).
-/// 2. Change a note's contents by altering [MarkdownEvents].
-/// 3. Prevent later postprocessors from running ([PostprocessorResult::StopHere]) or cause a note
-/// to be skipped entirely ([PostprocessorResult::StopAndSkipNote]).
+/// 1. Modify a note's [Context], for example to change the destination filename or update its [Frontmatter] (see [`Context::frontmatter`]).
+/// 2. Change a note's contents by altering [`MarkdownEvents`].
+/// 3. Prevent later postprocessors from running ([`PostprocessorResult::StopHere`]) or cause a note
+/// to be skipped entirely ([`PostprocessorResult::StopAndSkipNote`]).
 ///
 /// # Postprocessors and embeds
 ///
@@ -54,17 +54,17 @@ pub type MarkdownEvents<'a> = Vec<Event<'a>>;
 ///
 /// In some cases it may be desirable to change the contents of these embedded notes *before* they
 /// are inserted into the final document. This is possible through the use of
-/// [Exporter::add_embed_postprocessor].
+/// [`Exporter::add_embed_postprocessor`].
 /// These "embed postprocessors" run much the same way as regular postprocessors, but they're run on
 /// the note that is about to be embedded in another note. In addition:
 ///
 /// - Changes to context carry over to later embed postprocessors, but are then discarded. This
 /// means that changes to frontmatter do not propagate to the root note for example.
-/// - [PostprocessorResult::StopAndSkipNote] prevents the embedded note from being included (it's
+/// - [`PostprocessorResult::StopAndSkipNote`] prevents the embedded note from being included (it's
 /// replaced with a blank document) but doesn't affect the root note.
 ///
-/// It's possible to pass the same functions to [Exporter::add_postprocessor] and
-/// [Exporter::add_embed_postprocessor]. The [Context::note_depth] method may be used to determine
+/// It's possible to pass the same functions to [`Exporter::add_postprocessor`] and
+/// [`Exporter::add_embed_postprocessor`]. The [`Context::note_depth`] method may be used to determine
 /// whether a note is a root note or an embedded note in this situation.
 ///
 /// # Examples
@@ -104,7 +104,7 @@ pub type MarkdownEvents<'a> = Vec<Event<'a>>;
 ///
 /// ## Change note contents
 ///
-/// In this example a note's markdown content is changed by iterating over the [MarkdownEvents] and
+/// In this example a note's markdown content is changed by iterating over the [`MarkdownEvents`] and
 /// changing the text when we encounter a [text element][Event::Text].
 ///
 /// Instead of using a closure like above, this example shows how to use a separate function
@@ -132,9 +132,8 @@ pub type MarkdownEvents<'a> = Vec<Event<'a>>;
 /// exporter.add_postprocessor(&foo_to_bar);
 /// # exporter.run().unwrap();
 /// ```
 pub type Postprocessor<'f> =
-    dyn Fn(&mut Context, &mut MarkdownEvents) -> PostprocessorResult + Send + Sync + 'f;
+    dyn Fn(&mut Context, &mut MarkdownEvents<'_>) -> PostprocessorResult + Send + Sync + 'f;

 type Result<T, E = ExportError> = std::result::Result<T, E>;

 const PERCENTENCODE_CHARS: &AsciiSet = &CONTROLS.add(b' ').add(b'(').add(b')').add(b'%').add(b'?');
@@ -142,7 +141,7 @@ const NOTE_RECURSION_LIMIT: usize = 10;
 #[non_exhaustive]
 #[derive(Debug, Snafu)]
-/// ExportError represents all errors which may be returned when using this crate.
+/// `ExportError` represents all errors which may be returned when using this crate.
 pub enum ExportError {
     #[snafu(display("failed to read from '{}'", path.display()))]
     /// This occurs when a read IO operation fails.
@@ -205,8 +204,9 @@ pub enum ExportError {
     },
 }

-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
 /// Emitted by [Postprocessor]s to signal the next action to take.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+#[non_exhaustive]
 pub enum PostprocessorResult {
     /// Continue with the next post-processor (if any).
     Continue,
@@ -236,7 +236,7 @@ pub struct Exporter<'a> {
 }

 impl<'a> fmt::Debug for Exporter<'a> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         f.debug_struct("WalkOptions")
             .field("root", &self.root)
             .field("destination", &self.destination)
@@ -265,8 +265,9 @@ impl<'a> fmt::Debug for Exporter<'a> {
 impl<'a> Exporter<'a> {
     /// Create a new exporter which reads notes from `root` and exports these to
     /// `destination`.
-    pub fn new(root: PathBuf, destination: PathBuf) -> Exporter<'a> {
-        Exporter {
+    #[must_use]
+    pub fn new(root: PathBuf, destination: PathBuf) -> Self {
+        Self {
             start_at: root.clone(),
             root,
             destination,
@@ -283,19 +284,19 @@ impl<'a> Exporter<'a> {
     ///
     /// Normally all notes under `root` (except for notes excluded by ignore rules) will be exported.
     /// When `start_at` is set, only notes under this path will be exported to the target destination.
-    pub fn start_at(&mut self, start_at: PathBuf) -> &mut Exporter<'a> {
+    pub fn start_at(&mut self, start_at: PathBuf) -> &mut Self {
         self.start_at = start_at;
         self
     }

     /// Set the [`WalkOptions`] to be used for this exporter.
-    pub fn walk_options(&mut self, options: WalkOptions<'a>) -> &mut Exporter<'a> {
+    pub fn walk_options(&mut self, options: WalkOptions<'a>) -> &mut Self {
         self.walk_options = options;
         self
     }

     /// Set the [`FrontmatterStrategy`] to be used for this exporter.
-    pub fn frontmatter_strategy(&mut self, strategy: FrontmatterStrategy) -> &mut Exporter<'a> {
+    pub fn frontmatter_strategy(&mut self, strategy: FrontmatterStrategy) -> &mut Self {
         self.frontmatter_strategy = strategy;
         self
     }
@@ -304,23 +305,23 @@ impl<'a> Exporter<'a> {
     ///
     /// When `recursive` is true (the default), emdeds are always processed recursively. This may
     /// lead to infinite recursion when note A embeds B, but B also embeds A.
-    /// (When this happens, [ExportError::RecursionLimitExceeded] will be returned by [Exporter::run]).
+    /// (When this happens, [`ExportError::RecursionLimitExceeded`] will be returned by [`Exporter::run`]).
     ///
     /// When `recursive` is false, if a note is encountered for a second time while processing the
     /// original note, instead of embedding it again a link to the note is inserted instead.
-    pub fn process_embeds_recursively(&mut self, recursive: bool) -> &mut Exporter<'a> {
+    pub fn process_embeds_recursively(&mut self, recursive: bool) -> &mut Self {
         self.process_embeds_recursively = recursive;
         self
     }

     /// Append a function to the chain of [postprocessors][Postprocessor] to run on exported Obsidian Markdown notes.
-    pub fn add_postprocessor(&mut self, processor: &'a Postprocessor) -> &mut Exporter<'a> {
+    pub fn add_postprocessor(&mut self, processor: &'a Postprocessor<'_>) -> &mut Self {
         self.postprocessors.push(processor);
         self
     }

     /// Append a function to the chain of [postprocessors][Postprocessor] for embeds.
-    pub fn add_embed_postprocessor(&mut self, processor: &'a Postprocessor) -> &mut Exporter<'a> {
+    pub fn add_embed_postprocessor(&mut self, processor: &'a Postprocessor<'_>) -> &mut Self {
         self.embed_postprocessors.push(processor);
         self
     }
@@ -408,27 +409,32 @@ impl<'a> Exporter<'a> {
             }
         }

-        let dest = context.destination;
-        let mut outfile = create_file(&dest)?;
+        let mut outfile = create_file(&context.destination)?;
         let write_frontmatter = match self.frontmatter_strategy {
             FrontmatterStrategy::Always => true,
             FrontmatterStrategy::Never => false,
             FrontmatterStrategy::Auto => !context.frontmatter.is_empty(),
         };
         if write_frontmatter {
-            let mut frontmatter_str = frontmatter_to_str(context.frontmatter)
+            let mut frontmatter_str = frontmatter_to_str(&context.frontmatter)
                 .context(FrontMatterEncodeSnafu { path: src })?;
             frontmatter_str.push('\n');
             outfile
                 .write_all(frontmatter_str.as_bytes())
-                .context(WriteSnafu { path: &dest })?;
+                .context(WriteSnafu {
+                    path: &context.destination,
+                })?;
         }

         outfile
-            .write_all(render_mdevents_to_mdtext(markdown_events).as_bytes())
-            .context(WriteSnafu { path: &dest })?;
+            .write_all(render_mdevents_to_mdtext(&markdown_events).as_bytes())
+            .context(WriteSnafu {
+                path: &context.destination,
+            })?;
         Ok(())
     }

+    #[allow(clippy::too_many_lines)]
+    #[allow(clippy::panic_in_result_fn)]
     fn parse_obsidian_note<'b>(
         &self,
         path: &Path,
@@ -441,7 +447,7 @@ impl<'a> Exporter<'a> {
         }

         let content = fs::read_to_string(path).context(ReadSnafu { path })?;
         let (frontmatter, content) =
-            matter::matter(&content).unwrap_or(("".to_string(), content.to_string()));
+            matter::matter(&content).unwrap_or((String::new(), content.clone()));
         let frontmatter =
             frontmatter_from_str(&frontmatter).context(FrontMatterDecodeSnafu { path })?;
@@ -616,7 +622,7 @@ impl<'a> Exporter<'a> {
                 }
                 events
             }
-            Some("png") | Some("jpg") | Some("jpeg") | Some("gif") | Some("webp") | Some("svg") => {
+            Some("png" | "jpg" | "jpeg" | "gif" | "webp" | "svg") => {
                 self.make_link_to_file(note_ref, &child_context)
                     .into_iter()
                     .map(|event| match event {
@@ -652,10 +658,10 @@ impl<'a> Exporter<'a> {
         reference: ObsidianNoteReference<'_>,
         context: &Context,
     ) -> MarkdownEvents<'c> {
-        let target_file = reference
-            .file
-            .map(|file| lookup_filename_in_vault(file, self.vault_contents.as_ref().unwrap()))
-            .unwrap_or_else(|| Some(context.current_file()));
+        let target_file = reference.file.map_or_else(
+            || Some(context.current_file()),
+            |file| lookup_filename_in_vault(file, self.vault_contents.as_ref().unwrap()),
+        );

         if target_file.is_none() {
             // TODO: Extract into configurable function.
@@ -693,7 +699,7 @@ impl<'a> Exporter<'a> {
             link.push_str(&slugify(section));
         }

-        let link_tag = pulldown_cmark::Tag::Link(
+        let link_tag = Tag::Link(
             pulldown_cmark::LinkType::Inline,
             CowStr::from(link),
             CowStr::from(""),
@@ -707,13 +713,13 @@ impl<'a> Exporter<'a> {
     }
 }

-/// Get the full path for the given filename when it's contained in vault_contents, taking into
+/// Get the full path for the given filename when it's contained in `vault_contents`, taking into
 /// account:
 ///
 /// 1. Standard Obsidian note references not including a .md extension.
 /// 2. Case-insensitive matching
 /// 3. Unicode normalization rules using normalization form C
-/// (https://www.w3.org/TR/charmod-norm/#unicodeNormalization)
+/// (<https://www.w3.org/TR/charmod-norm/#unicodeNormalization>)
 fn lookup_filename_in_vault<'a>(
     filename: &str,
     vault_contents: &'a [PathBuf],
@@ -737,7 +743,7 @@ fn lookup_filename_in_vault<'a>(
     })
 }

-fn render_mdevents_to_mdtext(markdown: MarkdownEvents) -> String {
+fn render_mdevents_to_mdtext(markdown: &MarkdownEvents<'_>) -> String {
     let mut buffer = String::new();
     cmark_with_options(
         markdown.iter(),
@@ -754,7 +760,7 @@ fn create_file(dest: &Path) -> Result<File> {
         .or_else(|err| {
             if err.kind() == ErrorKind::NotFound {
                 let parent = dest.parent().expect("file should have a parent directory");
-                std::fs::create_dir_all(parent)?
+                fs::create_dir_all(parent)?;
             }
             File::create(dest)
         })
@@ -763,13 +769,13 @@ fn create_file(dest: &Path) -> Result<File> {
 }

 fn copy_file(src: &Path, dest: &Path) -> Result<()> {
-    std::fs::copy(src, dest)
+    fs::copy(src, dest)
         .or_else(|err| {
             if err.kind() == ErrorKind::NotFound {
                 let parent = dest.parent().expect("file should have a parent directory");
-                std::fs::create_dir_all(parent)?
+                fs::create_dir_all(parent)?;
             }
-            std::fs::copy(src, dest)
+            fs::copy(src, dest)
         })
         .context(WriteSnafu { path: dest })?;
     Ok(())
@@ -791,7 +797,7 @@ fn reduce_to_section<'a>(events: MarkdownEvents<'a>, section: &str) -> MarkdownE
     let mut last_level = HeadingLevel::H1;
     let mut last_tag_was_heading = false;

-    for event in events.into_iter() {
+    for event in events {
         filtered_events.push(event.clone());
         match event {
             // FIXME: This should propagate fragment_identifier and classes.
@@ -831,7 +837,7 @@ fn reduce_to_section<'a>(events: MarkdownEvents<'a>, section: &str) -> MarkdownE
     filtered_events
 }

-fn event_to_owned<'a>(event: Event) -> Event<'a> {
+fn event_to_owned<'a>(event: Event<'_>) -> Event<'a> {
     match event {
         Event::Start(tag) => Event::Start(tag_to_owned(tag)),
         Event::End(tag) => Event::End(tag_to_owned(tag)),
@@ -848,7 +854,7 @@ fn event_to_owned<'a>(event: Event) -> Event<'a> {
     }
 }

-fn tag_to_owned<'a>(tag: Tag) -> Tag<'a> {
+fn tag_to_owned<'a>(tag: Tag<'_>) -> Tag<'a> {
     match tag {
         Tag::Paragraph => Tag::Paragraph,
         Tag::Heading(level, _fragment_identifier, _classes) => {
@@ -882,7 +888,7 @@ fn tag_to_owned<'a>(tag: Tag) -> Tag<'a> {
     }
 }

-fn codeblock_kind_to_owned<'a>(codeblock_kind: CodeBlockKind) -> CodeBlockKind<'a> {
+fn codeblock_kind_to_owned<'a>(codeblock_kind: CodeBlockKind<'_>) -> CodeBlockKind<'a> {
     match codeblock_kind {
         CodeBlockKind::Indented => CodeBlockKind::Indented,
         CodeBlockKind::Fenced(cowstr) => CodeBlockKind::Fenced(CowStr::from(cowstr.into_string())),
@@ -896,7 +902,7 @@ mod tests {
     use rstest::rstest;

     lazy_static! {
-        static ref VAULT: Vec<std::path::PathBuf> = vec![
+        static ref VAULT: Vec<PathBuf> = vec![
             PathBuf::from("NoteA.md"),
             PathBuf::from("Document.pdf"),
             PathBuf::from("Note.1.md"),
@@ -956,9 +962,9 @@ mod tests {
     #[case("Note\u{41}\u{308}", "Note\u{E4}.md")]
     fn test_lookup_filename_in_vault(#[case] input: &str, #[case] expected: &str) {
         let result = lookup_filename_in_vault(input, &VAULT);
-        println!("Test input: {:?}", input);
-        println!("Expecting: {:?}", expected);
+        println!("Test input: {input:?}");
+        println!("Expecting: {expected:?}");
         println!("Got: {:?}", result.unwrap_or(&PathBuf::from("")));
-        assert_eq!(result, Some(&PathBuf::from(expected)))
+        assert_eq!(result, Some(&PathBuf::from(expected)));
     }
 }
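
Two recurring rewrites in this file are nested `Some(..) | Some(..)` alternatives collapsing into a single or-pattern (clippy's `unnested_or_patterns`) and `.map(..).unwrap_or_else(..)` chains becoming `map_or_else` (clippy's `map_unwrap_or`). The same shapes on an invented, standalone example:

    // Illustrative only: the same refactoring shapes on a made-up function.
    fn describe(extension: Option<&str>) -> String {
        match extension {
            // One or-pattern instead of Some("png") | Some("jpg") | ...
            Some("png" | "jpg" | "jpeg" | "gif" | "webp" | "svg") => "image".to_owned(),
            // map_or_else replaces .map(..).unwrap_or_else(..): the first closure
            // handles None, the second handles Some.
            other => other.map_or_else(|| "unknown".to_owned(), str::to_owned),
        }
    }

    fn main() {
        assert_eq!(describe(Some("png")), "image");
        assert_eq!(describe(Some("md")), "md");
        assert_eq!(describe(None), "unknown");
    }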

View File

@@ -1,12 +1,13 @@
 use eyre::{eyre, Result};
 use gumdrop::Options;
-use obsidian_export::{postprocessors::*, ExportError};
-use obsidian_export::{Exporter, FrontmatterStrategy, WalkOptions};
+use obsidian_export::postprocessors::{filter_by_tags, softbreaks_to_hardbreaks};
+use obsidian_export::{ExportError, Exporter, FrontmatterStrategy, WalkOptions};
 use std::{env, path::PathBuf};

 const VERSION: &str = env!("CARGO_PKG_VERSION");

 #[derive(Debug, Options)]
+#[allow(clippy::struct_excessive_bools)]
 struct Opts {
     #[options(help = "Display program help")]
     help: bool,
@@ -76,7 +77,7 @@ fn main() {
     // version flag was specified. Without this, "missing required free argument" would get printed
     // when no other args are specified.
     if env::args().any(|arg| arg == "-v" || arg == "--version") {
-        println!("obsidian-export {}", VERSION);
+        println!("obsidian-export {VERSION}");
         std::process::exit(0);
     }
@@ -107,6 +108,9 @@ fn main() {
         exporter.start_at(path);
     }

+    #[allow(clippy::pattern_type_mismatch)]
+    #[allow(clippy::ref_patterns)]
+    #[allow(clippy::shadow_unrelated)]
     if let Err(err) = exporter.run() {
         match err {
             ExportError::FileExportError {
@@ -128,7 +132,7 @@ fn main() {
                     for (idx, path) in file_tree.iter().enumerate() {
                         eprintln!(" {}-> {}", " ".repeat(idx), path.display());
                     }
-                    eprintln!("\nHint: Ensure notes are non-recursive, or specify --no-recursive-embeds to break cycles")
+                    eprintln!("\nHint: Ensure notes are non-recursive, or specify --no-recursive-embeds to break cycles");
                 }
                 _ => eprintln!("Error: {:?}", eyre!(err)),
             },
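
The `println!` change inlines the captured variable into the format string, as clippy's `uninlined_format_args` suggests. A minimal standalone illustration (not this crate's code):

    fn main() {
        let version = "1.2.3";

        // Flagged style: the argument is passed positionally.
        println!("obsidian-export {}", version);

        // Preferred: the variable is captured by name in the format string
        // (supported since Rust 1.58).
        println!("obsidian-export {version}");
    }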

View File

@@ -8,7 +8,7 @@ use serde_yaml::Value;
 /// Obsidian's _'Strict line breaks'_ setting.
 pub fn softbreaks_to_hardbreaks(
     _context: &mut Context,
-    events: &mut MarkdownEvents,
+    events: &mut MarkdownEvents<'_>,
 ) -> PostprocessorResult {
     for event in events.iter_mut() {
         if event == &Event::SoftBreak {
@@ -21,8 +21,8 @@ pub fn softbreaks_to_hardbreaks(
 pub fn filter_by_tags(
     skip_tags: Vec<String>,
     only_tags: Vec<String>,
-) -> impl Fn(&mut Context, &mut MarkdownEvents) -> PostprocessorResult {
-    move |context: &mut Context, _events: &mut MarkdownEvents| -> PostprocessorResult {
+) -> impl Fn(&mut Context, &mut MarkdownEvents<'_>) -> PostprocessorResult {
+    move |context: &mut Context, _events: &mut MarkdownEvents<'_>| -> PostprocessorResult {
         match context.frontmatter.get("tags") {
             None => filter_by_tags_(&[], &skip_tags, &only_tags),
             Some(Value::Sequence(tags)) => filter_by_tags_(tags, &skip_tags, &only_tags),
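
The `MarkdownEvents<'_>` annotations satisfy `elided_lifetimes_in_paths`, which is enabled via the `rust_2018_idioms` group in Cargo.toml above: a type that carries a lifetime must show it, even anonymously. A standalone sketch with an invented type:

    #![warn(rust_2018_idioms)]

    struct Events<'a>(Vec<&'a str>);

    // Without <'_> the hidden lifetime in `Events` would trigger
    // `elided_lifetimes_in_paths`; the anonymous lifetime makes it explicit.
    fn count(events: &Events<'_>) -> usize {
        events.0.len()
    }

    fn main() {
        let events = Events(vec!["a", "b"]);
        println!("{}", count(&events));
    }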

View File

@@ -6,8 +6,8 @@ lazy_static! {
         Regex::new(r"^(?P<file>[^#|]+)??(#(?P<section>.+?))??(\|(?P<label>.+?))??$").unwrap();
 }

-#[derive(Debug, Clone, PartialEq, Eq)]
-/// ObsidianNoteReference represents the structure of a `[[note]]` or `![[embed]]` reference.
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+/// `ObsidianNoteReference` represents the structure of a `[[note]]` or `![[embed]]` reference.
 pub struct ObsidianNoteReference<'a> {
     /// The file (note name or partial path) being referenced.
     /// This will be None in the case that the reference is to a section within the same document
@@ -19,7 +19,7 @@ pub struct ObsidianNoteReference<'a> {
 }

 #[derive(PartialEq, Eq)]
-/// RefParserState enumerates all the possible parsing states [RefParser] may enter.
+/// `RefParserState` enumerates all the possible parsing states [`RefParser`] may enter.
 pub enum RefParserState {
     NoState,
     ExpectSecondOpenBracket,
@@ -29,13 +29,13 @@ pub enum RefParserState {
     Resetting,
 }

-/// RefType indicates whether a note reference is a link (`[[note]]`) or embed (`![[embed]]`).
+/// `RefType` indicates whether a note reference is a link (`[[note]]`) or embed (`![[embed]]`).
 pub enum RefType {
     Link,
     Embed,
 }

-/// RefParser holds state which is used to parse Obsidian WikiLinks (`[[note]]`, `![[embed]]`).
+/// `RefParser` holds state which is used to parse Obsidian `WikiLinks` (`[[note]]`, `![[embed]]`).
 pub struct RefParser {
     pub state: RefParserState,
     pub ref_type: Option<RefType>,
@@ -49,8 +49,8 @@ pub struct RefParser {
 }

 impl RefParser {
-    pub fn new() -> RefParser {
-        RefParser {
+    pub const fn new() -> Self {
+        Self {
             state: RefParserState::NoState,
             ref_type: None,
             ref_text: String::new(),
@@ -69,7 +69,7 @@ impl RefParser {
 }

 impl<'a> ObsidianNoteReference<'a> {
-    pub fn from_str(text: &str) -> ObsidianNoteReference {
+    pub fn from_str(text: &str) -> ObsidianNoteReference<'_> {
         let captures = OBSIDIAN_NOTE_LINK_RE
             .captures(text)
             .expect("note link regex didn't match - bad input?");
@@ -85,23 +85,23 @@ impl<'a> ObsidianNoteReference<'a> {
     }

     pub fn display(&self) -> String {
-        format!("{}", self)
+        format!("{self}")
     }
 }

 impl<'a> fmt::Display for ObsidianNoteReference<'a> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let label =
-            self.label
-                .map(|v| v.to_string())
-                .unwrap_or_else(|| match (self.file, self.section) {
-                    (Some(file), Some(section)) => format!("{} > {}", file, section),
-                    (Some(file), None) => file.to_string(),
-                    (None, Some(section)) => section.to_string(),
-                    _ => panic!("Reference exists without file or section!"),
-                });
-        write!(f, "{}", label)
+        let label = self.label.map_or_else(
+            || match (self.file, self.section) {
+                (Some(file), Some(section)) => format!("{file} > {section}"),
+                (Some(file), None) => file.to_owned(),
+                (None, Some(section)) => section.to_owned(),
+                _ => panic!("Reference exists without file or section!"),
+            },
+            ToString::to_string,
+        );
+        write!(f, "{label}")
     }
 }
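
Making `RefParser::new` a `const fn` (presumably prompted by the nursery lint `missing_const_for_fn`; the body only calls const-compatible constructors such as `String::new`) also allows the value to be built in const contexts. A rough illustration with a made-up type, not this crate's code:

    struct Counter {
        count: usize,
    }

    impl Counter {
        // `missing_const_for_fn` suggests `const` when the body only uses
        // operations allowed in const evaluation.
        const fn new() -> Self {
            Self { count: 0 }
        }
    }

    // A const constructor can initialize statics and consts directly.
    static START: Counter = Counter::new();

    fn main() {
        let counter = Counter::new();
        println!("{} {}", START.count, counter.count);
    }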

View File

@@ -7,8 +7,9 @@ use std::path::{Path, PathBuf};
 type Result<T, E = ExportError> = std::result::Result<T, E>;
 type FilterFn = dyn Fn(&DirEntry) -> bool + Send + Sync + 'static;

+/// `WalkOptions` specifies how an Obsidian vault directory is scanned for eligible files to export.
 #[derive(Clone)]
-/// WalkOptions specifies how an Obsidian vault directory is scanned for eligible files to export.
+#[allow(clippy::exhaustive_structs)]
 pub struct WalkOptions<'a> {
     /// The filename for ignore files, following the
     /// [gitignore](https://git-scm.com/docs/gitignore) syntax.
@@ -32,7 +33,7 @@ pub struct WalkOptions<'a> {
 }

 impl<'a> fmt::Debug for WalkOptions<'a> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let filter_fn_fmt = match self.filter_fn {
             Some(_) => "<function set>",
             None => "<not set>",
@@ -48,7 +49,8 @@ impl<'a> fmt::Debug for WalkOptions<'a> {
 impl<'a> WalkOptions<'a> {
     /// Create a new set of options using default values.
-    pub fn new() -> WalkOptions<'a> {
+    #[must_use]
+    pub fn new() -> Self {
         WalkOptions {
             ignore_filename: ".export-ignore",
             ignore_hidden: true,
@@ -83,12 +85,12 @@ impl<'a> Default for WalkOptions<'a> {
 }

 /// `vault_contents` returns all of the files in an Obsidian vault located at `path` which would be
-/// exported when using the given [WalkOptions].
-pub fn vault_contents(path: &Path, opts: WalkOptions) -> Result<Vec<PathBuf>> {
+/// exported when using the given [`WalkOptions`].
+pub fn vault_contents(root: &Path, opts: WalkOptions<'_>) -> Result<Vec<PathBuf>> {
     let mut contents = Vec::new();
-    let walker = opts.build_walker(path);
+    let walker = opts.build_walker(root);

     for entry in walker {
-        let entry = entry.context(WalkDirSnafu { path })?;
+        let entry = entry.context(WalkDirSnafu { path: root })?;
         let path = entry.path();
         let metadata = entry.metadata().context(WalkDirSnafu { path })?;
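
Renaming the `vault_contents` parameter from `path` to `root` avoids re-binding `path` to an unrelated value inside the loop, which the newly enabled `shadow_unrelated` restriction lint reports. A minimal standalone sketch (invented function, not this crate's code):

    #![warn(clippy::shadow_unrelated)]

    fn sizes(root: &std::path::Path) -> std::io::Result<Vec<u64>> {
        let mut sizes = Vec::new();
        for entry in std::fs::read_dir(root)? {
            // If the parameter were still called `path`, this binding would
            // shadow it with an unrelated value and trigger the lint.
            let path = entry?.path();
            sizes.push(std::fs::metadata(path)?.len());
        }
        Ok(sizes)
    }

    fn main() -> std::io::Result<()> {
        println!("{:?}", sizes(std::path::Path::new("."))?);
        Ok(())
    }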

View File

@@ -14,7 +14,7 @@ use walkdir::WalkDir;
 fn foo_to_bar(_ctx: &mut Context, events: &mut MarkdownEvents) -> PostprocessorResult {
     for event in events.iter_mut() {
         if let Event::Text(text) = event {
-            *event = Event::Text(CowStr::from(text.replace("foo", "bar")))
+            *event = Event::Text(CowStr::from(text.replace("foo", "bar")));
         }
     }
     PostprocessorResult::Continue
@@ -135,7 +135,7 @@ fn test_postprocessor_stateful_callback() {
     let expected = tmp_dir.path();

     let parents = parents.lock().unwrap();
-    println!("{:?}", parents);
+    println!("{parents:?}");
     assert_eq!(1, parents.len());
     assert!(parents.contains(expected));
 }
@@ -209,7 +209,7 @@ fn test_embed_postprocessors_context() {
             panic!(
                 "postprocessor: expected is_root_note in {} to be true, got false",
                 &ctx.current_file().display()
-            )
+            );
         }
         PostprocessorResult::Continue
     });
@@ -225,7 +225,7 @@ fn test_embed_postprocessors_context() {
             panic!(
                 "embed_postprocessor: expected is_root_note in {} to be false, got true",
                 &ctx.current_file().display()
-            )
+            );
         }
         PostprocessorResult::Continue
     });
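
The trailing semicolons added to `assert_eq!`, `panic!`, and the `*event = ...` assignment follow clippy's `semicolon_if_nothing_returned` lint from the pedantic group: a final statement whose value is `()` should end in a semicolon. A minimal illustration (not this crate's code):

    #![warn(clippy::semicolon_if_nothing_returned)]

    fn shout(text: &mut String) {
        // Without the trailing `;` the final expression evaluates to (),
        // which this lint reports.
        text.push('!');
    }

    fn main() {
        let mut text = String::from("hello");
        shout(&mut text);
        assert_eq!(text, "hello!");
    }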