docify_macros/
lib.rs

//! This crate contains the proc macros used by [docify](https://crates.io/crates/docify).

use common_path::common_path;
use derive_syn_parse::Parse;
use once_cell::sync::Lazy;
use proc_macro::TokenStream;
use proc_macro2::{Span, TokenStream as TokenStream2};
use quote::{quote, ToTokens};
use regex::Regex;
use std::{
    cmp::min,
    collections::HashMap,
    fs::{self, OpenOptions},
    io::Write,
    path::{Path, PathBuf},
    str::FromStr,
};
use syn::{
    parse2,
    spanned::Spanned,
    token::Paren,
    visit::{self, Visit},
    AttrStyle, Attribute, Error, File, Ident, ImplItem, Item, LitStr, Meta, Result, Token,
    TraitItem,
};
use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
use toml::{Table, Value};
use walkdir::WalkDir;

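/// Returns the byte position of the first character of the line containing `pos` within
/// `source`. Panics if `pos` is out of bounds.
///
/// A minimal sketch of the intended behavior:
/// ```ignore
/// assert_eq!(line_start_position("hello\nworld", 8), 6);
/// ```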
fn line_start_position<S: AsRef<str>>(source: S, pos: usize) -> usize {
    let source = source.as_ref();
    if source.len() <= pos {
        panic!(
            "The specified position ({}) is out of bounds for the source string (length {}).",
            pos,
            source.len()
        );
    }
    let mut cursor = 0;
    for line in source.lines() {
        cursor += line.len();
        if cursor > pos {
            return cursor - line.len();
        }
        cursor += 1; // '\n'
    }
    unreachable!()
}

fn fix_leading_indentation<S: AsRef<str>>(source: S) -> String {
    let source = source.as_ref();
    let mut shared_indent: Option<usize> = None;

    for line in source.lines() {
        if line.trim().is_empty() {
            continue; // Skip whitespace-only or empty lines
        }
        let prefix = &line[..(line.len() - line.trim_start().len())];
        if let Some(shared) = shared_indent {
            shared_indent = Some(std::cmp::min(prefix.len(), shared));
        } else {
            shared_indent = Some(prefix.len());
        }
    }

    let shared_indent = shared_indent.unwrap_or(0);
    let mut output_lines = source
        .lines()
        .map(|line| {
            if line.len() >= shared_indent {
                line[shared_indent..].to_string()
            } else {
                line.to_string()
            }
        })
        .collect::<Vec<String>>();

    // Add trailing newline if the source had it
    if source.ends_with('\n') {
        output_lines.push("".to_string());
    }

    output_lines.join("\n")
}

fn fix_indentation<S: AsRef<str>>(source: S) -> String {
    let source = source.as_ref();
    // let source = fix_first_line_indentation(source);
    let source = fix_leading_indentation(source);
    source
}

fn caller_crate_root() -> Option<PathBuf> {
    let crate_name =
        std::env::var("CARGO_PKG_NAME").expect("failed to read ENV var `CARGO_PKG_NAME`!");
    let current_dir = PathBuf::from(
        std::env::var("CARGO_MANIFEST_DIR").expect("failed to read ENV var `CARGO_MANIFEST_DIR`!"),
    );
    for entry in WalkDir::new(&current_dir)
        .into_iter()
        .filter_entry(|e| !e.file_name().eq_ignore_ascii_case("target"))
    {
        let Ok(entry) = entry else { continue };
        if !entry.file_type().is_file() {
            continue;
        }
        let Some(file_name) = entry.path().file_name() else {
            continue;
        };
        if !file_name.eq_ignore_ascii_case("Cargo.toml") {
            continue;
        }
        let Ok(cargo_toml) = std::fs::read_to_string(&entry.path()) else {
            continue;
        };
        let Ok(table) = Table::from_str(cargo_toml.as_str()) else {
            continue;
        };
        let Some(package) = table.get("package") else {
            continue;
        };
        let Some(Value::String(package_name)) = package.get("name") else {
            continue;
        };
        if package_name.eq_ignore_ascii_case(&crate_name) {
            return Some(entry.path().parent().unwrap().to_path_buf());
        }
    }
    None
}

/// Prettifies a long path so that leading segments other than the crate root are ignored
///
/// NOTE: unwraps [`caller_crate_root`], as you would only use this if that has already
/// evaluated to a non-`None` value.
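///
/// A minimal sketch of the intended behavior, assuming (hypothetically) that the caller's
/// crate root resolves to `/home/user/my-crate`:
/// ```ignore
/// // hypothetical absolute path, for illustration only
/// let pretty = prettify_path("/home/user/my-crate/src/lib.rs");
/// assert_eq!(pretty, PathBuf::from("src/lib.rs"));
/// ```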
fn prettify_path<P: AsRef<Path>>(path: P) -> PathBuf {
    let path = path.as_ref();
    if path.is_relative() {
        return path.into();
    }
    let Some(prefix) = common_path(caller_crate_root().unwrap(), path) else {
        return path.into();
    };
    path.components()
        .skip(prefix.components().collect::<Vec<_>>().len())
        .collect::<PathBuf>()
}

const DOCIFYING: &'static str = "   Docifying ";

/// Tries to write the specified string to the terminal in green+bold. Falls back to normal
/// `print!()`. Function is infallible.
fn write_green<S: AsRef<str>>(st: S) {
    let mut stdout = StandardStream::stdout(ColorChoice::Always);
    let _ = stdout.set_color(ColorSpec::new().set_fg(Some(Color::Green)).set_bold(true));
    if let Err(_) = write!(&mut stdout, "{}", st.as_ref()) {
        print!("{}", st.as_ref());
    }
    let _ = stdout.set_color(ColorSpec::new().set_fg(None).set_bold(false));
}

/// An item that may or may not have an inherent "name" ident.
trait NamedItem {
    /// Gets a copy of the inherent name ident of this item, if applicable.
    fn name_ident(&self) -> Option<Ident>;
}

impl NamedItem for Item {
    fn name_ident(&self) -> Option<Ident> {
        match self {
            Item::Const(item_const) => Some(item_const.ident.clone()),
            Item::Enum(item_enum) => Some(item_enum.ident.clone()),
            Item::ExternCrate(item_extern_crate) => Some(item_extern_crate.ident.clone()),
            Item::Fn(item_fn) => Some(item_fn.sig.ident.clone()),
            Item::Macro(item_macro) => item_macro.ident.clone(), // note: this one might not have an Ident
            Item::Mod(item_mod) => Some(item_mod.ident.clone()),
            Item::Static(item_static) => Some(item_static.ident.clone()),
            Item::Struct(item_struct) => Some(item_struct.ident.clone()),
            Item::Trait(item_trait) => Some(item_trait.ident.clone()),
            Item::TraitAlias(item_trait_alias) => Some(item_trait_alias.ident.clone()),
            Item::Type(item_type) => Some(item_type.ident.clone()),
            Item::Union(item_union) => Some(item_union.ident.clone()),
            // Item::ForeignMod(item_foreign_mod) => None,
            // Item::Use(item_use) => None,
            // Item::Impl(item_impl) => None,
            // Item::Verbatim(_) => None,
            _ => None,
        }
    }
}

impl NamedItem for ImplItem {
    fn name_ident(&self) -> Option<Ident> {
        match self {
            ImplItem::Const(impl_item_const) => Some(impl_item_const.ident.clone()),
            ImplItem::Fn(impl_item_fn) => Some(impl_item_fn.sig.ident.clone()),
            ImplItem::Type(impl_item_type) => Some(impl_item_type.ident.clone()),
            // ImplItem::Macro(impl_item_macro) => None,
            // ImplItem::Verbatim(impl_item_verbatim) => None,
            _ => None,
        }
    }
}

impl NamedItem for TraitItem {
    fn name_ident(&self) -> Option<Ident> {
        match self {
            TraitItem::Const(trait_item_const) => Some(trait_item_const.ident.clone()),
            TraitItem::Fn(trait_item_fn) => Some(trait_item_fn.sig.ident.clone()),
            TraitItem::Type(trait_item_type) => Some(trait_item_type.ident.clone()),
            // TraitItem::Macro(trait_item_macro) => None,
            // TraitItem::Verbatim(trait_item_verbatim) => None,
            _ => None,
        }
    }
}

/// Generalizes over items that have some underlying set of [`Attribute`] associated with them.
trait AttributedItem {
    /// Gets a reference to the underlying [`Vec`] of [`Attribute`]s for this item, if
    /// applicable. If not applicable, an empty [`Vec`] will be returned.
    fn item_attributes(&self) -> &Vec<Attribute>;

    /// Sets the underlying [`Vec`] of [`Attribute`]s for this item, if applicable. Panics if
    /// you attempt to use this on an inapplicable item!
    fn set_item_attributes(&mut self, attrs: Vec<Attribute>);
}

impl AttributedItem for Item {
    fn item_attributes(&self) -> &Vec<Attribute> {
        const EMPTY: &Vec<Attribute> = &Vec::new();
        match self {
            Item::Const(c) => &c.attrs,
            Item::Enum(e) => &e.attrs,
            Item::ExternCrate(e) => &e.attrs,
            Item::Fn(f) => &f.attrs,
            Item::ForeignMod(f) => &f.attrs,
            Item::Impl(i) => &i.attrs,
            Item::Macro(m) => &m.attrs,
            Item::Mod(m) => &m.attrs,
            Item::Static(s) => &s.attrs,
            Item::Struct(s) => &s.attrs,
            Item::Trait(t) => &t.attrs,
            Item::TraitAlias(t) => &t.attrs,
            Item::Type(t) => &t.attrs,
            Item::Union(u) => &u.attrs,
            Item::Use(u) => &u.attrs,
            _ => EMPTY,
        }
    }

    fn set_item_attributes(&mut self, attrs: Vec<Attribute>) {
        match self {
            Item::Const(c) => c.attrs = attrs,
            Item::Enum(e) => e.attrs = attrs,
            Item::ExternCrate(e) => e.attrs = attrs,
            Item::Fn(f) => f.attrs = attrs,
            Item::ForeignMod(f) => f.attrs = attrs,
            Item::Impl(i) => i.attrs = attrs,
            Item::Macro(m) => m.attrs = attrs,
            Item::Mod(m) => m.attrs = attrs,
            Item::Static(s) => s.attrs = attrs,
            Item::Struct(s) => s.attrs = attrs,
            Item::Trait(t) => t.attrs = attrs,
            Item::TraitAlias(t) => t.attrs = attrs,
            Item::Type(t) => t.attrs = attrs,
            Item::Union(u) => u.attrs = attrs,
            Item::Use(u) => u.attrs = attrs,
            _ => unimplemented!(),
        }
    }
}

impl AttributedItem for ImplItem {
    fn item_attributes(&self) -> &Vec<Attribute> {
        const EMPTY: &Vec<Attribute> = &Vec::new();
        match self {
            ImplItem::Const(impl_item_const) => &impl_item_const.attrs,
            ImplItem::Fn(impl_item_fn) => &impl_item_fn.attrs,
            ImplItem::Type(impl_item_type) => &impl_item_type.attrs,
            ImplItem::Macro(impl_item_macro) => &impl_item_macro.attrs,
            // ImplItem::Verbatim(impl_item_verbatim) => &EMPTY,
            _ => &EMPTY,
        }
    }

    fn set_item_attributes(&mut self, attrs: Vec<Attribute>) {
        match self {
            ImplItem::Const(impl_item_const) => impl_item_const.attrs = attrs,
            ImplItem::Fn(impl_item_fn) => impl_item_fn.attrs = attrs,
            ImplItem::Type(impl_item_type) => impl_item_type.attrs = attrs,
            ImplItem::Macro(impl_item_macro) => impl_item_macro.attrs = attrs,
            // ImplItem::Verbatim(impl_item_verbatim) => unimplemented!(),
            _ => unimplemented!(),
        }
    }
}

impl AttributedItem for TraitItem {
    fn item_attributes(&self) -> &Vec<Attribute> {
        const EMPTY: &Vec<Attribute> = &Vec::new();
        match self {
            TraitItem::Const(trait_item_const) => &trait_item_const.attrs,
            TraitItem::Fn(trait_item_fn) => &trait_item_fn.attrs,
            TraitItem::Type(trait_item_type) => &trait_item_type.attrs,
            TraitItem::Macro(trait_item_macro) => &trait_item_macro.attrs,
            // TraitItem::Verbatim(trait_item_verbatim) => &EMPTY,
            _ => &EMPTY,
        }
    }

    fn set_item_attributes(&mut self, attrs: Vec<Attribute>) {
        match self {
            TraitItem::Const(trait_item_const) => trait_item_const.attrs = attrs,
            TraitItem::Fn(trait_item_fn) => trait_item_fn.attrs = attrs,
            TraitItem::Type(trait_item_type) => trait_item_type.attrs = attrs,
            TraitItem::Macro(trait_item_macros) => trait_item_macros.attrs = attrs,
            // TraitItem::Verbatim(trait_item_verbatim) => unimplemented!(),
            _ => unimplemented!(),
        }
    }
}

/// Marks an item for export, making it available for embedding as a rust doc example via
/// [`docify::embed!(..)`](`macro@embed`) or [`docify::embed_run!(..)`](`macro@embed_run`).
///
/// By default, you can just call the attribute with no arguments like the following:
/// ```ignore
/// #[docify::export]
/// mod some_item {
///     fn some_func() {
///         println!("hello world");
///     }
/// }
/// ```
///
/// When you [`docify::embed!(..)`](`macro@embed`) this item, you will have to refer to it by
/// the primary ident associated with the item, in this case `some_item`. In some cases, such
/// as with `impl` statements, there is no clear main ident. You should handle these situations
/// by specifying an ident manually (not doing so will result in a compile error):
/// ```ignore
/// #[docify::export(some_name)]
/// impl SomeTrait for Something {
///     // ...
/// }
/// ```
///
/// You are also free to specify an alternate export name for items that _do_ have a clear
/// ident if you need/want to:
/// ```ignore
/// #[docify::export(SomeName)]
/// fn hello_world() {
///     println!("hello");
///     println!("world");
/// }
/// ```
///
/// When you go to [`docify::embed!(..)`](`macro@embed`) or
/// [`docify::embed_run!(..)`](`macro@embed_run`) such an item, you must refer to it by
/// `SomeName` (in this case), or whatever name you provided to `#[docify::export]`.
///
/// There is no guard to prevent duplicate export names in the same file, and export names are
/// all considered within the global namespace of the file in question (they do not exist
/// inside a particular module or scope within a source file). When using
/// [`docify::embed!(..)`](`macro@embed`), duplicate results are simply embedded one after
/// another, and this is by design.
///
/// If there are multiple items with the same inherent name in various scopes in the same
/// file, and you want to export just one of them as a doc example, you should specify a unique
/// ident as the export name for this item.
///
/// Note that if you wish to embed an _entire_ file, you don't need `#[docify::export]` at all
/// and can instead specify just a path to [`docify::embed!(..)`](`macro@embed`) or
/// [`docify::embed_run!(..)`](`macro@embed_run`).
#[proc_macro_attribute]
pub fn export(attr: TokenStream, tokens: TokenStream) -> TokenStream {
    match export_internal(attr, tokens) {
        Ok(tokens) => tokens.into(),
        Err(err) => err.to_compile_error().into(),
    }
}

/// Like [`#[docify::export]`](`macro@export`) but only exports the inner contents of whatever
/// item the attribute is attached to.
///
/// For example, given the following:
/// ```ignore
/// #[docify::export_content]
/// mod my_mod {
///     pub fn some_fun() {
///         println!("hello world!");
///     }
/// }
/// ```
///
/// only this part would be exported:
/// ```ignore
/// pub fn some_fun() {
///     println!("hello world!");
/// }
/// ```
///
/// Note that if [`#[docify::export_content]`](`macro@export_content`) is used on an item that
/// has no notion of inner contents, such as a type, static, or const declaration, it will
/// simply function like a regular [`#[docify::export]`](`macro@export`) attribute.
///
/// Supported items include:
/// - functions
/// - modules
/// - trait declarations
/// - trait impls
/// - basic blocks (when inside an outer macro pattern)
///
/// All other items will behave like they normally do with
/// [`#[docify::export]`](`macro@export`). Notably this includes structs and enums, because
/// while these items have a defined notion of "contents", those contents cannot stand on their
/// own as valid rust code.
#[proc_macro_attribute]
pub fn export_content(attr: TokenStream, tokens: TokenStream) -> TokenStream {
    match export_internal(attr, tokens) {
        Ok(tokens) => tokens.into(),
        Err(err) => err.to_compile_error().into(),
    }
}

/// Used to parse args for `#[export(..)]`
#[derive(Parse)]
struct ExportAttr {
    ident: Option<Ident>,
}

/// Internal implementation for `#[export]`
fn export_internal(
    attr: impl Into<TokenStream2>,
    tokens: impl Into<TokenStream2>,
) -> Result<TokenStream2> {
    let attr = parse2::<ExportAttr>(attr.into())?;
    let item = parse2::<Item>(tokens.into())?;

    // get export ident
    let _export_ident = attr.ident.or_else(|| item.name_ident()).ok_or_else(|| {
        Error::new(
            item.span(),
            "Cannot automatically detect ident from this item. \
            You will need to specify a name manually as the argument \
            for the #[export] attribute, i.e. #[export(my_name)].",
        )
    })?;

    Ok(quote!(#item))
}

/// Embeds the specified item from the specified source file in a rust doc example, with pretty
/// formatting enabled.
///
/// Should be used in a `#[doc = ...]` statement, like the following:
///
/// ```ignore
/// /// some doc comments here
/// #[doc = docify::embed!("path/to/file.rs", my_example)]
/// /// more doc comments
/// struct DocumentedItem;
/// ```
///
/// Which will expand to the `my_example` item in `path/to/file.rs` being embedded in a rust
/// doc example marked with `ignore`. If you want to have your example actually run in rust
/// docs as well, you should use [`docify::embed_run!(..)`](`macro@embed_run`).
///
/// ### Arguments
/// - `source_path`: the file path (relative to the current crate root) that contains the item
///   you would like to embed, represented as a string literal. If you wish to embed an entire
///   file, simply specify only a `source_path` with no other arguments and the entire file
///   will be embedded as a doc example. If the path cannot be read for whatever reason, a
///   compile error will be issued. The `source_path` _does not_ have to be a file that is
///   part of the current compilation unit, though typically it should be. The only requirement
///   is that it must contain valid Rust source code, and must be a descendant of the current
///   crate's root directory. While embedding files from a parent directory of the current
///   crate may work locally, this will fail when you go to deploy to `crates.io` and/or
///   `docs.rs`, so you should not use `../` or similar means unless you plan to never deploy
///   to these services.
/// - `item_ident`: (optional) can be specified after `source_path`, preceded by a comma. This
///   should match the export name you used to [`#[docify::export(..)]`](`macro@export`) the
///   item, or, if no export name was specified, this should match the inherent ident/name of
///   the item. If the item cannot be found, a compile error will be issued. As mentioned
///   above, if no `item_ident` is specified, the entire file will be embedded as an example.
///
/// All items in the `source_file` exist in the same global scope when they are exported for
/// embedding. Special care must be taken with how you
/// [`#[docify::export(..)]`](`macro@export`) items in order to get the item you want.
///
/// If there are multiple items in a file that resolve to the same `item_ident` (whether as an
/// inherent ident name or as a manually specified `item_ident`), and you embed using this
/// ident, all matching items will be embedded, one after another, listed in the order that
/// they appear in the `source_file`.
///
/// Here is an example of embedding an _entire_ source file as an example:
/// ```ignore
/// /// Here is a cool example module:
/// #[doc = docify::embed!("examples/my_example.rs")]
/// struct DocumentedItem
/// ```
///
/// You are also free to embed multiple examples in the same set of doc comments:
/// ```ignore
/// /// Example 1:
/// #[doc = docify::embed!("examples/example_1.rs")]
/// /// Example 2:
/// #[doc = docify::embed!("examples/example_2.rs")]
/// /// More docs
/// struct DocumentedItem;
/// ```
///
/// Note that all examples generated by `docify::embed!(..)` are set to `ignore` by default,
/// since they are typically already functioning examples or tests elsewhere in the project,
/// and so they do not need to be run as well in the context where they are being embedded. If
/// for whatever reason you _do_ want to also run an embedded example as a doc example, you can
/// use [`docify::embed_run!(..)`](`macro@embed_run`) which removes the `ignore` tag from the
/// generated example but otherwise functions exactly like `#[docify::embed!(..)]` in every
/// way.
///
/// Output should match `rustfmt` output exactly.
#[proc_macro]
pub fn embed(tokens: TokenStream) -> TokenStream {
    match embed_internal(tokens, MarkdownLanguage::Ignore) {
        Ok(tokens) => tokens.into(),
        Err(err) => err.to_compile_error().into(),
    }
}

/// Exactly like [`docify::embed!(..)`](`macro@embed`) in every way _except_ the generated
/// examples are also run automatically as rust doc examples (`ignore` is not included).
///
/// Other than this fact all of the usual docs and syntax and behaviors for
/// [`docify::embed!(..)`](`macro@embed`) also apply to this macro.
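///
/// For example (an illustrative sketch; the path and item name are hypothetical), usage is
/// identical to [`docify::embed!(..)`](`macro@embed`):
/// ```ignore
/// /// This embedded example will also be compiled and run as a doc test:
/// #[doc = docify::embed_run!("examples/my_example.rs", my_example)]
/// struct DocumentedItem;
/// ```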
#[proc_macro]
pub fn embed_run(tokens: TokenStream) -> TokenStream {
    match embed_internal(tokens, MarkdownLanguage::Blank) {
        Ok(tokens) => tokens.into(),
        Err(err) => err.to_compile_error().into(),
    }
}

/// Used to parse args for `docify::embed!(..)`
#[derive(Parse)]
struct EmbedArgs {
    file_path: LitStr,
    #[prefix(Option<Token![,]> as comma)]
    #[parse_if(comma.is_some())]
    item_ident: Option<Ident>,
}

impl ToTokens for EmbedArgs {
    fn to_tokens(&self, tokens: &mut TokenStream2) {
        tokens.extend(self.file_path.to_token_stream());
        let Some(item_ident) = &self.item_ident else {
            return;
        };
        tokens.extend(quote!(,));
        tokens.extend(item_ident.to_token_stream());
    }
}

mod keywords {
    use syn::custom_keyword;

    custom_keyword!(docify);
    custom_keyword!(embed);
}

/// Used to parse a full `docify::embed!(..)` call, as seen in markdown documents and other
/// embedded settings
#[derive(Parse)]
struct EmbedCommentCall {
    #[prefix(keywords::docify)]
    #[prefix(Token![::])]
    #[prefix(keywords::embed)]
    #[prefix(Token![!])]
    #[paren]
    _paren: Paren,
    #[inside(_paren)]
    args: EmbedArgs,
    _semi: Option<Token![;]>,
}

/// This corresponds with the string immediately following the "```" in codeblocks. Blank means
/// no language is specified. Ignore will cause the example not to run in rust docs.
#[derive(Copy, Clone, Eq, PartialEq)]
enum MarkdownLanguage {
    Ignore,
    Rust,
    Blank,
}

/// Converts a source string to a codeblock-wrapped example
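///
/// A minimal sketch of the expected behavior of the function below:
/// ```ignore
/// // wraps the snippet in a fenced codeblock tagged `rust`
/// let example = into_example("fn foo() {}", MarkdownLanguage::Rust);
/// assert_eq!(example.lines().count(), 3); // opening fence, the code line, closing fence
/// ```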
fn into_example(st: &str, lang: MarkdownLanguage) -> String {
    let mut lines: Vec<String> = Vec::new();
    match lang {
        MarkdownLanguage::Ignore => lines.push(String::from("```ignore")),
        MarkdownLanguage::Rust => lines.push(String::from("```rust")),
        MarkdownLanguage::Blank => lines.push(String::from("```")),
    }
    for line in st.lines() {
        lines.push(String::from(line));
    }
    lines.push(String::from("```"));
    lines.join("\n")
}

/// Generalizes over items that we support exporting via Docify, used by [`ItemVisitor`].
trait SupportedVisitItem<'ast> {
    fn visit_supported_item<T: NamedItem + AttributedItem + ToTokens + Clone>(
        &mut self,
        node: &'ast T,
    );
}

impl<'ast> SupportedVisitItem<'ast> for ItemVisitor {
    fn visit_supported_item<T: NamedItem + AttributedItem + ToTokens + Clone>(
        &mut self,
        node: &'ast T,
    ) {
        let mut i = 0;
        let attrs = node.item_attributes();
        for attr in attrs {
            i += 1; // note, 1-based
            let AttrStyle::Outer = attr.style else {
                continue;
            };
            let Some(last_seg) = attr.path().segments.last() else {
                continue;
            };
            let is_export_content = last_seg.ident == "export_content";
            if last_seg.ident != "export" && !is_export_content {
                continue;
            }
            let Some(second_to_last_seg) = attr.path().segments.iter().rev().nth(1) else {
                continue;
            };
            if second_to_last_seg.ident != last_seg.ident && second_to_last_seg.ident != "docify" {
                continue;
            }
            // we have found a #[something::docify::export] or #[docify::export] or
            // #[export]-style attribute
            // (OR any of the above but export_content)

            // resolve item_ident
            let item_ident = match &attr.meta {
                Meta::List(list) => match parse2::<Ident>(list.tokens.clone()) {
                    Ok(ident) => Some(ident),
                    Err(_) => None,
                },
                _ => None,
            };
            let item_ident = match item_ident {
                Some(ident) => ident,
                None => match node.name_ident() {
                    Some(ident) => ident,
                    None => continue,
                },
            };

            // check if this ident matches the one we're searching for
            if item_ident == self.search {
                let mut item = node.clone();
                // modify item's attributes to not include this one so this one is excluded
                // from the code example
                let attrs_without_this_one: Vec<Attribute> = attrs
                    .iter()
                    .enumerate()
                    .filter(|&(n, _)| n != i - 1)
                    .map(|(_, v)| v)
                    .cloned()
                    .collect();
                item.set_item_attributes(attrs_without_this_one);
                // add the item to results
                self.results.push((
                    item.to_token_stream(),
                    match is_export_content {
                        true => ResultStyle::ExportContent,
                        false => ResultStyle::Export,
                    },
                ));
                // no need to explore the attributes of this item further, it is already in results
                break;
            }
        }
    }
}

#[derive(Copy, Clone, PartialEq, Eq)]
enum ResultStyle {
    Export,
    ExportContent,
}

/// Visitor pattern for finding items
struct ItemVisitor {
    search: Ident,
    results: Vec<(TokenStream2, ResultStyle)>,
}

impl<'ast> Visit<'ast> for ItemVisitor {
    fn visit_trait_item(&mut self, node: &'ast TraitItem) {
        self.visit_supported_item(node);
        visit::visit_trait_item(self, node);
    }

    fn visit_impl_item(&mut self, node: &'ast ImplItem) {
        self.visit_supported_item(node);
        visit::visit_impl_item(self, node);
    }

    fn visit_item(&mut self, node: &'ast Item) {
        self.visit_supported_item(node);
        visit::visit_item(self, node);
    }
}

/// Abstraction for a character that has been transposed/offset from its original position in
/// the original string in which it appeared (i.e. if the string has been compressed in some way)
#[derive(Copy, Clone, Eq, PartialEq)]
struct OffsetChar {
    char: char,
    original_pos: usize,
}

impl OffsetChar {
    fn new(char: char, original_pos: usize) -> OffsetChar {
        OffsetChar { char, original_pos }
    }
}

/// Used to mark an entity within a piece of source code. Used with [`CompressedString`].
#[derive(Clone, PartialEq, Eq)]
struct SourceEntity {
    start: usize,
    end: usize,
}

impl SourceEntity {
    pub fn new(start: usize, end: usize) -> SourceEntity {
        SourceEntity { start, end }
    }

    /// Marks the character positions corresponding with this entity as belonging to this
    /// entity in the enclosing [`CompressedString`].
    pub fn claim(&self, claimed: &mut Vec<bool>) {
        for i in self.start..min(self.end, claimed.len()) {
            claimed[i] = true;
        }
    }

    /// Returns `true` if this entity already appears in the specified claimed vec
    pub fn is_claimed(&self, claimed: &Vec<bool>) -> bool {
        claimed[(self.start + self.end) / 2]
    }

    // pub fn value<'a>(&self, source: &'a String) -> &'a str {
    //     &source.as_str()[self.start..self.end]
    // }

    // pub fn contains(&self, x: usize) -> bool {
    //     x >= self.start && x < self.end
    // }
}

/// Represents a [`String`] that has been compressed in some way, and includes data structures
/// allowing us to map individual characters back to their original positions in the
/// uncompressed version of the [`String`].
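///
/// A minimal sketch of the intended behavior (whitespace and claimed entities such as
/// comments are dropped, while each surviving character remembers its original byte offset):
/// ```ignore
/// let original = String::from("fn foo() { }");
/// let compressed = CompressedString::from(&original);
/// assert_eq!(compressed.to_string(), "fnfoo(){}");
/// ```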
struct CompressedString {
    chars: HashMap<usize, OffsetChar>,
    chars_arr: Vec<OffsetChar>,
}

impl CompressedString {
    fn to_string(&self) -> String {
        self.chars_arr.iter().map(|c| c.char).collect()
    }
}

static DOCIFY_ATTRIBUTES: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r"\n?\#\[(?:\w+\s*::\s*)*(?:export|export_content)(?:\s*\(\s*(\w+)\s*\))?\]\n?")
        .unwrap()
});

static DOC_COMMENT: Lazy<Regex> = Lazy::new(|| Regex::new(r"///.*").unwrap());
static DOC_COMMENT_ATTR: Lazy<Regex> =
    Lazy::new(|| Regex::new(r#"#\[doc\s*=\s*".*"\s*]"#).unwrap());
static LINE_COMMENT: Lazy<Regex> = Lazy::new(|| Regex::new(r"//.*").unwrap());
static MULTI_LINE_COMMENT: Lazy<Regex> = Lazy::new(|| Regex::new(r"/\*[\s\S]*?\*/").unwrap());
static HTML_COMMENT: Lazy<Regex> = Lazy::new(|| Regex::new(r"<!--[\s\S]*?-->").unwrap());
static MARKDOWN_CODEBLOCK: Lazy<Regex> = Lazy::new(|| Regex::new(r"```[\s\S]*?```").unwrap());
// static ref STRING_LIT: Regex = Regex::new(r#"("([^"\\]|\\[\s\S])*")"#).unwrap();

impl From<&String> for CompressedString {
    fn from(value: &String) -> Self {
        let mut entities: Vec<SourceEntity> = Vec::new();
        let mut claimed: Vec<bool> = vec![false; value.len()];
        for m in DOC_COMMENT.find_iter(value) {
            let entity = SourceEntity::new(m.start(), m.end());
            entity.claim(&mut claimed);
            entities.push(entity);
        }
        for m in DOC_COMMENT_ATTR.find_iter(value) {
            let entity = SourceEntity::new(m.start(), m.end());
            if !entity.is_claimed(&claimed) {
                entity.claim(&mut claimed);
                entities.push(entity);
            }
        }
        for m in MULTI_LINE_COMMENT.find_iter(value) {
            let entity = SourceEntity::new(m.start(), m.end());
            if !entity.is_claimed(&claimed) {
                entity.claim(&mut claimed);
                entities.push(entity);
            }
        }
        for m in LINE_COMMENT.find_iter(value) {
            let entity = SourceEntity::new(m.start(), m.end());
            if !entity.is_claimed(&claimed) {
                entity.claim(&mut claimed);
                entities.push(entity);
            }
        }
        for m in DOCIFY_ATTRIBUTES.find_iter(value) {
            let entity = SourceEntity::new(m.start(), m.end());
            if !entity.is_claimed(&claimed) {
                entity.claim(&mut claimed);
                entities.push(entity);
            }
        }
        let mut compressed = CompressedString {
            chars_arr: Vec::new(),
            chars: HashMap::new(),
        };
        let mut cursor = 0;
        let mut byte_index = 0;
        while byte_index < value.len() {
            let current_char = &value[byte_index..].chars().next().unwrap(); // get the current character
            let char_len = current_char.len_utf8(); // get its length in bytes

            if claimed[byte_index] || current_char.is_whitespace() {
                byte_index += char_len;
                continue;
            }
            let oc = OffsetChar::new(*current_char, byte_index);
            compressed.chars.insert(cursor, oc);
            compressed.chars_arr.push(oc);
            cursor += 1;
            byte_index += char_len;
        }

        compressed
    }
}

/// Responsible for retrieving the "contents" of an item, used by `#[docify::export_content]`
fn get_content_tokens<'a>(item: &'a Item) -> TokenStream2 {
    match item {
        // Item::Const(item_const) => item_const.to_token_stream(),
        // Item::Enum(item_enum) => item_enum.to_token_stream(),
        // Item::ExternCrate(item_extern) => item_extern.to_token_stream(),
        Item::Fn(item_fn) => {
            let mut tokens = TokenStream2::new();
            tokens.extend(item_fn.block.stmts.iter().map(|t| t.to_token_stream()));
            tokens
        }
        Item::ForeignMod(item_mod) => {
            let mut tokens = TokenStream2::new();
            tokens.extend(item_mod.items.iter().map(|t| t.to_token_stream()));
            tokens
        }
        Item::Impl(item_impl) => {
            let mut tokens = TokenStream2::new();
            tokens.extend(item_impl.items.iter().map(|t| t.to_token_stream()));
            tokens
        }
        // Item::Macro(item_macro) => item_macro.to_token_stream(),
        Item::Mod(item_mod) => {
            let Some(content) = &item_mod.content else {
                return item_mod.to_token_stream();
            };
            let mut tokens = TokenStream2::new();
            tokens.extend(content.1.iter().map(|t| t.to_token_stream()));
            tokens
        }
        // Item::Static(item_static) => item_static.to_token_stream(),
        // Item::Struct(item_struct) => item_struct.to_token_stream(),
        Item::Trait(item_trait) => {
            let mut tokens = TokenStream2::new();
            tokens.extend(item_trait.items.iter().map(|t| t.to_token_stream()));
            tokens
        }
        Item::TraitAlias(item_trait_alias) => item_trait_alias.to_token_stream(),
        // Item::Type(item_type) => item_type.to_token_stream(),
        // Item::Union(item_union) => item_union.to_token_stream(),
        // Item::Use(item_use) => item_use.to_token_stream(),
        // Item::Verbatim(item_verbatim) => item_verbatim.to_token_stream(),
        _ => item.to_token_stream(),
    }
}

/// Finds the specified [`Item`] within a source text string and returns the exact source
/// code of that item, without any formatting changes. If span locations are stabilized,
/// this can be removed along with most of the [`CompressedString`] machinery.
fn source_excerpt<'a, T: ToTokens>(
    source: &'a String,
    item: &'a T,
    style: ResultStyle,
) -> Result<String> {
    // note: can't rely on span locations because this requires nightly and/or is otherwise
    // bugged
    let compressed_source = CompressedString::from(source);
    let item_tokens = match style {
        ResultStyle::Export => item.to_token_stream(),
        ResultStyle::ExportContent => get_content_tokens(&parse2::<Item>(item.to_token_stream())?),
    };
    let compressed_item = CompressedString::from(&item_tokens.to_string());
    let compressed_source_string = compressed_source.to_string();
    let compressed_item_string = compressed_item.to_string();
    let Some(found_start) = compressed_source_string.find(compressed_item_string.as_str()) else {
        return Err(Error::new(
            item.span(),
            "You have found a bug in docify! Please submit a new GitHub issue at \
            https://github.com/sam0x17/docify/issues/new?title=%60source_excerpt\
            %60%3A%20can%27t%20find%20item%20in%20source with a sample of the item \
            you are trying to embed.",
        ));
    };
    let start_c = compressed_source.chars[&found_start];
    let start_pos = start_c.original_pos;
    let start_pos = line_start_position(source, start_pos);
    let end_c = compressed_source.chars[&(found_start + compressed_item_string.len() - 1)];
    let end_pos = end_c.original_pos;
    let final_excerpt = &source[start_pos..min(end_pos + 1, source.len())];
    Ok(final_excerpt
        .lines()
        .filter(|line| !(DOCIFY_ATTRIBUTES.is_match(line) && !line.trim().starts_with("//")))
        .collect::<Vec<&str>>()
        .join("\n"))
}

/// Inner version of [`embed_internal`] that just returns the result as a [`String`].
fn embed_internal_str(tokens: impl Into<TokenStream2>, lang: MarkdownLanguage) -> Result<String> {
    let args = parse2::<EmbedArgs>(tokens.into())?;
    // return blank result if we can't properly resolve `caller_crate_root`
    let Some(root) = caller_crate_root() else {
        return Ok(String::from(""));
    };
    let file_path = root.join(args.file_path.value());
    let source_code = match fs::read_to_string(&file_path) {
        Ok(src) => src,
        Err(_) => {
            return Err(Error::new(
                args.file_path.span(),
                format!(
                    "Could not read the specified path '{}'.",
                    file_path.display(),
                ),
            ))
        }
    };
    let parsed = source_code.parse::<TokenStream2>()?;
    let source_file = parse2::<File>(parsed)?;

    let output = if let Some(ident) = args.item_ident {
        let mut visitor = ItemVisitor {
            search: ident.clone(),
            results: Vec::new(),
        };
        visitor.visit_file(&source_file);
        if visitor.results.is_empty() {
            return Err(Error::new(
                ident.span(),
                format!(
                    "Could not find docify export item '{}' in '{}'.",
                    ident,
                    file_path.display(),
                ),
            ));
        }
        let mut results: Vec<String> = Vec::new();
        for (item, style) in visitor.results {
            let excerpt = source_excerpt(&source_code, &item, style)?;
            let formatted = fix_indentation(excerpt);
            let example = into_example(formatted.as_str(), lang);
            results.push(example);
        }
        results.join("\n")
    } else {
        into_example(source_code.as_str(), lang)
    };
    Ok(output)
}

/// Internal implementation behind [`macro@embed`].
fn embed_internal(tokens: impl Into<TokenStream2>, lang: MarkdownLanguage) -> Result<TokenStream2> {
    let output = embed_internal_str(tokens, lang)?;
    Ok(quote!(#output))
}

/// Used to parse args for [`macro@compile_markdown`].
#[derive(Parse)]
struct CompileMarkdownArgs {
    input: LitStr,
    #[prefix(Option<Token![,]> as comma)]
    #[parse_if(comma.is_some())]
    output: Option<LitStr>,
}

/// Internal implementation behind [`macro@compile_markdown`].
fn compile_markdown_internal(tokens: impl Into<TokenStream2>) -> Result<TokenStream2> {
    let args = parse2::<CompileMarkdownArgs>(tokens.into())?;
    if args.input.value().is_empty() {
        return Err(Error::new(args.input.span(), "Input path cannot be blank!"));
    }
    let input_path = std::path::PathBuf::from(&args.input.value());
    // return blank result if we can't properly resolve `caller_crate_root`
    let Some(root) = caller_crate_root() else {
        return Ok(quote!());
    };
    let input_path = root.join(input_path);
    if !input_path.exists() {
        return Err(Error::new(
            args.input.span(),
            format!(
                "Could not read the specified path '{}'.",
                input_path.display(),
            ),
        ));
    }
    if let Some(output) = args.output {
        if output.value().is_empty() {
            return Err(Error::new(
                output.span(),
                "If specified, output path cannot be blank!",
            ));
        }
        let output = root.join(output.value());
        if input_path.is_dir() {
            compile_markdown_dir(input_path, format!("{}", output.display()))?;
        } else {
            if cfg!(not(test)) {
                write_green(DOCIFYING);
                println!(
                    "{} {} {}",
                    prettify_path(&input_path).display(),
                    "=>", // TODO: fancy arrow
                    prettify_path(&output).display(),
                );
            }
            let Ok(source) = fs::read_to_string(&input_path) else {
                return Err(Error::new(
                    Span::call_site(),
                    format!("Failed to read markdown file at '{}'", input_path.display()),
                ));
            };
            let compiled = compile_markdown_source(source.as_str())?;
            let Ok(_) = overwrite_file(&output, &compiled) else {
                return Err(Error::new(
                    Span::call_site(),
                    format!("Failed to write to '{}'", output.display()),
                ));
            };
        }
        Ok(quote!())
    } else {
        if input_path.is_dir() {
            return Err(Error::new(
                args.input.span(),
                "Only individual files are supported with no output path; you specified a directory."
            ));
        }
        let Ok(source) = fs::read_to_string(&input_path) else {
            return Err(Error::new(
                Span::call_site(),
                format!("Failed to read markdown file at '{}'", input_path.display()),
            ));
        };
        let compiled = compile_markdown_source(source.as_str())?;
        Ok(quote!(#compiled))
    }
}

/// Takes in a `path` and re-writes it as a subpath in `target_dir`.
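///
/// A minimal sketch of the intended behavior (hypothetical paths, for illustration only):
/// ```ignore
/// let dest = transpose_subpath("/docs/src", "/docs/src/guide/intro.md", "/docs/out");
/// assert_eq!(dest, PathBuf::from("/docs/out/guide/intro.md"));
/// ```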
fn transpose_subpath<P1: AsRef<Path>, P2: AsRef<Path>, P3: AsRef<Path>>(
    input_dir: P1,
    path: P2,
    target_dir: P3,
) -> PathBuf {
    let prefix = common_path(input_dir, &path).unwrap();
    Path::join(
        target_dir.as_ref(),
        path.as_ref()
            .components()
            .skip(prefix.components().collect::<Vec<_>>().len())
            .collect::<PathBuf>(),
    )
}

/// Overwrites or creates a file at the specified path and populates it with the specified
/// data. Will only overwrite the file if the data is different from what is already there.
fn overwrite_file<P: AsRef<Path>, D: AsRef<[u8]>>(path: P, data: D) -> std::io::Result<()> {
    if path.as_ref().exists() {
        if let Ok(existing) = fs::read(path.as_ref()) {
            if existing == data.as_ref() {
                return Ok(());
            }
        }
    }
    let mut f = OpenOptions::new()
        .write(true)
        .truncate(true)
        .create(true)
        .open(path)?;
    f.write_all(data.as_ref())?;
    f.flush()?;
    Ok(())
}

/// Docifies a directory of markdown files
fn compile_markdown_dir<P1: AsRef<Path>, P2: AsRef<Path>>(
    input_dir: P1,
    output_dir: P2,
) -> Result<()> {
    // recursively walk all files in input_dir
    for entry in WalkDir::new(&input_dir)
        .into_iter()
        .filter_map(std::result::Result::ok)
        .filter(|e| {
            if !e.file_type().is_file() && !e.file_type().is_symlink() {
                return false;
            }
            let Some(ext) = e.path().extension() else {
                return false;
            };
            if ext.eq_ignore_ascii_case("md") {
                return true;
            }
            false
        })
    {
        let src_path = entry.path();
        let dest_path = transpose_subpath(&input_dir, &src_path, &output_dir);
        if cfg!(not(test)) {
            write_green(DOCIFYING);
            println!(
                "{} {} {}",
                prettify_path(&src_path).display(),
                "=>", // TODO: fancy arrow
                prettify_path(&dest_path).display(),
            );
        }
        if let Some(parent) = dest_path.parent() {
            let Ok(_) = fs::create_dir_all(parent) else {
                return Err(Error::new(
                    Span::call_site(),
                    format!("Failed to create output directory '{}'", parent.display()),
                ));
            };
        }
        let Ok(source) = fs::read_to_string(src_path) else {
            return Err(Error::new(
                Span::call_site(),
                format!("Failed to read markdown file at '{}'", src_path.display()),
            ));
        };
        let compiled = compile_markdown_source(source.as_str())?;
        if let Some(parent) = dest_path.parent() {
            let Ok(_) = fs::create_dir_all(parent) else {
                return Err(Error::new(
                    Span::call_site(),
                    format!("Failed to create directory '{}'", parent.display()),
                ));
            };
        }
        let Ok(_) = overwrite_file(&dest_path, &compiled) else {
            return Err(Error::new(
                Span::call_site(),
                format!("Failed to write to '{}'", dest_path.display()),
            ));
        };
    }
    Ok(())
}

/// Docifies the specified markdown source string
fn compile_markdown_source<S: AsRef<str>>(source: S) -> Result<String> {
    let source = source.as_ref();
    if source.is_empty() {
        return Ok(String::from(""));
    }
    let mut claimed: Vec<bool> = source.chars().map(|_| false).collect();
    for m in MARKDOWN_CODEBLOCK.find_iter(source) {
        let entity = SourceEntity::new(m.start(), m.end());
        entity.claim(&mut claimed);
    }
    let mut output: Vec<String> = Vec::new();
    let mut prev_end = 0;
    for m in HTML_COMMENT.find_iter(source) {
        let entity = SourceEntity::new(m.start(), m.end());
        if entity.is_claimed(&claimed) {
            // skip HTML comments that are inside of codeblocks
            continue;
        }
        // push prefix
        output.push(String::from(&source[prev_end..m.start()]));
        // get comment
        let orig_comment = &source[m.start()..m.end()];
        // strip <!-- -->
        let comment = &orig_comment[4..(orig_comment.len() - 3)].trim();
        if comment.starts_with("docify") {
            let args = parse2::<EmbedCommentCall>(comment.parse()?)?.args;
            let compiled = embed_internal_str(args.to_token_stream(), MarkdownLanguage::Rust)?;
            output.push(compiled);
        } else {
            output.push(String::from(orig_comment));
        }
        prev_end = m.end();
    }
    // push remaining portion of document if applicable
    if prev_end < source.len() - 1 {
        output.push(String::from(&source[prev_end..]));
    }
    Ok(output.join(""))
}

/// Allows you to use [`docify::embed!(..)`](`macro@embed`) within markdown source files via
/// HTML comments and compiles the result for you (at compile-time).
///
/// The macro supports embed syntax within markdown files like the following:
/// ```markdown
/// # This is some markdown
/// <!-- docify::embed!("some/rust/file.rs", some_ident) -->
/// ```
///
/// This will expand the `some_ident` exported item in `some/rust/file.rs` into a Rust
/// codeblock as a replacement for the HTML comment, i.e.:
///
/// ````markdown
/// # This is some markdown
/// ```rust
/// fn hello_world() {
///     println!("hello!");
/// }
/// ```
/// ````
///
/// There are two supported arguments, of the form:
/// ```ignore
/// docify::compile_markdown!("input_path", "output_path");
/// ```
///
/// If `input_path` is a directory, then all markdown files (recursively) found within
/// `input_path` will be processed (expanded) and placed in their respective locations relative
/// to `output_path`.
///
/// If `input_path` is a file and `output_path` is specified, then `input_path` will be loaded
/// as a markdown file, processed, and saved to `output_path` (which must be a file path, not a
/// directory).
///
/// If only `input_path` is specified, then it is assumed to be a file, which is loaded as
/// markdown, processed, and the result is returned as a string literal.
///
/// While files are compiling, terminal output is produced such as:
/// ```txt
/// Docifying fixtures/subfolder/file_2.md => test_bin/subfolder/file_2.md
/// ```
///
/// ## Conventions
///
/// We encourage crate authors to feature-gate their `compile_markdown!` calls like we do for
/// the `README.md` file in this crate:
///
/// ```ignore
/// #[cfg(all(doc, feature = "generate-readme"))]
/// compile_markdown!("README.docify.md", "README.md");
/// ```
///
/// This way the `README.md` will not regenerate itself every time a user of your crate runs
/// `cargo doc` unless they explicitly enable the `generate-readme` feature for your crate.
///
/// Another convention we encourage, shown above, is naming template files `foo.docify.md` so
/// they can exist alongside the generated `foo.md` file without collisions.
#[proc_macro]
pub fn compile_markdown(tokens: TokenStream) -> TokenStream {
    match compile_markdown_internal(tokens) {
        Ok(tokens) => tokens.into(),
        Err(err) => err.to_compile_error().into(),
    }
}

#[cfg(test)]
mod tests;