1use common_path::common_path;
4use derive_syn_parse::Parse;
5use once_cell::sync::Lazy;
6use proc_macro::TokenStream;
7use proc_macro2::{Span, TokenStream as TokenStream2};
8use quote::{quote, ToTokens};
9use regex::Regex;
10use std::{
11 cmp::min,
12 collections::HashMap,
13 fs::{self, OpenOptions},
14 io::Write,
15 path::{Path, PathBuf},
16 str::FromStr,
17};
18use syn::{
19 parse2,
20 spanned::Spanned,
21 token::Paren,
22 visit::{self, Visit},
23 AttrStyle, Attribute, Error, File, Ident, ImplItem, Item, LitStr, Meta, Result, Token,
24 TraitItem,
25};
26use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
27use toml::{Table, Value};
28use walkdir::WalkDir;
29
/// Returns the byte position of the first character of the line that
/// contains byte position `pos` in `source`.
///
/// # Panics
/// Panics if `pos` is beyond the end of `source`.
fn line_start_position<S: AsRef<str>>(source: S, pos: usize) -> usize {
    let source = source.as_ref();
    if source.len() <= pos {
        panic!(
            "The specified position ({}) is longer than the source string length ({}).",
            pos,
            source.len()
        );
    }
    let mut cursor = 0;
    for line in source.lines() {
        let line_start = cursor;
        cursor += line.len();
        if cursor > pos {
            return line_start;
        }
        // Advance past the line terminator. `lines()` swallows both "\n" and
        // "\r\n", so consult the source for the terminator's actual width
        // (previously this always advanced by 1, drifting on CRLF files).
        if source[cursor..].starts_with("\r\n") {
            cursor += 2;
        } else {
            cursor += 1;
        }
    }
    unreachable!()
}
49
/// Removes the greatest shared leading indentation (in bytes) from every
/// line of `source`, preserving relative indentation and a trailing newline
/// if one was present.
fn fix_leading_indentation<S: AsRef<str>>(source: S) -> String {
    let source = source.as_ref();

    // Smallest leading-whitespace width (in bytes) among non-blank lines.
    let shared_indent = source
        .lines()
        .filter(|line| !line.trim().is_empty())
        .map(|line| line.len() - line.trim_start().len())
        .min()
        .unwrap_or(0);

    let mut dedented = source
        .lines()
        .map(|line| {
            // Checked slicing: `line[shared_indent..]` could panic if the cut
            // point fell mid-way through a multi-byte character (or past the
            // end of a short blank line); fall back to the line unchanged.
            match line.get(shared_indent..) {
                Some(rest) => rest.to_string(),
                None => line.to_string(),
            }
        })
        .collect::<Vec<String>>();

    // `lines()` drops a trailing newline; restore it.
    if source.ends_with('\n') {
        dedented.push(String::new());
    }

    dedented.join("\n")
}
85
86fn fix_indentation<S: AsRef<str>>(source: S) -> String {
87 let source = source.as_ref();
88 let source = fix_leading_indentation(source);
90 source
91}
92
93fn caller_crate_root() -> Option<PathBuf> {
94 let crate_name =
95 std::env::var("CARGO_PKG_NAME").expect("failed to read ENV var `CARGO_PKG_NAME`!");
96 let current_dir = PathBuf::from(
97 std::env::var("CARGO_MANIFEST_DIR").expect("failed to read ENV var `CARGO_MANIFEST_DIR`!"),
98 );
99 for entry in WalkDir::new(¤t_dir)
100 .into_iter()
101 .filter_entry(|e| !e.file_name().eq_ignore_ascii_case("target"))
102 {
103 let Ok(entry) = entry else { continue };
104 if !entry.file_type().is_file() {
105 continue;
106 }
107 let Some(file_name) = entry.path().file_name() else {
108 continue;
109 };
110 if !file_name.eq_ignore_ascii_case("Cargo.toml") {
111 continue;
112 }
113 let Ok(cargo_toml) = std::fs::read_to_string(&entry.path()) else {
114 continue;
115 };
116 let Ok(table) = Table::from_str(cargo_toml.as_str()) else {
117 continue;
118 };
119 let Some(package) = table.get("package") else {
120 continue;
121 };
122 let Some(Value::String(package_name)) = package.get("name") else {
123 continue;
124 };
125 if package_name.eq_ignore_ascii_case(&crate_name) {
126 return Some(entry.path().parent().unwrap().to_path_buf());
127 }
128 }
129 None
130}
131
132fn prettify_path<P: AsRef<Path>>(path: P) -> PathBuf {
137 let path = path.as_ref();
138 if path.is_relative() {
139 return path.into();
140 }
141 let Some(prefix) = common_path(caller_crate_root().unwrap(), path) else {
142 return path.into();
143 };
144 path.components()
145 .skip(prefix.components().collect::<Vec<_>>().len())
146 .collect::<PathBuf>()
147}
148
149const DOCIFYING: &'static str = " Docifying ";
150
151fn write_green<S: AsRef<str>>(st: S) {
154 let mut stdout = StandardStream::stdout(ColorChoice::Always);
155 let _ = stdout.set_color(ColorSpec::new().set_fg(Some(Color::Green)).set_bold(true));
156 if let Err(_) = write!(&mut stdout, "{}", st.as_ref()) {
157 print!("{}", st.as_ref());
158 }
159 let _ = stdout.set_color(ColorSpec::new().set_fg(None).set_bold(false));
160}
161
/// Abstraction over syn item types that may carry a name, allowing a single
/// visitor to extract the identifier of top-level, impl, and trait items.
trait NamedItem {
    /// Returns the item's identifier, if this kind of item has one.
    fn name_ident(&self) -> Option<Ident>;
}
167
168impl NamedItem for Item {
169 fn name_ident(&self) -> Option<Ident> {
170 match self {
171 Item::Const(item_const) => Some(item_const.ident.clone()),
172 Item::Enum(item_enum) => Some(item_enum.ident.clone()),
173 Item::ExternCrate(item_extern_crate) => Some(item_extern_crate.ident.clone()),
174 Item::Fn(item_fn) => Some(item_fn.sig.ident.clone()),
175 Item::Macro(item_macro) => item_macro.ident.clone(), Item::Mod(item_mod) => Some(item_mod.ident.clone()),
177 Item::Static(item_static) => Some(item_static.ident.clone()),
178 Item::Struct(item_struct) => Some(item_struct.ident.clone()),
179 Item::Trait(item_trait) => Some(item_trait.ident.clone()),
180 Item::TraitAlias(item_trait_alias) => Some(item_trait_alias.ident.clone()),
181 Item::Type(item_type) => Some(item_type.ident.clone()),
182 Item::Union(item_union) => Some(item_union.ident.clone()),
183 _ => None,
188 }
189 }
190}
191
192impl NamedItem for ImplItem {
193 fn name_ident(&self) -> Option<Ident> {
194 match self {
195 ImplItem::Const(impl_item_const) => Some(impl_item_const.ident.clone()),
196 ImplItem::Fn(impl_item_fn) => Some(impl_item_fn.sig.ident.clone()),
197 ImplItem::Type(impl_item_type) => Some(impl_item_type.ident.clone()),
198 _ => None,
201 }
202 }
203}
204
205impl NamedItem for TraitItem {
206 fn name_ident(&self) -> Option<Ident> {
207 match self {
208 TraitItem::Const(trait_item_const) => Some(trait_item_const.ident.clone()),
209 TraitItem::Fn(trait_item_fn) => Some(trait_item_fn.sig.ident.clone()),
210 TraitItem::Type(trait_item_type) => Some(trait_item_type.ident.clone()),
211 _ => None,
214 }
215 }
216}
217
/// Abstraction over syn item types that carry attributes, used to find and
/// strip `#[docify::export]`-style attributes uniformly.
trait AttributedItem {
    /// Returns the attributes attached to this item (empty for variants
    /// that cannot carry attributes).
    fn item_attributes(&self) -> &Vec<Attribute>;

    /// Replaces this item's attributes with `attrs`.
    fn set_item_attributes(&mut self, attrs: Vec<Attribute>);
}
228
229impl AttributedItem for Item {
230 fn item_attributes(&self) -> &Vec<Attribute> {
231 const EMPTY: &Vec<Attribute> = &Vec::new();
232 match self {
233 Item::Const(c) => &c.attrs,
234 Item::Enum(e) => &e.attrs,
235 Item::ExternCrate(e) => &e.attrs,
236 Item::Fn(f) => &f.attrs,
237 Item::ForeignMod(f) => &f.attrs,
238 Item::Impl(i) => &i.attrs,
239 Item::Macro(m) => &m.attrs,
240 Item::Mod(m) => &m.attrs,
241 Item::Static(s) => &s.attrs,
242 Item::Struct(s) => &s.attrs,
243 Item::Trait(t) => &t.attrs,
244 Item::TraitAlias(t) => &t.attrs,
245 Item::Type(t) => &t.attrs,
246 Item::Union(u) => &u.attrs,
247 Item::Use(u) => &u.attrs,
248 _ => EMPTY,
249 }
250 }
251
252 fn set_item_attributes(&mut self, attrs: Vec<Attribute>) {
253 match self {
254 Item::Const(c) => c.attrs = attrs,
255 Item::Enum(e) => e.attrs = attrs,
256 Item::ExternCrate(e) => e.attrs = attrs,
257 Item::Fn(f) => f.attrs = attrs,
258 Item::ForeignMod(f) => f.attrs = attrs,
259 Item::Impl(i) => i.attrs = attrs,
260 Item::Macro(m) => m.attrs = attrs,
261 Item::Mod(m) => m.attrs = attrs,
262 Item::Static(s) => s.attrs = attrs,
263 Item::Struct(s) => s.attrs = attrs,
264 Item::Trait(t) => t.attrs = attrs,
265 Item::TraitAlias(t) => t.attrs = attrs,
266 Item::Type(t) => t.attrs = attrs,
267 Item::Union(u) => u.attrs = attrs,
268 Item::Use(u) => u.attrs = attrs,
269 _ => unimplemented!(),
270 }
271 }
272}
273
274impl AttributedItem for ImplItem {
275 fn item_attributes(&self) -> &Vec<Attribute> {
276 const EMPTY: &Vec<Attribute> = &Vec::new();
277 match self {
278 ImplItem::Const(impl_item_const) => &impl_item_const.attrs,
279 ImplItem::Fn(impl_item_fn) => &impl_item_fn.attrs,
280 ImplItem::Type(impl_item_type) => &impl_item_type.attrs,
281 ImplItem::Macro(impl_item_macro) => &impl_item_macro.attrs,
282 _ => &EMPTY,
284 }
285 }
286
287 fn set_item_attributes(&mut self, attrs: Vec<Attribute>) {
288 match self {
289 ImplItem::Const(impl_item_const) => impl_item_const.attrs = attrs,
290 ImplItem::Fn(impl_item_fn) => impl_item_fn.attrs = attrs,
291 ImplItem::Type(impl_item_type) => impl_item_type.attrs = attrs,
292 ImplItem::Macro(impl_item_macro) => impl_item_macro.attrs = attrs,
293 _ => unimplemented!(),
295 }
296 }
297}
298
299impl AttributedItem for TraitItem {
300 fn item_attributes(&self) -> &Vec<Attribute> {
301 const EMPTY: &Vec<Attribute> = &Vec::new();
302 match self {
303 TraitItem::Const(trait_item_const) => &trait_item_const.attrs,
304 TraitItem::Fn(trait_item_fn) => &trait_item_fn.attrs,
305 TraitItem::Type(trait_item_type) => &trait_item_type.attrs,
306 TraitItem::Macro(trait_item_macro) => &trait_item_macro.attrs,
307 _ => &EMPTY,
309 }
310 }
311
312 fn set_item_attributes(&mut self, attrs: Vec<Attribute>) {
313 match self {
314 TraitItem::Const(trait_item_const) => trait_item_const.attrs = attrs,
315 TraitItem::Fn(trait_item_fn) => trait_item_fn.attrs = attrs,
316 TraitItem::Type(trait_item_type) => trait_item_type.attrs = attrs,
317 TraitItem::Macro(trait_item_macros) => trait_item_macros.attrs = attrs,
318 _ => unimplemented!(),
320 }
321 }
322}
323
324#[proc_macro_attribute]
376pub fn export(attr: TokenStream, tokens: TokenStream) -> TokenStream {
377 match export_internal(attr, tokens) {
378 Ok(tokens) => tokens.into(),
379 Err(err) => err.to_compile_error().into(),
380 }
381}
382
383#[proc_macro_attribute]
419pub fn export_content(attr: TokenStream, tokens: TokenStream) -> TokenStream {
420 match export_internal(attr, tokens) {
421 Ok(tokens) => tokens.into(),
422 Err(err) => err.to_compile_error().into(),
423 }
424}
425
/// Arguments of the `#[export]` / `#[export_content]` attribute: an optional
/// ident naming the export (falls back to the item's own ident).
#[derive(Parse)]
struct ExportAttr {
    ident: Option<Ident>,
}
431
432fn export_internal(
434 attr: impl Into<TokenStream2>,
435 tokens: impl Into<TokenStream2>,
436) -> Result<TokenStream2> {
437 let attr = parse2::<ExportAttr>(attr.into())?;
438 let item = parse2::<Item>(tokens.into())?;
439
440 let _export_ident = attr.ident.or_else(|| item.name_ident()).ok_or_else(|| {
442 Error::new(
443 item.span(),
444 "Cannot automatically detect ident from this item. \
445 You will need to specify a name manually as the argument \
446 for the #[export] attribute, i.e. #[export(my_name)].",
447 )
448 })?;
449
450 Ok(quote!(#item))
451}
452
453#[proc_macro]
523pub fn embed(tokens: TokenStream) -> TokenStream {
524 match embed_internal(tokens, MarkdownLanguage::Ignore) {
525 Ok(tokens) => tokens.into(),
526 Err(err) => err.to_compile_error().into(),
527 }
528}
529
530#[proc_macro]
536pub fn embed_run(tokens: TokenStream) -> TokenStream {
537 match embed_internal(tokens, MarkdownLanguage::Blank) {
538 Ok(tokens) => tokens.into(),
539 Err(err) => err.to_compile_error().into(),
540 }
541}
542
/// Arguments accepted by `embed!` / `embed_run!`: a file path, optionally
/// followed by a comma and the ident of an exported item in that file.
#[derive(Parse)]
struct EmbedArgs {
    /// Path to the source file, relative to the caller's crate root.
    file_path: LitStr,
    // `item_ident` is only parsed when a comma followed `file_path`.
    #[prefix(Option<Token![,]> as comma)]
    #[parse_if(comma.is_some())]
    item_ident: Option<Ident>,
}
551
552impl ToTokens for EmbedArgs {
553 fn to_tokens(&self, tokens: &mut TokenStream2) {
554 tokens.extend(self.file_path.to_token_stream());
555 let Some(item_ident) = &self.item_ident else {
556 return;
557 };
558 tokens.extend(quote!(,));
559 tokens.extend(item_ident.to_token_stream());
560 }
561}
562
/// Custom parse keywords for recognizing `docify::embed!(..)` calls inside
/// markdown HTML comments.
mod keywords {
    use syn::custom_keyword;

    custom_keyword!(docify);
    custom_keyword!(embed);
}
569
/// Parses the full `docify::embed!(path, ident);` call syntax as found
/// inside `<!-- ... -->` comments in markdown files.
#[derive(Parse)]
struct EmbedCommentCall {
    // consumes the literal `docify :: embed !` prefix tokens
    #[prefix(keywords::docify)]
    #[prefix(Token![::])]
    #[prefix(keywords::embed)]
    #[prefix(Token![!])]
    #[paren]
    _paren: Paren,
    /// The actual embed arguments, taken from inside the parentheses.
    #[inside(_paren)]
    args: EmbedArgs,
    // trailing semicolon is optional
    _semi: Option<Token![;]>,
}
584
/// The language tag placed on generated markdown code fences, which controls
/// how rustdoc treats the example.
#[derive(Copy, Clone, Eq, PartialEq)]
enum MarkdownLanguage {
    /// ```ignore — shown but never compiled/run
    Ignore,
    /// ```rust — used when compiling standalone markdown files
    Rust,
    /// ``` — no tag; rustdoc compiles and runs the example
    Blank,
}
593
594fn into_example(st: &str, lang: MarkdownLanguage) -> String {
596 let mut lines: Vec<String> = Vec::new();
597 match lang {
598 MarkdownLanguage::Ignore => lines.push(String::from("```ignore")),
599 MarkdownLanguage::Rust => lines.push(String::from("```rust")),
600 MarkdownLanguage::Blank => lines.push(String::from("```")),
601 }
602 for line in st.lines() {
603 lines.push(String::from(line));
604 }
605 lines.push(String::from("```"));
606 lines.join("\n")
607}
608
/// Visitor hook shared by all supported item kinds (top-level, impl, and
/// trait items), so export detection is written once.
trait SupportedVisitItem<'ast> {
    /// Inspects `node` for a matching docify export attribute.
    fn visit_supported_item<T: NamedItem + AttributedItem + ToTokens + Clone>(
        &mut self,
        node: &'ast T,
    );
}
616
617impl<'ast> SupportedVisitItem<'ast> for ItemVisitor {
618 fn visit_supported_item<T: NamedItem + AttributedItem + ToTokens + Clone>(
619 &mut self,
620 node: &'ast T,
621 ) {
622 let mut i = 0;
623 let attrs = node.item_attributes();
624 for attr in attrs {
625 i += 1; let AttrStyle::Outer = attr.style else {
627 continue;
628 };
629 let Some(last_seg) = attr.path().segments.last() else {
630 continue;
631 };
632 let is_export_content = last_seg.ident == "export_content";
633 if last_seg.ident != "export" && !is_export_content {
634 continue;
635 }
636 let Some(second_to_last_seg) = attr.path().segments.iter().rev().nth(1) else {
637 continue;
638 };
639 if second_to_last_seg.ident != last_seg.ident && second_to_last_seg.ident != "docify" {
640 continue;
641 }
642 let item_ident = match &attr.meta {
648 Meta::List(list) => match parse2::<Ident>(list.tokens.clone()) {
649 Ok(ident) => Some(ident),
650 Err(_) => None,
651 },
652 _ => None,
653 };
654 let item_ident = match item_ident {
655 Some(ident) => ident,
656 None => match node.name_ident() {
657 Some(ident) => ident,
658 None => continue,
659 },
660 };
661
662 if item_ident == self.search {
664 let mut item = node.clone();
665 let attrs_without_this_one: Vec<Attribute> = attrs
668 .iter()
669 .enumerate()
670 .filter(|&(n, _)| n != i - 1)
671 .map(|(_, v)| v)
672 .cloned()
673 .collect();
674 item.set_item_attributes(attrs_without_this_one);
675 self.results.push((
677 item.to_token_stream(),
678 match is_export_content {
679 true => ResultStyle::ExportContent,
680 false => ResultStyle::Export,
681 },
682 ));
683 break;
685 }
686 }
687 }
688}
689
/// How a matched item should be embedded: the whole item, or only its
/// contents (for `#[export_content]`).
#[derive(Copy, Clone, PartialEq, Eq)]
enum ResultStyle {
    Export,
    ExportContent,
}
695
/// AST visitor that collects every exported item whose resolved export name
/// equals `search`, together with its embedding style.
struct ItemVisitor {
    /// the export name we are looking for
    search: Ident,
    /// all matches found, as (item tokens, embedding style) pairs
    results: Vec<(TokenStream2, ResultStyle)>,
}
701
702impl<'ast> Visit<'ast> for ItemVisitor {
703 fn visit_trait_item(&mut self, node: &'ast TraitItem) {
704 self.visit_supported_item(node);
705 visit::visit_trait_item(self, node);
706 }
707
708 fn visit_impl_item(&mut self, node: &'ast ImplItem) {
709 self.visit_supported_item(node);
710 visit::visit_impl_item(self, node);
711 }
712
713 fn visit_item(&mut self, node: &'ast Item) {
714 self.visit_supported_item(node);
715 visit::visit_item(self, node);
716 }
717}
718
/// A single character paired with its byte position in the original
/// (uncompressed) source string.
#[derive(Copy, Clone, Eq, PartialEq)]
struct OffsetChar {
    // the character itself
    char: char,
    // byte offset of this character in the original source
    original_pos: usize,
}
726
727impl OffsetChar {
728 fn new(char: char, original_pos: usize) -> OffsetChar {
729 OffsetChar { char, original_pos }
730 }
731}
732
/// A byte range (`start..end`, end-exclusive) within a source string that
/// has been recognized as some entity (comment, attribute, code fence, ...).
#[derive(Clone, PartialEq, Eq)]
struct SourceEntity {
    /// starting byte offset (inclusive)
    start: usize,
    /// ending byte offset (exclusive)
    end: usize,
}

impl SourceEntity {
    /// Creates a new entity covering `start..end`.
    pub fn new(start: usize, end: usize) -> SourceEntity {
        SourceEntity { start, end }
    }

    /// Marks every in-range byte of this entity as claimed so overlapping
    /// matches from later regex passes are skipped. Indices past the end of
    /// `claimed` are ignored.
    pub fn claim(&self, claimed: &mut [bool]) {
        for i in self.start..min(self.end, claimed.len()) {
            claimed[i] = true;
        }
    }

    /// Returns whether this entity overlaps an already-claimed region,
    /// judged by its midpoint (sufficient because claims cover contiguous
    /// ranges). A midpoint outside `claimed` now reports `false` instead of
    /// panicking on an out-of-bounds index.
    pub fn is_claimed(&self, claimed: &[bool]) -> bool {
        claimed
            .get((self.start + self.end) / 2)
            .copied()
            .unwrap_or(false)
    }
}
766
/// A source string with whitespace, comments, and docify attributes removed,
/// retaining for every kept character its byte position in the original so
/// matches can be mapped back to real source offsets.
struct CompressedString {
    /// compressed character index -> (char, original byte position)
    chars: HashMap<usize, OffsetChar>,
    /// kept characters in source order
    chars_arr: Vec<OffsetChar>,
}
774
775impl CompressedString {
776 fn to_string(&self) -> String {
777 self.chars_arr.iter().map(|c| c.char).collect()
778 }
779}
780
// Matches `#[...export]` / `#[...export_content]` attributes (any path
// prefix, optional `(name)` argument), including surrounding newlines so
// the attribute line can be removed cleanly.
static DOCIFY_ATTRIBUTES: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r"\n?\#\[(?:\w+\s*::\s*)*(?:export|export_content)(?:\s*\(\s*(\w+)\s*\))?\]\n?")
        .unwrap()
});

// Matches `///` doc comments (to end of line).
static DOC_COMMENT: Lazy<Regex> = Lazy::new(|| Regex::new(r"///.*").unwrap());
// Matches `#[doc = "..."]` attributes.
static DOC_COMMENT_ATTR: Lazy<Regex> =
    Lazy::new(|| Regex::new(r#"#\[doc\s*=\s*".*"\s*]"#).unwrap());
// Matches `//` line comments (to end of line).
static LINE_COMMENT: Lazy<Regex> = Lazy::new(|| Regex::new(r"//.*").unwrap());
// Matches `/* ... */` block comments, non-greedy.
static MULTI_LINE_COMMENT: Lazy<Regex> = Lazy::new(|| Regex::new(r"/\*[\s\S]*?\*/").unwrap());
// Matches `<!-- ... -->` HTML comments in markdown, non-greedy.
static HTML_COMMENT: Lazy<Regex> = Lazy::new(|| Regex::new(r"<!--[\s\S]*?-->").unwrap());
// Matches fenced markdown code blocks, non-greedy.
static MARKDOWN_CODEBLOCK: Lazy<Regex> = Lazy::new(|| Regex::new(r"```[\s\S]*?```").unwrap());
793impl From<&String> for CompressedString {
796 fn from(value: &String) -> Self {
797 let mut entities: Vec<SourceEntity> = Vec::new();
798 let mut claimed: Vec<bool> = vec![false; value.len()];
799 for m in DOC_COMMENT.find_iter(value) {
800 let entity = SourceEntity::new(m.start(), m.end());
801 entity.claim(&mut claimed);
802 entities.push(entity);
803 }
804 for m in DOC_COMMENT_ATTR.find_iter(value) {
805 let entity = SourceEntity::new(m.start(), m.end());
806 if !entity.is_claimed(&claimed) {
807 entity.claim(&mut claimed);
808 entities.push(entity);
809 }
810 }
811 for m in MULTI_LINE_COMMENT.find_iter(value) {
812 let entity = SourceEntity::new(m.start(), m.end());
813 if !entity.is_claimed(&claimed) {
814 entity.claim(&mut claimed);
815 entities.push(entity);
816 }
817 }
818 for m in LINE_COMMENT.find_iter(value) {
819 let entity = SourceEntity::new(m.start(), m.end());
820 if !entity.is_claimed(&claimed) {
821 entity.claim(&mut claimed);
822 entities.push(entity);
823 }
824 }
825 for m in DOCIFY_ATTRIBUTES.find_iter(value) {
826 let entity = SourceEntity::new(m.start(), m.end());
827 if !entity.is_claimed(&claimed) {
828 entity.claim(&mut claimed);
829 entities.push(entity);
830 }
831 }
832 let mut compressed = CompressedString {
833 chars_arr: Vec::new(),
834 chars: HashMap::new(),
835 };
836 let mut cursor = 0;
837 let mut byte_index = 0;
838 while byte_index < value.len() {
839 let current_char = &value[byte_index..].chars().next().unwrap(); let char_len = current_char.len_utf8(); if claimed[byte_index] || current_char.is_whitespace() {
843 byte_index += char_len;
844 continue;
845 }
846 let oc = OffsetChar::new(*current_char, byte_index);
847 compressed.chars.insert(cursor, oc);
848 compressed.chars_arr.push(oc);
849 cursor += 1;
850 byte_index += char_len;
851 }
852
853 compressed
854 }
855}
856
857fn get_content_tokens<'a>(item: &'a Item) -> TokenStream2 {
859 match item {
860 Item::Fn(item_fn) => {
864 let mut tokens = TokenStream2::new();
865 tokens.extend(item_fn.block.stmts.iter().map(|t| t.to_token_stream()));
866 tokens
867 }
868 Item::ForeignMod(item_mod) => {
869 let mut tokens = TokenStream2::new();
870 tokens.extend(item_mod.items.iter().map(|t| t.to_token_stream()));
871 tokens
872 }
873 Item::Impl(item_impl) => {
874 let mut tokens = TokenStream2::new();
875 tokens.extend(item_impl.items.iter().map(|t| t.to_token_stream()));
876 tokens
877 }
878 Item::Mod(item_mod) => {
880 let Some(content) = &item_mod.content else {
881 return item_mod.to_token_stream();
882 };
883 let mut tokens = TokenStream2::new();
884 tokens.extend(content.1.iter().map(|t| t.to_token_stream()));
885 tokens
886 }
887 Item::Trait(item_trait) => {
890 let mut tokens = TokenStream2::new();
891 tokens.extend(item_trait.items.iter().map(|t| t.to_token_stream()));
892 tokens
893 }
894 Item::TraitAlias(item_trait_alias) => item_trait_alias.to_token_stream(),
895 _ => item.to_token_stream(),
900 }
901}
902
/// Locates `item` within the original `source` text and returns the exact
/// source excerpt (with original formatting and comments), minus any docify
/// attribute lines.
///
/// Works by compressing both the source and the item's tokens (stripping
/// whitespace/comments/docify attributes), finding the item inside the
/// compressed source, then mapping the match back to original byte offsets.
fn source_excerpt<'a, T: ToTokens>(
    source: &'a String,
    item: &'a T,
    style: ResultStyle,
) -> Result<String> {
    let compressed_source = CompressedString::from(source);
    // For `export_content`, search for only the item's inner contents.
    let item_tokens = match style {
        ResultStyle::Export => item.to_token_stream(),
        ResultStyle::ExportContent => get_content_tokens(&parse2::<Item>(item.to_token_stream())?),
    };
    let compressed_item = CompressedString::from(&item_tokens.to_string());
    let compressed_source_string = compressed_source.to_string();
    let compressed_item_string = compressed_item.to_string();
    // The item was parsed from this very source, so it should always be
    // findable; failure indicates a bug in the compression logic.
    let Some(found_start) = compressed_source_string.find(compressed_item_string.as_str()) else {
        return Err(Error::new(
            item.span(),
            "You have found a bug in docify! Please submit a new GitHub issue at \
            https://github.com/sam0x17/docify/issues/new?title=%60source_excerpt\
            %60%3A%20can%27t%20find%20item%20in%20source with a sample of the item \
            you are trying to embed.",
        ));
    };
    // Map compressed match positions back to original byte offsets, then
    // widen the start to the beginning of its line.
    let start_c = compressed_source.chars[&found_start];
    let start_pos = start_c.original_pos;
    let start_pos = line_start_position(source, start_pos);
    let end_c = compressed_source.chars[&(found_start + compressed_item_string.len() - 1)];
    let end_pos = end_c.original_pos;
    let final_excerpt = &source[start_pos..min(end_pos + 1, source.len())];
    // Drop docify attribute lines (unless they are commented out).
    Ok(final_excerpt
        .lines()
        .filter(|line| !(DOCIFY_ATTRIBUTES.is_match(line) && !line.trim().starts_with("//")))
        .collect::<Vec<&str>>()
        .join("\n"))
}
942
/// Core of `embed!`: reads and parses the target file, finds all exported
/// items matching the requested ident (or takes the whole file when no
/// ident was given), and renders them as fenced markdown examples.
fn embed_internal_str(tokens: impl Into<TokenStream2>, lang: MarkdownLanguage) -> Result<String> {
    let args = parse2::<EmbedArgs>(tokens.into())?;
    // If the crate root cannot be determined, silently embed nothing rather
    // than erroring (e.g. unusual build environments).
    let Some(root) = caller_crate_root() else {
        return Ok(String::from(""));
    };
    let file_path = root.join(args.file_path.value());
    let source_code = match fs::read_to_string(&file_path) {
        Ok(src) => src,
        Err(_) => {
            return Err(Error::new(
                args.file_path.span(),
                format!(
                    "Could not read the specified path '{}'.",
                    file_path.display(),
                ),
            ))
        }
    };
    let parsed = source_code.parse::<TokenStream2>()?;
    let source_file = parse2::<File>(parsed)?;

    let output = if let Some(ident) = args.item_ident {
        // Collect every exported item whose name matches `ident`.
        let mut visitor = ItemVisitor {
            search: ident.clone(),
            results: Vec::new(),
        };
        visitor.visit_file(&source_file);
        if visitor.results.is_empty() {
            return Err(Error::new(
                ident.span(),
                format!(
                    "Could not find docify export item '{}' in '{}'.",
                    ident,
                    file_path.display(),
                ),
            ));
        }
        // Render each match as its original source excerpt, dedented and
        // wrapped in a code fence.
        let mut results: Vec<String> = Vec::new();
        for (item, style) in visitor.results {
            let excerpt = source_excerpt(&source_code, &item, style)?;
            let formatted = fix_indentation(excerpt);
            let example = into_example(formatted.as_str(), lang);
            results.push(example);
        }
        results.join("\n")
    } else {
        // No ident: embed the entire file as one example.
        into_example(source_code.as_str(), lang)
    };
    Ok(output)
}
995
996fn embed_internal(tokens: impl Into<TokenStream2>, lang: MarkdownLanguage) -> Result<TokenStream2> {
998 let output = embed_internal_str(tokens, lang)?;
999 Ok(quote!(#output))
1000}
1001
/// Arguments accepted by `compile_markdown!`: an input path, optionally
/// followed by a comma and an output path.
#[derive(Parse)]
struct CompileMarkdownArgs {
    /// Input markdown file or directory, relative to the caller's crate root.
    input: LitStr,
    // `output` is only parsed when a comma followed `input`.
    #[prefix(Option<Token![,]> as comma)]
    #[parse_if(comma.is_some())]
    output: Option<LitStr>,
}
1010
/// Core of `compile_markdown!`: validates the input/output paths, then
/// either compiles a whole directory of markdown files, writes a single
/// compiled file, or (with no output path) expands to the compiled markdown
/// as a string literal.
fn compile_markdown_internal(tokens: impl Into<TokenStream2>) -> Result<TokenStream2> {
    let args = parse2::<CompileMarkdownArgs>(tokens.into())?;
    if args.input.value().is_empty() {
        return Err(Error::new(args.input.span(), "Input path cannot be blank!"));
    }
    let input_path = std::path::PathBuf::from(&args.input.value());
    // If the crate root cannot be determined, silently do nothing rather
    // than erroring (e.g. unusual build environments).
    let Some(root) = caller_crate_root() else {
        return Ok(quote!());
    };
    let input_path = root.join(input_path);
    if !input_path.exists() {
        return Err(Error::new(
            args.input.span(),
            format!(
                "Could not read the specified path '{}'.",
                input_path.display(),
            ),
        ));
    }
    if let Some(output) = args.output {
        if output.value().is_empty() {
            return Err(Error::new(
                output.span(),
                "If specified, output path cannot be blank!",
            ));
        }
        let output = root.join(output.value());
        if input_path.is_dir() {
            // Directory mode: mirror the tree of .md files into `output`.
            compile_markdown_dir(input_path, format!("{}", output.display()))?;
        } else {
            // Single-file mode: compile `input_path` and write to `output`.
            if cfg!(not(test)) {
                write_green(DOCIFYING);
                println!(
                    "{} {} {}",
                    prettify_path(&input_path).display(),
                    "=>", prettify_path(&output).display(),
                );
            }
            let Ok(source) = fs::read_to_string(&input_path) else {
                return Err(Error::new(
                    Span::call_site(),
                    format!("Failed to read markdown file at '{}'", input_path.display()),
                ));
            };
            let compiled = compile_markdown_source(source.as_str())?;
            let Ok(_) = overwrite_file(&output, &compiled) else {
                return Err(Error::new(
                    Span::call_site(),
                    format!("Failed to write to '{}'", output.display()),
                ));
            };
        }
        // File-writing modes expand to nothing.
        Ok(quote!())
    } else {
        // No output path: expand to the compiled markdown string itself.
        if input_path.is_dir() {
            return Err(Error::new(
                args.input.span(),
                "Only individual files are supported with no output path, you specified a directory."
            ));
        }
        let Ok(source) = fs::read_to_string(&input_path) else {
            return Err(Error::new(
                Span::call_site(),
                format!("Failed to read markdown file at '{}'", input_path.display()),
            ));
        };
        let compiled = compile_markdown_source(source.as_str())?;
        Ok(quote!(#compiled))
    }
}
1084
1085fn transpose_subpath<P1: AsRef<Path>, P2: AsRef<Path>, P3: AsRef<Path>>(
1087 input_dir: P1,
1088 path: P2,
1089 target_dir: P3,
1090) -> PathBuf {
1091 let prefix = common_path(input_dir, &path).unwrap();
1092 Path::join(
1093 target_dir.as_ref(),
1094 path.as_ref()
1095 .components()
1096 .skip(prefix.components().collect::<Vec<_>>().len())
1097 .collect::<PathBuf>(),
1098 )
1099}
1100
/// Writes `data` to `path`, creating or truncating the file — but skips the
/// write entirely when the existing contents already match, preserving the
/// file's mtime and avoiding needless rebuild triggers.
fn overwrite_file<P: AsRef<Path>, D: AsRef<[u8]>>(path: P, data: D) -> std::io::Result<()> {
    let path = path.as_ref();
    let data = data.as_ref();
    if path.exists() {
        if let Ok(existing) = fs::read(path) {
            if existing == data {
                return Ok(());
            }
        }
    }
    // `fs::write` creates/truncates and writes in one call, replacing the
    // previous hand-rolled OpenOptions + write_all + flush sequence.
    fs::write(path, data)
}
1120
1121fn compile_markdown_dir<P1: AsRef<Path>, P2: AsRef<Path>>(
1123 input_dir: P1,
1124 output_dir: P2,
1125) -> Result<()> {
1126 for entry in WalkDir::new(&input_dir)
1128 .into_iter()
1129 .filter_map(std::result::Result::ok)
1130 .filter(|e| {
1131 if !e.file_type().is_file() && !e.file_type().is_symlink() {
1132 return false;
1133 }
1134 let Some(ext) = e.path().extension() else {
1135 return false;
1136 };
1137 if ext.eq_ignore_ascii_case("md") {
1138 return true;
1139 }
1140 false
1141 })
1142 {
1143 let src_path = entry.path();
1144 let dest_path = transpose_subpath(&input_dir, &src_path, &output_dir);
1145 if cfg!(not(test)) {
1146 write_green(DOCIFYING);
1147 println!(
1148 "{} {} {}",
1149 prettify_path(&src_path).display(),
1150 "=>", prettify_path(&dest_path).display(),
1152 );
1153 }
1154 if let Some(parent) = dest_path.parent() {
1155 let Ok(_) = fs::create_dir_all(parent) else {
1156 return Err(Error::new(
1157 Span::call_site(),
1158 format!("Failed to create output directory '{}'", parent.display()),
1159 ));
1160 };
1161 }
1162 let Ok(source) = fs::read_to_string(src_path) else {
1163 return Err(Error::new(
1164 Span::call_site(),
1165 format!("Failed to read markdown file at '{}'", src_path.display()),
1166 ));
1167 };
1168 let compiled = compile_markdown_source(source.as_str())?;
1169 if let Some(parent) = dest_path.parent() {
1170 let Ok(_) = fs::create_dir_all(parent) else {
1171 return Err(Error::new(
1172 Span::call_site(),
1173 format!("Failed to create directory '{}'", parent.display()),
1174 ));
1175 };
1176 }
1177 let Ok(_) = overwrite_file(&dest_path, &compiled) else {
1178 return Err(Error::new(
1179 Span::call_site(),
1180 format!("Failed to write to '{}'", dest_path.display()),
1181 ));
1182 };
1183 }
1184 Ok(())
1185}
1186
1187fn compile_markdown_source<S: AsRef<str>>(source: S) -> Result<String> {
1189 let source = source.as_ref();
1190 if source.is_empty() {
1191 return Ok(String::from(""));
1192 }
1193 let mut claimed: Vec<bool> = source.chars().map(|_| false).collect();
1194 for m in MARKDOWN_CODEBLOCK.find_iter(source) {
1195 let entity = SourceEntity::new(m.start(), m.end());
1196 entity.claim(&mut claimed);
1197 }
1198 let mut output: Vec<String> = Vec::new();
1199 let mut prev_end = 0;
1200 for m in HTML_COMMENT.find_iter(source) {
1201 let entity = SourceEntity::new(m.start(), m.end());
1202 if entity.is_claimed(&claimed) {
1203 continue;
1205 }
1206 output.push(String::from(&source[prev_end..m.start()]));
1208 let orig_comment = &source[m.start()..m.end()];
1210 let comment = &orig_comment[4..(orig_comment.len() - 3)].trim();
1212 if comment.starts_with("docify") {
1213 let args = parse2::<EmbedCommentCall>(comment.parse()?)?.args;
1214 let compiled = embed_internal_str(args.to_token_stream(), MarkdownLanguage::Rust)?;
1215 output.push(compiled);
1216 } else {
1217 output.push(String::from(orig_comment));
1218 }
1219 prev_end = m.end();
1220 }
1221 if prev_end < source.len() - 1 {
1223 output.push(String::from(&source[prev_end..]));
1224 }
1225 Ok(output.join(""))
1226}
1227
1228#[proc_macro]
1286pub fn compile_markdown(tokens: TokenStream) -> TokenStream {
1287 match compile_markdown_internal(tokens) {
1288 Ok(tokens) => tokens.into(),
1289 Err(err) => err.to_compile_error().into(),
1290 }
1291}
1292
// Unit tests live in `tests.rs`, compiled only for test builds.
#[cfg(test)]
mod tests;