1use quote::ToTokens;
2use regex::Regex;
3use std::error::Error;
4use std::fs;
5use std::path::{Path, PathBuf};
6use std::process::Command;
7use std::{collections::HashSet, env};
8use syn::{
9 Attribute, File, Item, ItemEnum, ItemImpl, ItemStruct, ItemTrait, ItemUse, Type, TypePath,
10};
11
/// One expandable example discovered in a markdown file: an input code block
/// paired with the byte range of the following code block that should be
/// overwritten with generated output.
#[derive(Debug)]
struct CodeExample {
    /// The Rust source of the input block, with rustdoc hidden-line markers
    /// (`# ` prefixes and bare `#` lines) stripped.
    input_code: String,
    /// Byte offset into the markdown where the output block's interior
    /// starts (just past the opening ```rust fence line).
    output_start: usize,
    /// Byte offset where the output block's interior ends (just before the
    /// closing ``` fence).
    output_end: usize,
}
18
/// Items filtered out of `cargo expand` output that are relevant to one
/// documented example (see `extract_fieldwork_code`).
#[derive(Debug)]
struct ExtractedCode {
    /// All top-level `use` statements from the expansion.
    use_statements: Vec<ItemUse>,
    /// All top-level trait definitions from the expansion.
    trait_definitions: Vec<ItemTrait>,
    /// Struct definitions whose names match the example's own items.
    struct_definitions: Vec<ItemStruct>,
    /// Enum definitions whose names match the example's own items.
    enum_definitions: Vec<ItemEnum>,
    /// Inherent `impl` blocks for the example's items — the generated code
    /// this tool exists to display.
    fieldwork_impls: Vec<ItemImpl>,
}
27
28fn main() -> Result<(), Box<dyn Error>> {
29 let verbose = env::args().any(|arg| arg == "--verbose" || arg == "-v");
30 let verify = env::args().any(|arg| arg == "--verify");
31
32 let docs_dir = env::args()
33 .skip(1)
34 .find(|arg| !arg.starts_with("--"))
35 .unwrap_or_else(|| "docs".to_string());
36
37 let md_files = find_markdown_files(Path::new(&docs_dir))?;
38 println!("Found {} markdown files in {docs_dir}/", md_files.len());
39
40 let example_file = env::current_dir()?.join("examples/docs-expansion.rs");
41 let mut any_changed = false;
42
43 for path in &md_files {
44 let changed = process_file(path, &example_file, verbose, verify)?;
45 if changed {
46 any_changed = true;
47 }
48 }
49
50 if verify && any_changed {
51 eprintln!("❌ Documentation is out of date! Run `cargo run --bin docs-gen` to update.");
52 std::process::exit(1);
53 } else if verify {
54 println!("✅ Documentation is up to date.");
55 }
56
57 Ok(())
58}
59
60fn find_markdown_files(dir: &Path) -> Result<Vec<PathBuf>, Box<dyn Error>> {
61 let mut files = Vec::new();
62 if !dir.exists() {
63 return Ok(files);
64 }
65 collect_markdown_files(dir, &mut files)?;
66 files.sort();
67 Ok(files)
68}
69
/// Recursively appends every `.md` file beneath `dir` onto `files`,
/// descending into subdirectories depth-first.
fn collect_markdown_files(dir: &Path, files: &mut Vec<PathBuf>) -> Result<(), Box<dyn Error>> {
    for entry in fs::read_dir(dir)? {
        let path = entry?.path();
        if path.is_dir() {
            collect_markdown_files(&path, files)?;
            continue;
        }
        if let Some("md") = path.extension().and_then(|ext| ext.to_str()) {
            files.push(path);
        }
    }
    Ok(())
}
82
83fn process_file(
84 path: &Path,
85 example_file: &Path,
86 verbose: bool,
87 verify: bool,
88) -> Result<bool, Box<dyn Error>> {
89 let display = path.display();
90 let content = fs::read_to_string(path)?;
91
92 println!("Looking for examples in {display}...");
93 let examples = find_expandable_examples(&content)?;
94
95 if examples.is_empty() {
96 return Ok(false);
97 }
98
99 println!("Found {} examples", examples.len());
100
101 let mut new_content = content.clone();
102 let mut updated_count = 0;
103
104 for (i, example) in examples.iter().rev().enumerate() {
105 println!("🔄 Processing example {i} of {}...", examples.len());
106
107 match process_example(&example.input_code, example_file) {
108 Ok(formatted) => {
109 if verbose {
110 println!("Generated output ({} chars):", formatted.len());
111 let lines: Vec<&str> = formatted.lines().collect();
112 for (i, line) in lines.iter().take(5).enumerate() {
113 println!(" {}: {}", i + 1, line);
114 }
115 if lines.len() > 5 {
116 println!(" ... ({} more lines)", lines.len() - 5);
117 }
118 }
119
120 let start = example.output_start;
121 let end = example.output_end;
122
123 let safe_start = if start >= new_content.len() {
124 new_content.len()
125 } else if new_content.is_char_boundary(start) {
126 start
127 } else {
128 (0..=start)
129 .rev()
130 .find(|&i| new_content.is_char_boundary(i))
131 .unwrap_or(0)
132 };
133
134 let safe_end = if end > new_content.len() {
135 new_content.len()
136 } else if new_content.is_char_boundary(end) {
137 end
138 } else {
139 (end..new_content.len())
140 .find(|&i| new_content.is_char_boundary(i))
141 .unwrap_or(new_content.len())
142 };
143
144 if safe_start <= safe_end {
145 new_content.replace_range(safe_start..safe_end, &formatted);
146 } else {
147 eprintln!("⚠️ Invalid range for example {i}, skipping replacement");
148 }
149
150 updated_count += 1;
151 println!("✅ Example {i} updated successfully");
152 }
153 Err(e) => {
154 eprintln!("❌ Failed to process example {i}: {e}");
155 if verbose {
156 eprintln!("Input code was:\n{}", example.input_code);
157 }
158 continue;
159 }
160 }
161 }
162
163 let changed = new_content != content;
164
165 if !verify && updated_count > 0 {
166 fs::write(path, new_content)?;
167 println!("📝 Updated {updated_count} examples in {display}");
168 }
169
170 Ok(changed)
171}
172
173fn find_expandable_examples(content: &str) -> Result<Vec<CodeExample>, Box<dyn Error>> {
174 let mut examples = Vec::new();
175 let block_pattern = Regex::new(r"(?s)```rust\n(.*?)\n```")?;
176 let blocks: Vec<_> = block_pattern.captures_iter(content).collect();
177
178 for (i, block_match) in blocks.iter().enumerate() {
179 let block_content = block_match.get(1).unwrap().as_str();
180
181 if block_content.contains("#[derive(") && !block_content.contains("// docgen-skip") {
182 let input_code = block_content
183 .lines()
184 .map(|line| {
185 if let Some(stripped) = line.strip_prefix("# ") {
186 stripped
187 } else if line == "#" {
188 ""
189 } else {
190 line
191 }
192 })
193 .collect::<Vec<_>>()
194 .join("\n");
195
196 if let Some(next_block) = blocks.get(i + 1) {
197 let next_full = next_block.get(0).unwrap();
198 let output_start = next_full.start() + 8; let output_end = next_full.end() - 4; examples.push(CodeExample {
202 input_code,
203 output_start,
204 output_end,
205 });
206 }
207 }
208 }
209
210 Ok(examples)
211}
212
213fn process_example(input: &str, example_file: &Path) -> Result<String, Box<dyn Error>> {
214 let target_items = extract_item_names_from_input(input)?;
215 let expanded = expand_single_example(input, example_file)?;
216 let extracted = extract_fieldwork_code(&expanded, &target_items)?;
217 format_extracted_code(&extracted)
218}
219
220fn extract_item_names_from_input(input: &str) -> Result<HashSet<String>, Box<dyn Error>> {
221 let parsed: File = syn::parse_str(input)?;
222 let mut item_names = HashSet::new();
223
224 for item in parsed.items {
225 match item {
226 Item::Struct(s) => {
227 item_names.insert(s.ident.to_string());
228 }
229 Item::Enum(e) => {
230 item_names.insert(e.ident.to_string());
231 }
232 _ => {}
233 }
234 }
235
236 Ok(item_names)
237}
238
239fn extract_fieldwork_code(
240 expanded: &str,
241 target_items: &HashSet<String>,
242) -> Result<ExtractedCode, Box<dyn Error>> {
243 let parsed: File = syn::parse_str(expanded)?;
244
245 let mut use_statements = vec![];
246 let mut trait_definitions = vec![];
247 let mut struct_definitions = vec![];
248 let mut enum_definitions = vec![];
249 let mut fieldwork_impls = vec![];
250
251 for item in parsed.items {
252 match item {
253 Item::Use(use_item) => {
254 use_statements.push(use_item);
255 }
256 Item::Trait(item_trait) => {
257 trait_definitions.push(item_trait);
258 }
259 Item::Struct(item_struct) => {
260 if target_items.contains(&item_struct.ident.to_string()) {
261 struct_definitions.push(item_struct);
262 }
263 }
264 Item::Enum(item_enum) => {
265 if target_items.contains(&item_enum.ident.to_string()) {
266 enum_definitions.push(item_enum);
267 }
268 }
269 Item::Impl(item_impl) => {
270 if is_fieldwork_impl(&item_impl, target_items) {
271 fieldwork_impls.push(item_impl);
272 }
273 }
274 _ => {}
275 }
276 }
277
278 Ok(ExtractedCode {
279 trait_definitions,
280 struct_definitions,
281 enum_definitions,
282 fieldwork_impls,
283 use_statements,
284 })
285}
286
287fn is_fieldwork_impl(item_impl: &ItemImpl, target_items: &HashSet<String>) -> bool {
288 if item_impl.trait_.is_some() {
289 return false;
290 }
291
292 if let Type::Path(TypePath { path, .. }) = &*item_impl.self_ty {
293 if let Some(segment) = path.segments.last() {
294 let type_name = segment.ident.to_string();
295 return target_items.contains(&type_name);
296 }
297 }
298
299 false
300}
301
302fn format_extracted_code(extracted: &ExtractedCode) -> Result<String, Box<dyn Error>> {
303 let mut result = vec!["// GENERATED".to_string()];
304
305 for use_statement in &extracted.use_statements {
306 let formatted_use = concise_format(&use_statement.to_token_stream().to_string());
307 for line in formatted_use.lines() {
308 if !line.trim().is_empty()
309 && !line.starts_with("#[prelude_import]")
310 && line != "use fieldwork::Fieldwork;"
311 {
312 result.push(format!("# {line}"));
313 }
314 }
315 }
316
317 for trait_def in &extracted.trait_definitions {
318 let formatted_trait = concise_format(&trait_def.to_token_stream().to_string());
319 for line in formatted_trait.lines() {
320 if !line.trim().is_empty() {
321 result.push(format!("# {line}"));
322 }
323 }
324 }
325
326 for struct_def in &extracted.struct_definitions {
327 let mut cleaned_struct = struct_def.clone();
328 cleaned_struct
329 .attrs
330 .retain(|attr| !is_fieldwork_attr(attr) && !attr.path().is_ident("doc"));
331
332 for field in &mut cleaned_struct.fields {
333 field
334 .attrs
335 .retain(|attr| !is_fieldwork_attr(attr) && !attr.path().is_ident("doc"));
336 }
337
338 let formatted_struct = concise_format(&cleaned_struct.into_token_stream().to_string());
339 for line in formatted_struct.lines() {
340 if !line.trim().is_empty() {
341 result.push(format!("# {line}"));
342 }
343 }
344 }
345
346 for enum_def in &extracted.enum_definitions {
347 let mut cleaned_enum = enum_def.clone();
348 cleaned_enum
349 .attrs
350 .retain(|attr| !is_fieldwork_attr(attr) && !attr.path().is_ident("doc"));
351
352 for variant in &mut cleaned_enum.variants {
353 variant.attrs.retain(|attr| !is_fieldwork_attr(attr));
354 for field in &mut variant.fields {
355 field.attrs.retain(|attr| !is_fieldwork_attr(attr));
356 }
357 }
358
359 let formatted_enum = concise_format(&cleaned_enum.into_token_stream().to_string());
360 for line in formatted_enum.lines() {
361 if !line.trim().is_empty() {
362 result.push(format!("# {line}"));
363 }
364 }
365 }
366
367 for impl_block in &extracted.fieldwork_impls {
368 let formatted_impl = prettyplease::unparse(&syn::parse_quote! { #impl_block });
369 result.push(formatted_impl);
370 }
371
372 Ok(result.join("\n"))
373}
374
375fn is_fieldwork_attr(attr: &Attribute) -> bool {
376 let path = attr.path();
377 path.is_ident("fieldwork") || path.is_ident("field") || path.is_ident("variant")
378}
379
/// Tightens token-stream stringification (`TokenStream::to_string` inserts a
/// space around most punctuation) into conventionally spaced Rust.
/// Replacements are applied in order, so broader space-padded patterns run
/// before the narrower trailing-semicolon cleanup.
fn concise_format(s: &str) -> String {
    [
        (" : ", ": "),
        (" < ", "<"),
        (" > ", ">"),
        (" , ", ", "),
        (" ; ", "; "),
        (" :: ", "::"),
        ("# ", "#"),
        (" ;", ";"),
    ]
    .into_iter()
    .fold(s.to_owned(), |acc, (from, to)| acc.replace(from, to))
}
390
/// Writes `input` (with the Fieldwork import prepended) to `example_file`,
/// runs `cargo expand --example docs-expansion`, and returns the expanded
/// source. On success the temporary example file is removed; on failure it
/// is left on disk, which aids debugging the failing example.
fn expand_single_example(input: &str, example_file: &Path) -> Result<String, Box<dyn Error>> {
    let file_content = format!("use fieldwork::Fieldwork;\n\n{input}");
    fs::write(example_file, file_content)?;

    // The child inherits this process's working directory, so the previous
    // `.current_dir(env::current_dir()?)` was redundant and added a
    // needless fallible call.
    let output = Command::new("cargo")
        .args(["expand", "--example", "docs-expansion"])
        .output()?;

    if !output.status.success() {
        return Err(format!(
            "cargo expand failed: {}",
            String::from_utf8_lossy(&output.stderr)
        )
        .into());
    }
    fs::remove_file(example_file)?;

    if output.stdout.is_empty() {
        return Err("cargo expand was empty, that's probably not right".into());
    }

    Ok(String::from_utf8(output.stdout)?)
}
416
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_extract_item_names() {
        let input = r#"
        #[derive(fieldwork::Fieldwork)]
        struct User { name: String }

        #[derive(fieldwork::Fieldwork)]
        struct Post { title: String }

        #[derive(fieldwork::Fieldwork)]
        enum Status { Active { name: String }, Inactive { name: String } }
        "#;

        let names = extract_item_names_from_input(input).unwrap();

        // Set equality covers both membership and cardinality in one assert.
        let expected: HashSet<String> = ["User", "Post", "Status"]
            .iter()
            .map(|s| s.to_string())
            .collect();
        assert_eq!(names, expected);
    }
}