Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Justfile
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ fmt:
just --unstable --fmt
shfmt -w .
nix fmt .
cargo fmt

lint:
cargo clippy --fix --allow-dirty --allow-staged
Expand Down
1 change: 0 additions & 1 deletion cargo-sync-readme2/src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -114,4 +114,3 @@ pub struct Package {
pub metadata: Metadata,
pub rustdoc_json: Utf8PathBuf,
}

12 changes: 9 additions & 3 deletions cargo-sync-readme2/src/content/badge.rs
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,9 @@ pub fn create(package: &Package) -> String {
badges.push(format!("![Crates.io Downloads](https://img.shields.io/crates/dv/{name}/{version}.svg?&label=downloads&style={badge_style})"));
}

let repository = repository.as_ref().and_then(|r| r.strip_prefix("https://github.com/"));
let repository = repository
.as_ref()
.and_then(|r| r.strip_prefix("https://github.com/"));

match (&metadata.badges.codecov, repository) {
(Codecov::Simple(false), _) => {}
Expand All @@ -56,7 +58,12 @@ pub fn create(package: &Package) -> String {
}
}

for CustomBadge { link, name: text, url } in &metadata.custom_badges {
for CustomBadge {
link,
name: text,
url,
} in &metadata.custom_badges
{
let badge = format!("![{text}]({url})");
if let Some(link) = link {
badges.push(format!("[{badge}]({link})"))
Expand All @@ -67,4 +74,3 @@ pub fn create(package: &Package) -> String {

badges.join("\n")
}

1 change: 0 additions & 1 deletion cargo-sync-readme2/src/content/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,4 +17,3 @@ pub fn create(package: &Package) -> anyhow::Result<Content> {
badge: badge::create(package),
})
}

33 changes: 22 additions & 11 deletions cargo-sync-readme2/src/content/rustdoc/code_block.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,9 @@ use std::borrow::Cow;

use pulldown_cmark::{CodeBlockKind, CowStr, Event, Tag, TagEnd};

pub(super) fn convert<'a, 'b>(events: impl IntoIterator<Item = Event<'a>> + 'b) -> impl Iterator<Item = Event<'a>> + 'b {
pub(super) fn convert<'a, 'b>(
events: impl IntoIterator<Item = Event<'a>> + 'b,
) -> impl Iterator<Item = Event<'a>> + 'b {
let mut in_codeblock = None;
events.into_iter().map(move |mut event| {
if let Some(is_rust) = in_codeblock {
Expand Down Expand Up @@ -61,19 +63,29 @@ pub(super) fn convert<'a, 'b>(events: impl IntoIterator<Item = Event<'a>> + 'b)
}

/// Returns `true` when `tag` is a rustdoc code-fence attribute rather
/// than a language name.
///
/// Recognised attributes are the fixed rustdoc set (the empty tag,
/// `ignore`, `should_panic`, `no_run`, `compile_fail`,
/// `standalone_crate`, `test_harness`) plus `edition` followed by
/// exactly four ASCII digits (e.g. `edition2021`).
fn is_attribute_tag(tag: &str) -> bool {
    matches!(
        tag,
        "" | "ignore"
            | "should_panic"
            | "no_run"
            | "compile_fail"
            | "standalone_crate"
            | "test_harness"
    ) || tag
        .strip_prefix("edition")
        // Accept only `edition` + four digits; anything else (e.g.
        // "edition21" or "editionfoo") is treated as a language tag.
        .is_some_and(|year| year.len() == 4 && year.chars().all(|ch| ch.is_ascii_digit()))
}

fn update_codeblock_tag(tag: &mut CowStr<'_>) -> bool {
let mut tag_count = 0;
let is_rust = tag.split(',').filter(|tag| !is_attribute_tag(tag)).all(|tag| {
tag_count += 1;
tag == "rust"
});
let is_rust = tag
.split(',')
.filter(|tag| !is_attribute_tag(tag))
.all(|tag| {
tag_count += 1;
tag == "rust"
});
if is_rust && tag_count == 0 {
if tag.is_empty() {
*tag = "rust".into();
Expand All @@ -83,4 +95,3 @@ fn update_codeblock_tag(tag: &mut CowStr<'_>) -> bool {
}
is_rust
}

5 changes: 3 additions & 2 deletions cargo-sync-readme2/src/content/rustdoc/heading.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
use pulldown_cmark::{Event, Tag, TagEnd};

pub(super) fn convert<'a, 'b>(events: impl IntoIterator<Item = Event<'a>> + 'b) -> impl Iterator<Item = Event<'a>> + 'b {
pub(super) fn convert<'a, 'b>(
events: impl IntoIterator<Item = Event<'a>> + 'b,
) -> impl Iterator<Item = Event<'a>> + 'b {
use pulldown_cmark::HeadingLevel::*;
events.into_iter().map(|mut event| {
match &mut event {
Expand All @@ -19,4 +21,3 @@ pub(super) fn convert<'a, 'b>(events: impl IntoIterator<Item = Event<'a>> + 'b)
event
})
}

57 changes: 38 additions & 19 deletions cargo-sync-readme2/src/content/rustdoc/intra_link.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,9 @@ use std::hash::BuildHasher;
use std::rc::Rc;

use pulldown_cmark::{BrokenLink, CowStr, Event, Options, Tag};
use rustdoc_types::{Crate, Id, Item, ItemEnum, ItemKind, ItemSummary, MacroKind, StructKind, VariantKind};
use rustdoc_types::{
Crate, Id, Item, ItemEnum, ItemKind, ItemSummary, MacroKind, StructKind, VariantKind,
};

trait CowStrExt<'a> {
fn as_str(&'a self) -> &'a str;
Expand Down Expand Up @@ -36,7 +38,10 @@ impl Parser<(), ()> {
item: &'a Item,
local_html_root_url: &str,
mappings: &BTreeMap<String, String>,
) -> Parser<impl FnMut(BrokenLink<'_>) -> Option<BrokenLinkPair<'a>>, impl FnMut(Event<'a>) -> Option<Event<'a>>> {
) -> Parser<
impl FnMut(BrokenLink<'_>) -> Option<BrokenLinkPair<'a>>,
impl FnMut(Event<'a>) -> Option<Event<'a>>,
> {
let url_map = Rc::new(resolve_links(doc, item, local_html_root_url, mappings));

let broken_link_callback = {
Expand Down Expand Up @@ -64,8 +69,12 @@ where
where
'a: 'b,
{
pulldown_cmark::Parser::new_with_broken_link_callback(doc, Options::all(), Some(&mut self.broken_link_callback))
.filter_map(&mut self.iterator_map)
pulldown_cmark::Parser::new_with_broken_link_callback(
doc,
Options::all(),
Some(&mut self.broken_link_callback),
)
.filter_map(&mut self.iterator_map)
}
}

Expand Down Expand Up @@ -147,7 +156,11 @@ fn extra_paths<'doc, S: BuildHasher + Default>(
let mut heap: BinaryHeap<HeapItem<'_>> = index
.iter()
.map(|(id, item)| {
let depth = if paths.contains_key(id) { 0 } else { usize::MAX };
let depth = if paths.contains_key(id) {
0
} else {
usize::MAX
};
HeapItem {
depth: Reverse(depth),
id,
Expand Down Expand Up @@ -262,7 +275,10 @@ fn item_children<'doc>(parent: &'doc Item) -> Option<Box<dyn Iterator<Item = &'d
}
}

fn convert_link<'a>(url_map: &BTreeMap<&str, Option<String>>, mut event: Event<'a>) -> Option<Event<'a>> {
fn convert_link<'a>(
url_map: &BTreeMap<&str, Option<String>>,
mut event: Event<'a>,
) -> Option<Event<'a>> {
if let Event::Start(Tag::Link { dest_url: url, .. }) = &mut event
&& let Some(full_url) = url_map.get(url.as_ref())
{
Expand Down Expand Up @@ -298,14 +314,16 @@ fn id_to_url<S: BuildHasher + Default>(
match (&item.kind, item.path.as_slice()) {
(ItemKind::Module, ps) => join(ps, format_args!("index.html")),
(ItemKind::Struct, [ps @ .., name]) => join(ps, format_args!("struct.{name}.html")),
(ItemKind::StructField, [ps @ .., struct_name, field]) => {
join(ps, format_args!("struct.{struct_name}.html#structfield.{field}"))
}
(ItemKind::StructField, [ps @ .., struct_name, field]) => join(
ps,
format_args!("struct.{struct_name}.html#structfield.{field}"),
),
(ItemKind::Union, [ps @ .., name]) => join(ps, format_args!("union.{name}.html")),
(ItemKind::Enum, [ps @ .., name]) => join(ps, format_args!("enum.{name}.html")),
(ItemKind::Variant, [ps @ .., enum_name, variant_name]) => {
join(ps, format_args!("enum.{enum_name}.html#variant.{variant_name}"))
}
(ItemKind::Variant, [ps @ .., enum_name, variant_name]) => join(
ps,
format_args!("enum.{enum_name}.html#variant.{variant_name}"),
),
(ItemKind::Function, [ps @ .., name]) => join(ps, format_args!("fn.{name}.html")),
(ItemKind::TypeAlias, [ps @ .., name]) => join(ps, format_args!("type.{name}.html")),
(ItemKind::Constant, [ps @ .., name]) => join(ps, format_args!("constant.{name}.html")),
Expand All @@ -314,12 +332,14 @@ fn id_to_url<S: BuildHasher + Default>(
(ItemKind::Macro, [ps @ .., name]) => join(ps, format_args!("macro.{name}.html")),
(ItemKind::ProcAttribute, [ps @ .., name]) => join(ps, format_args!("attr.{name}.html")),
(ItemKind::ProcDerive, [ps @ .., name]) => join(ps, format_args!("derive.{name}.html")),
(ItemKind::AssocConst, [ps @ .., trait_name, const_name]) => {
join(ps, format_args!("trait.{trait_name}.html#associatedconstant.{const_name}"))
}
(ItemKind::AssocType, [ps @ .., trait_name, type_name]) => {
join(ps, format_args!("trait.{trait_name}.html#associatedtype.{type_name}"))
}
(ItemKind::AssocConst, [ps @ .., trait_name, const_name]) => join(
ps,
format_args!("trait.{trait_name}.html#associatedconstant.{const_name}"),
),
(ItemKind::AssocType, [ps @ .., trait_name, type_name]) => join(
ps,
format_args!("trait.{trait_name}.html#associatedtype.{type_name}"),
),
(ItemKind::Primitive, [ps @ .., name]) => join(ps, format_args!("primitive.{name}.html")),
(item, path) => {
eprintln!("unexpected intra-doc link item & path found; path={path:?}, item={item:?}");
Expand Down Expand Up @@ -359,4 +379,3 @@ fn item_summary<'doc, S: BuildHasher + Default>(
}
None
}

26 changes: 17 additions & 9 deletions cargo-sync-readme2/src/content/rustdoc/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,16 +11,25 @@ pub(super) fn create(package: &Package) -> anyhow::Result<String> {
let doc_string = std::fs::read_to_string(&package.rustdoc_json).context("read rustdoc")?;
let doc: Crate = serde_json::from_str(&doc_string).context("parse rustdoc")?;
let root = doc.index.get(&doc.root).unwrap();
let local_html_root_url = package.metadata.rustdoc_html_root_url.clone().unwrap_or_else(|| {
format!(
"https://docs.rs/{}/{}",
package.name,
doc.crate_version.as_ref().unwrap_or(&package.version)
)
});
let local_html_root_url = package
.metadata
.rustdoc_html_root_url
.clone()
.unwrap_or_else(|| {
format!(
"https://docs.rs/{}/{}",
package.name,
doc.crate_version.as_ref().unwrap_or(&package.version)
)
});

let root_doc = extract_doc(root);
let mut parser = intra_link::Parser::new(&doc, root, &local_html_root_url, &package.metadata.rustdoc_mappings);
let mut parser = intra_link::Parser::new(
&doc,
root,
&local_html_root_url,
&package.metadata.rustdoc_mappings,
);
let events = parser.events(&root_doc);
let events = heading::convert(events);
let events = code_block::convert(events);
Expand All @@ -38,4 +47,3 @@ pub(super) fn create(package: &Package) -> anyhow::Result<String> {
/// Returns the item's doc-comment text as an owned `String`, or an
/// empty string when the item carries no docs.
fn extract_doc(item: &Item) -> String {
    match &item.docs {
        Some(docs) => docs.clone(),
        None => String::new(),
    }
}

1 change: 0 additions & 1 deletion cargo-sync-readme2/src/content/title.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,3 @@ use crate::config::Package;
/// Renders the README title line for `package`: a level-1 Markdown
/// heading containing the crate name.
pub(super) fn create(package: &Package) -> String {
    let mut title = String::from("# ");
    // `to_string` goes through the same `Display` impl `format!` used,
    // so the rendered name is unchanged.
    title.push_str(&package.name.to_string());
    title
}

38 changes: 28 additions & 10 deletions cargo-sync-readme2/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -134,8 +134,8 @@ fn main() {
}

fn load_package(cargo_toml: &Utf8PathBuf, rustdoc_json: Utf8PathBuf) -> anyhow::Result<Package> {
let cargo_toml =
cargo_toml::Manifest::<ManifestMetadata>::from_path_with_metadata(cargo_toml).context("cargo toml")?;
let cargo_toml = cargo_toml::Manifest::<ManifestMetadata>::from_path_with_metadata(cargo_toml)
.context("cargo toml")?;
let pkg = cargo_toml.package();

Ok(Package {
Expand All @@ -157,7 +157,8 @@ fn render_readme(package: &Package, readme_path: &Utf8Path) -> anyhow::Result<(S

fn sync(args: SyncArgs) -> anyhow::Result<()> {
let package = load_package(&args.cargo_toml, args.rustdoc_json)?;
let (_, rendered) = render_readme(&package, &args.readme_md).with_context(|| args.readme_md.to_string())?;
let (_, rendered) =
render_readme(&package, &args.readme_md).with_context(|| args.readme_md.to_string())?;
std::fs::write(&args.readme_md, rendered).context("write readme")?;
println!("synced {}", args.readme_md);
Ok(())
Expand All @@ -169,7 +170,8 @@ fn test(args: TestArgs) -> anyhow::Result<()> {
}

fn test_package(package: &Package, readme_path: &Utf8Path) -> anyhow::Result<()> {
let (source, rendered) = render_readme(package, readme_path).with_context(|| readme_path.to_string())?;
let (source, rendered) =
render_readme(package, readme_path).with_context(|| readme_path.to_string())?;

if rendered == source {
println!("readme matches render: {}", readme_path);
Expand Down Expand Up @@ -224,20 +226,33 @@ fn build_rustdoc_json(packages: &[WorkspacePackage], target_dir: &Utf8Path) -> a
return Ok(());
}

println!("building rustdoc json for {}", packages.iter().map(|p| p.name.clone()).collect::<Vec<_>>().join(", "));
println!(
"building rustdoc json for {}",
packages
.iter()
.map(|p| p.name.clone())
.collect::<Vec<_>>()
.join(", ")
);

let mut cmd = ProcessCommand::new("cargo");
cmd.env("RUSTC_BOOTSTRAP", "1")
.env("RUSTDOCFLAGS", "-Z unstable-options --output-format json")
.arg("doc")
.arg("--no-deps")
.arg("--target-dir").arg(target_dir);
.arg("--target-dir")
.arg(target_dir);

let mut features = Vec::new();

for pkg in packages {
cmd.args(["-p", &pkg.name]);
features.extend(pkg.metadata.features.iter().map(|f| format!("{}/{f}", pkg.name)));
features.extend(
pkg.metadata
.features
.iter()
.map(|f| format!("{}/{f}", pkg.name)),
);
}

if !features.is_empty() {
Expand Down Expand Up @@ -267,7 +282,10 @@ fn workspace(args: WorkspaceArgs) -> anyhow::Result<()> {

for pkg in &packages {
let json_name = pkg.name.replace('-', "_");
let rustdoc_json = args.target_dir.join("doc").join(format!("{}.json", json_name));
let rustdoc_json = args
.target_dir
.join("doc")
.join(format!("{}.json", json_name));
let package = load_package(&pkg.manifest_path, rustdoc_json)?;
let readme_path = pkg.manifest_path.parent().unwrap().join(&pkg.readme_path);

Expand All @@ -292,7 +310,8 @@ fn workspace(args: WorkspaceArgs) -> anyhow::Result<()> {

fn sync_package(package: &Package, readme_path: &Utf8Path) -> anyhow::Result<()> {
let readme_path_buf = readme_path.to_path_buf();
let (_, rendered) = render_readme(package, &readme_path_buf).with_context(|| readme_path.to_string())?;
let (_, rendered) =
render_readme(package, &readme_path_buf).with_context(|| readme_path.to_string())?;
let original = std::fs::read_to_string(readme_path).context("read original readme")?;
if original == rendered {
println!("readme is already in sync: {}", readme_path);
Expand Down Expand Up @@ -352,4 +371,3 @@ fn diff(old: &str, new: &str) -> String {

output
}

Loading
Loading