use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::fs::{self, File};
use std::io::prelude::*;
use std::io::SeekFrom;
use std::path::{Path, PathBuf};
use std::task::Poll;

use crate::core::dependency::DepKind;
use crate::core::manifest::Target;
use crate::core::resolver::CliFeatures;
use crate::core::resolver::HasDevUnits;
use crate::core::PackageIdSpecQuery;
use crate::core::Shell;
use crate::core::Verbosity;
use crate::core::Workspace;
use crate::core::{Package, PackageId, PackageSet, Resolve, SourceId};
use crate::ops::lockfile::LOCKFILE_NAME;
use crate::ops::registry::{infer_registry, RegistryOrIndex};
use crate::sources::path::PathEntry;
use crate::sources::registry::index::{IndexPackage, RegistryDependency};
use crate::sources::{PathSource, CRATES_IO_REGISTRY};
use crate::util::cache_lock::CacheLockMode;
use crate::util::context::JobsConfig;
use crate::util::errors::CargoResult;
use crate::util::restricted_names;
use crate::util::toml::prepare_for_publish;
use crate::util::FileLock;
use crate::util::Filesystem;
use crate::util::GlobalContext;
use crate::util::Graph;
use crate::util::HumanBytes;
use crate::{drop_println, ops};
use anyhow::{bail, Context as _};
use cargo_util::paths;
use cargo_util_schemas::messages;
use flate2::{Compression, GzBuilder};
use tar::{Builder, EntryType, Header, HeaderMode};
use tracing::debug;
use unicase::Ascii as UncasedAscii;

mod vcs;
mod verify;

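/// Message format for the output of `cargo package --list`.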
#[derive(Debug, Clone)]
pub enum PackageMessageFormat {
    Human,
    Json,
}

impl PackageMessageFormat {
    pub const POSSIBLE_VALUES: [&str; 2] = ["human", "json"];

    pub const DEFAULT: &str = "human";
}

impl std::str::FromStr for PackageMessageFormat {
    type Err = anyhow::Error;

    fn from_str(s: &str) -> Result<PackageMessageFormat, anyhow::Error> {
        match s {
            "human" => Ok(PackageMessageFormat::Human),
            "json" => Ok(PackageMessageFormat::Json),
            f => bail!("unknown message format `{f}`"),
        }
    }
}

#[derive(Clone)]
pub struct PackageOpts<'gctx> {
    pub gctx: &'gctx GlobalContext,
    pub list: bool,
    pub fmt: PackageMessageFormat,
    pub check_metadata: bool,
    pub allow_dirty: bool,
    pub include_lockfile: bool,
    pub verify: bool,
    pub jobs: Option<JobsConfig>,
    pub keep_going: bool,
    pub to_package: ops::Packages,
    pub targets: Vec<String>,
    pub cli_features: CliFeatures,
    pub reg_or_index: Option<ops::RegistryOrIndex>,
}

const ORIGINAL_MANIFEST_FILE: &str = "Cargo.toml.orig";
const VCS_INFO_FILE: &str = ".cargo_vcs_info.json";

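/// A file to be added to the generated `.crate` tarball.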
struct ArchiveFile {
    /// The relative path of the file within the archive (not including the top-level directory).
    rel_path: PathBuf,
    /// String variant of [`ArchiveFile::rel_path`], for convenience.
    rel_str: String,
    /// The contents to add to the archive.
    contents: FileContents,
}

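/// Where the contents of an [`ArchiveFile`] come from.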
enum FileContents {
    /// Absolute path to a file on disk to copy into the archive.
    OnDisk(PathBuf),
    /// Contents of a file generated by Cargo.
    Generated(GeneratedFile),
}

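/// A file generated by Cargo during packaging rather than copied from the source directory.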
enum GeneratedFile {
    /// The normalized `Cargo.toml`; the path points at the original manifest it is derived from.
    Manifest(PathBuf),
    /// The generated `Cargo.lock`; the path (if any) points at the existing workspace lockfile.
    Lockfile(Option<PathBuf>),
    /// The `.cargo_vcs_info.json` file, added when the package is in a VCS repository.
    VcsInfo(vcs::VcsInfo),
}

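/// Builds the `.crate` tarball for `pkg` from the prepared list of archive files and
/// returns a lock on the finished file, reporting its size in the shell.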
#[tracing::instrument(skip_all)]
fn create_package(
    ws: &Workspace<'_>,
    pkg: &Package,
    ar_files: Vec<ArchiveFile>,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<FileLock> {
    let gctx = ws.gctx();
    let filecount = ar_files.len();

    for dep in pkg.dependencies() {
        super::check_dep_has_version(dep, false)?;
    }

    let filename = pkg.package_id().tarball_name();
    let dir = ws.target_dir().join("package");
    let mut dst = {
        let tmp = format!(".{}", filename);
        dir.open_rw_exclusive_create(&tmp, gctx, "package scratch space")?
    };

    gctx.shell()
        .status("Packaging", pkg.package_id().to_string())?;
    dst.file().set_len(0)?;
    let uncompressed_size = tar(ws, pkg, local_reg, ar_files, dst.file(), &filename)
        .context("failed to prepare local package for uploading")?;

    dst.seek(SeekFrom::Start(0))?;
    let src_path = dst.path();
    let dst_path = dst.parent().join(&filename);
    fs::rename(&src_path, &dst_path)
        .context("failed to move temporary tarball into final location")?;

    let dst_metadata = dst
        .file()
        .metadata()
        .with_context(|| format!("could not learn metadata for: `{}`", dst_path.display()))?;
    let compressed_size = dst_metadata.len();

    let uncompressed = HumanBytes(uncompressed_size);
    let compressed = HumanBytes(compressed_size);

    let message = format!("{filecount} files, {uncompressed:.1} ({compressed:.1} compressed)");
    // It doesn't really matter if this fails.
    drop(gctx.shell().status("Packaged", message));

    Ok(dst)
}

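/// Packages the selected workspace members into `.crate` tarballs, returning a
/// [`FileLock`] for each generated tarball.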
pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<FileLock>> {
    let specs = &opts.to_package.to_package_id_specs(ws)?;
    // If `-p` was used, check that every spec matches at least one workspace member.
    if let ops::Packages::Packages(_) = opts.to_package {
        for spec in specs.iter() {
            let member_ids = ws.members().map(|p| p.package_id());
            spec.query(member_ids)?;
        }
    }
    let mut pkgs = ws.members_with_features(specs, &opts.cli_features)?;

    // Only keep the packages that the specs actually selected.
    pkgs.retain(|(pkg, _feats)| specs.iter().any(|spec| spec.matches(pkg.package_id())));

    Ok(do_package(ws, opts, pkgs)?
        .into_iter()
        .map(|x| x.2)
        .collect())
}

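/// Like [`package`], but additionally returns the local path-dependency graph
/// between the packaged crates along with their tarballs.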
pub(crate) fn package_with_dep_graph(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'_>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<LocalDependencies<(CliFeatures, FileLock)>> {
    let output = do_package(ws, opts, pkgs)?;

    Ok(local_deps(output.into_iter().map(
        |(pkg, opts, tarball)| (pkg, (opts.cli_features, tarball)),
    )))
}

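/// Core packaging loop: packages each selected crate in dependency order,
/// optionally listing files, registering tarballs in the temporary registry
/// overlay, and verifying the result.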
fn do_package<'a>(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'a>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<Vec<(Package, PackageOpts<'a>, FileLock)>> {
    if ws
        .lock_root()
        .as_path_unlocked()
        .join(LOCKFILE_NAME)
        .exists()
        && opts.include_lockfile
    {
        // Make sure the Cargo.lock is up-to-date and valid.
        let dry_run = false;
        let _ = ops::resolve_ws(ws, dry_run)?;
    }

    let deps = local_deps(pkgs.iter().map(|(p, f)| ((*p).clone(), f.clone())));
    let just_pkgs: Vec<_> = pkgs.iter().map(|p| p.0).collect();

    let mut local_reg = if ws.gctx().cli_unstable().package_workspace {
        // The choice of registry only matters when there are local dependencies
        // to look up, or when the user asked for one explicitly on the CLI.
        let sid = if deps.has_no_dependencies() && opts.reg_or_index.is_none() {
            None
        } else {
            let sid = get_registry(ws.gctx(), &just_pkgs, opts.reg_or_index.clone())?;
            debug!("packaging for registry {}", sid);
            Some(sid)
        };
        let reg_dir = ws.build_dir().join("package").join("tmp-registry");
        sid.map(|sid| TmpRegistry::new(ws.gctx(), reg_dir, sid))
            .transpose()?
    } else {
        None
    };

    // Packages must be created in dependency order: a package's local
    // dependencies have to be in the overlay registry before its lockfile
    // can be generated.
    let sorted_pkgs = deps.sort();
    let mut outputs: Vec<(Package, PackageOpts<'_>, FileLock)> = Vec::new();
    for (pkg, cli_features) in sorted_pkgs {
        let opts = PackageOpts {
            cli_features: cli_features.clone(),
            to_package: ops::Packages::Default,
            ..opts.clone()
        };
        let ar_files = prepare_archive(ws, &pkg, &opts)?;

        if opts.list {
            match opts.fmt {
                PackageMessageFormat::Human => {
                    for ar_file in &ar_files {
                        drop_println!(ws.gctx(), "{}", ar_file.rel_str);
                    }
                }
                PackageMessageFormat::Json => {
                    let message = messages::PackageList {
                        id: pkg.package_id().to_spec(),
                        files: BTreeMap::from_iter(ar_files.into_iter().map(|f| {
                            let file = match f.contents {
                                FileContents::OnDisk(path) => messages::PackageFile::Copy { path },
                                FileContents::Generated(
                                    GeneratedFile::Manifest(path)
                                    | GeneratedFile::Lockfile(Some(path)),
                                ) => messages::PackageFile::Generate { path: Some(path) },
                                FileContents::Generated(
                                    GeneratedFile::VcsInfo(_) | GeneratedFile::Lockfile(None),
                                ) => messages::PackageFile::Generate { path: None },
                            };
                            (f.rel_path, file)
                        })),
                    };
                    let _ = ws.gctx().shell().print_json(&message);
                }
            }
        } else {
            let tarball = create_package(ws, &pkg, ar_files, local_reg.as_ref())?;
            if let Some(local_reg) = local_reg.as_mut() {
                if pkg.publish() != &Some(Vec::new()) {
                    local_reg.add_package(ws, &pkg, &tarball)?;
                }
            }
            outputs.push((pkg, opts, tarball));
        }
    }

    // Verify the tarballs last; by now every local dependency is available
    // through the overlay registry.
    if opts.verify {
        for (pkg, opts, tarball) in &outputs {
            verify::run_verify(ws, pkg, tarball, local_reg.as_ref(), opts)
                .context("failed to verify package tarball")?
        }
    }

    Ok(outputs)
}

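/// Determines which registry the packages are being packaged for, and checks
/// that registry against each package's `package.publish` list.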
fn get_registry(
    gctx: &GlobalContext,
    pkgs: &[&Package],
    reg_or_index: Option<RegistryOrIndex>,
) -> CargoResult<SourceId> {
    let reg_or_index = match reg_or_index.clone() {
        Some(r) => Some(r),
        None => infer_registry(pkgs)?,
    };

    // Validate the registry against the packages' `publish` settings.
    let reg = reg_or_index
        .clone()
        .unwrap_or_else(|| RegistryOrIndex::Registry(CRATES_IO_REGISTRY.to_owned()));
    if let RegistryOrIndex::Registry(reg_name) = reg {
        for pkg in pkgs {
            if let Some(allowed) = pkg.publish().as_ref() {
                // An empty `publish` list means publishing is disallowed entirely;
                // don't error on the registry choice here.
                if !allowed.is_empty() && !allowed.iter().any(|a| a == &reg_name) {
                    bail!(
                        "`{}` cannot be packaged.\n\
                         The registry `{}` is not listed in the `package.publish` value in Cargo.toml.",
                        pkg.name(),
                        reg_name
                    );
                }
            }
        }
    }
    Ok(ops::registry::get_source_id(gctx, reg_or_index.as_ref())?.replacement)
}

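/// The local path dependencies among a set of packages, with a payload of type
/// `T` attached to each package.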
#[derive(Clone, Debug, Default)]
pub(crate) struct LocalDependencies<T> {
    pub packages: HashMap<PackageId, (Package, T)>,
    pub graph: Graph<PackageId, ()>,
}

impl<T: Clone> LocalDependencies<T> {
    pub fn sort(&self) -> Vec<(Package, T)> {
        self.graph
            .sort()
            .into_iter()
            .map(|name| self.packages[&name].clone())
            .collect()
    }

    pub fn has_no_dependencies(&self) -> bool {
        self.graph
            .iter()
            .all(|node| self.graph.edges(node).next().is_none())
    }
}

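/// Builds the graph of local path dependencies between the given packages.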
fn local_deps<T>(packages: impl Iterator<Item = (Package, T)>) -> LocalDependencies<T> {
    let packages: HashMap<PackageId, (Package, T)> = packages
        .map(|(pkg, payload)| (pkg.package_id(), (pkg, payload)))
        .collect();

    // Dependencies have a source id but no package id; match them to our
    // packages by source id, which is unique for path dependencies.
    let source_to_pkg: HashMap<_, _> = packages
        .keys()
        .map(|pkg_id| (pkg_id.source_id(), *pkg_id))
        .collect();

    let mut graph = Graph::new();
    for (pkg, _payload) in packages.values() {
        graph.add(pkg.package_id());
        for dep in pkg.dependencies() {
            // Only local (path) dependencies matter here.
            if !dep.source_id().is_path() {
                continue;
            }

            // A dev-dependency with no version requirement is stripped on
            // publish, so it imposes no ordering constraint.
            if dep.kind() == DepKind::Development && !dep.specified_req() {
                continue;
            };

            if let Some(dep_pkg) = source_to_pkg.get(&dep.source_id()) {
                graph.link(pkg.package_id(), *dep_pkg);
            }
        }
    }

    LocalDependencies { packages, graph }
}

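/// Runs pre-archiving checks and collects the list of files to include in the
/// `.crate` tarball.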
#[tracing::instrument(skip_all)]
fn prepare_archive(
    ws: &Workspace<'_>,
    pkg: &Package,
    opts: &PackageOpts<'_>,
) -> CargoResult<Vec<ArchiveFile>> {
    let gctx = ws.gctx();
    let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), gctx);
    src.load()?;

    if opts.check_metadata {
        check_metadata(pkg, gctx)?;
    }

    if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() {
        gctx.shell().warn(
            "both package.include and package.exclude are specified; \
             the exclude list will be ignored",
        )?;
    }
    let src_files = src.list_files(pkg)?;

    // Check the (git) repository state, getting the current commit hash.
    let vcs_info = vcs::check_repo_state(pkg, &src_files, ws, &opts)?;

    build_ar_list(ws, pkg, src_files, vcs_info, opts.include_lockfile)
}

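/// Builds the final list of [`ArchiveFile`]s, adding the generated
/// `Cargo.toml`, the optional `Cargo.lock`, and the VCS info file.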
#[tracing::instrument(skip_all)]
fn build_ar_list(
    ws: &Workspace<'_>,
    pkg: &Package,
    src_files: Vec<PathEntry>,
    vcs_info: Option<vcs::VcsInfo>,
    include_lockfile: bool,
) -> CargoResult<Vec<ArchiveFile>> {
    let mut result = HashMap::new();
    let root = pkg.root();
    for src_file in &src_files {
        let rel_path = src_file.strip_prefix(&root)?;
        check_filename(rel_path, &mut ws.gctx().shell())?;
        let rel_str = rel_path.to_str().ok_or_else(|| {
            anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display())
        })?;
        match rel_str {
            "Cargo.lock" => continue,
            VCS_INFO_FILE | ORIGINAL_MANIFEST_FILE => anyhow::bail!(
                "invalid inclusion of reserved file name {} in package source",
                rel_str
            ),
            _ => {
                result
                    .entry(UncasedAscii::new(rel_str))
                    .or_insert_with(Vec::new)
                    .push(ArchiveFile {
                        rel_path: rel_path.to_owned(),
                        rel_str: rel_str.to_owned(),
                        contents: FileContents::OnDisk(src_file.to_path_buf()),
                    });
            }
        }
    }

    // The original `Cargo.toml` is stored as `Cargo.toml.orig`, and a
    // normalized `Cargo.toml` is generated in its place.
    if result.remove(&UncasedAscii::new("Cargo.toml")).is_some() {
        result
            .entry(UncasedAscii::new(ORIGINAL_MANIFEST_FILE))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(ORIGINAL_MANIFEST_FILE),
                rel_str: ORIGINAL_MANIFEST_FILE.to_string(),
                contents: FileContents::OnDisk(pkg.manifest_path().to_owned()),
            });
        result
            .entry(UncasedAscii::new("Cargo.toml"))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from("Cargo.toml"),
                rel_str: "Cargo.toml".to_string(),
                contents: FileContents::Generated(GeneratedFile::Manifest(
                    pkg.manifest_path().to_owned(),
                )),
            });
    } else {
        ws.gctx().shell().warn(&format!(
            "no `Cargo.toml` file found when packaging `{}` (note the case of the file name).",
            pkg.name()
        ))?;
    }

    if include_lockfile {
        let lockfile_path = ws.lock_root().as_path_unlocked().join(LOCKFILE_NAME);
        let lockfile_path = lockfile_path.exists().then_some(lockfile_path);
        let rel_str = "Cargo.lock";
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::Lockfile(lockfile_path)),
            });
    }

    if let Some(vcs_info) = vcs_info {
        let rel_str = VCS_INFO_FILE;
        result
            .entry(UncasedAscii::new(rel_str))
            .or_insert_with(Vec::new)
            .push(ArchiveFile {
                rel_path: PathBuf::from(rel_str),
                rel_str: rel_str.to_string(),
                contents: FileContents::Generated(GeneratedFile::VcsInfo(vcs_info)),
            });
    }

    let mut invalid_manifest_field: Vec<String> = vec![];

    let mut result = result.into_values().flatten().collect();
    if let Some(license_file) = &pkg.manifest().metadata().license_file {
        let license_path = Path::new(license_file);
        let abs_file_path = paths::normalize_path(&pkg.root().join(license_path));
        if abs_file_path.is_file() {
            check_for_file_and_add(
                "license-file",
                license_path,
                abs_file_path,
                pkg,
                &mut result,
                ws,
            )?;
        } else {
            error_on_nonexistent_file(
                &pkg,
                &license_path,
                "license-file",
                &mut invalid_manifest_field,
            );
        }
    }
    if let Some(readme) = &pkg.manifest().metadata().readme {
        let readme_path = Path::new(readme);
        let abs_file_path = paths::normalize_path(&pkg.root().join(readme_path));
        if abs_file_path.is_file() {
            check_for_file_and_add("readme", readme_path, abs_file_path, pkg, &mut result, ws)?;
        } else {
            error_on_nonexistent_file(&pkg, &readme_path, "readme", &mut invalid_manifest_field);
        }
    }

    if !invalid_manifest_field.is_empty() {
        return Err(anyhow::anyhow!(invalid_manifest_field.join("\n")));
    }

    for t in pkg
        .manifest()
        .targets()
        .iter()
        .filter(|t| t.is_custom_build())
    {
        if let Some(custom_build_path) = t.src_path().path() {
            let abs_custom_build_path =
                paths::normalize_path(&pkg.root().join(custom_build_path));
            if !abs_custom_build_path.is_file() || !abs_custom_build_path.starts_with(pkg.root())
            {
                error_custom_build_file_not_in_package(pkg, &abs_custom_build_path, t)?;
            }
        }
    }

    result.sort_unstable_by(|a, b| a.rel_path.cmp(&b.rel_path));

    Ok(result)
}

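/// Adds a manifest-referenced file (`license-file` or `readme`) to the archive
/// if it is not already included, warning when it lives outside the package root.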
fn check_for_file_and_add(
    label: &str,
    file_path: &Path,
    abs_file_path: PathBuf,
    pkg: &Package,
    result: &mut Vec<ArchiveFile>,
    ws: &Workspace<'_>,
) -> CargoResult<()> {
    match abs_file_path.strip_prefix(&pkg.root()) {
        Ok(rel_file_path) => {
            if !result.iter().any(|ar| ar.rel_path == rel_file_path) {
                result.push(ArchiveFile {
                    rel_path: rel_file_path.to_path_buf(),
                    rel_str: rel_file_path
                        .to_str()
                        .expect("everything was utf8")
                        .to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
        Err(_) => {
            // The file lies outside the package root; fall back to just its file name.
            let file_name = file_path.file_name().unwrap();
            if result.iter().any(|ar| ar.rel_path == file_name) {
                ws.gctx().shell().warn(&format!(
                    "{} `{}` appears to be a path outside of the package, \
                     but there is already a file named `{}` in the root of the package. \
                     The archived crate will contain the copy in the root of the package. \
                     Update the {} to point to the path relative \
                     to the root of the package to remove this warning.",
                    label,
                    file_path.display(),
                    file_name.to_str().unwrap(),
                    label,
                ))?;
            } else {
                result.push(ArchiveFile {
                    rel_path: PathBuf::from(file_name),
                    rel_str: file_name.to_str().unwrap().to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
    }
    Ok(())
}

fn error_on_nonexistent_file(
    pkg: &Package,
    path: &Path,
    manifest_key_name: &'static str,
    invalid: &mut Vec<String>,
) {
    let rel_msg = if path.is_absolute() {
        "".to_string()
    } else {
        format!(" (relative to `{}`)", pkg.root().display())
    };

    let msg = format!(
        "{manifest_key_name} `{}` does not appear to exist{}.\n\
         Please update the {manifest_key_name} setting in the manifest at `{}`.",
        path.display(),
        rel_msg,
        pkg.manifest_path().display()
    );

    invalid.push(msg);
}

fn error_custom_build_file_not_in_package(
    pkg: &Package,
    path: &Path,
    target: &Target,
) -> CargoResult<Vec<ArchiveFile>> {
    let tip = {
        let description_name = target.description_named();
        if path.is_file() {
            format!(
                "the source file of {description_name} doesn't appear to be a path inside of the package.\n\
                 It is at `{}`, whereas the root of the package is `{}`.\n",
                path.display(),
                pkg.root().display()
            )
        } else {
            format!("the source file of {description_name} doesn't appear to exist.\n")
        }
    };
    let msg = format!(
        "{}\
         This may cause issues during packaging, as module resolution and resources \
         included via macros are often relative to the path of source files.\n\
         Please update the `build` setting in the manifest at `{}` and point to a path \
         inside the root of the package.",
        tip,
        pkg.manifest_path().display()
    );
    anyhow::bail!(msg)
}

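/// Regenerates `Cargo.lock` for the package being packaged, resolving against
/// the temporary registry overlay when one is in use.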
fn build_lock(
    ws: &Workspace<'_>,
    publish_pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<String> {
    let gctx = ws.gctx();
    let orig_resolve = ops::load_pkg_lockfile(ws)?;

    let mut tmp_ws = Workspace::ephemeral(publish_pkg.clone(), ws.gctx(), None, true)?;

    // The overlay registry lets the resolver see workspace packages that were
    // just packaged but have not been published to the real registry yet.
    if let Some(local_reg) = local_reg {
        tmp_ws.add_local_overlay(
            local_reg.upstream,
            local_reg.root.as_path_unlocked().to_owned(),
        );
    }
    let mut tmp_reg = tmp_ws.package_registry()?;

    let mut new_resolve = ops::resolve_with_previous(
        &mut tmp_reg,
        &tmp_ws,
        &CliFeatures::new_all(true),
        HasDevUnits::Yes,
        orig_resolve.as_ref(),
        None,
        &[],
        true,
    )?;

    let pkg_set = ops::get_resolved_packages(&new_resolve, tmp_reg)?;

    if let Some(orig_resolve) = orig_resolve {
        compare_resolve(gctx, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
    }
    check_yanked(
        gctx,
        &pkg_set,
        &new_resolve,
        "consider updating to a version that is not yanked",
    )?;

    ops::resolve_to_string(&tmp_ws, &mut new_resolve)
}

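/// Warns about missing manifest metadata such as `description` or `license`.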
fn check_metadata(pkg: &Package, gctx: &GlobalContext) -> CargoResult<()> {
    let md = pkg.manifest().metadata();

    let mut missing = vec![];

    macro_rules! lacking {
        ($( $($field: ident)||* ),*) => {{
            $(
                if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* {
                    $(missing.push(stringify!($field).replace("_", "-"));)*
                }
            )*
        }}
    }
    lacking!(
        description,
        license || license_file,
        documentation || homepage || repository
    );

    if !missing.is_empty() {
        let mut things = missing[..missing.len() - 1].join(", ");
        // `things` is empty if and only if there is a single missing field,
        // in which case no "or" separator is needed.
        if !things.is_empty() {
            things.push_str(" or ");
        }
        things.push_str(missing.last().unwrap());

        gctx.shell().warn(&format!(
            "manifest has no {things}.\n\
             See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.",
            things = things
        ))?
    }

    Ok(())
}

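/// Writes the gzip-compressed tarball to `dst` and returns the uncompressed
/// size of its contents in bytes.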
fn tar(
    ws: &Workspace<'_>,
    pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
    ar_files: Vec<ArchiveFile>,
    dst: &File,
    filename: &str,
) -> CargoResult<u64> {
    // Prepare the encoder; the tarball name is recorded in the gzip header.
    let filename = Path::new(filename);
    let encoder = GzBuilder::new()
        .filename(paths::path2bytes(filename)?)
        .write(dst, Compression::best());

    // Put all the package files into the compressed archive.
    let mut ar = Builder::new(encoder);
    ar.sparse(false);
    let gctx = ws.gctx();

    let base_name = format!("{}-{}", pkg.name(), pkg.version());
    let base_path = Path::new(&base_name);
    let included = ar_files
        .iter()
        .map(|ar_file| ar_file.rel_path.clone())
        .collect::<Vec<_>>();
    let publish_pkg = prepare_for_publish(pkg, ws, Some(&included))?;

    let mut uncompressed_size = 0;
    for ar_file in ar_files {
        let ArchiveFile {
            rel_path,
            rel_str,
            contents,
        } = ar_file;
        let ar_path = base_path.join(&rel_path);
        gctx.shell()
            .verbose(|shell| shell.status("Archiving", &rel_str))?;
        let mut header = Header::new_gnu();
        match contents {
            FileContents::OnDisk(disk_path) => {
                let mut file = File::open(&disk_path).with_context(|| {
                    format!("failed to open for archiving: `{}`", disk_path.display())
                })?;
                let metadata = file.metadata().with_context(|| {
                    format!("could not learn metadata for: `{}`", disk_path.display())
                })?;
                header.set_metadata_in_mode(&metadata, HeaderMode::Deterministic);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, &mut file)
                    .with_context(|| {
                        format!("could not archive source file `{}`", disk_path.display())
                    })?;
                uncompressed_size += metadata.len() as u64;
            }
            FileContents::Generated(generated_kind) => {
                let contents = match generated_kind {
                    GeneratedFile::Manifest(_) => {
                        publish_pkg.manifest().to_normalized_contents()?
                    }
                    GeneratedFile::Lockfile(_) => build_lock(ws, &publish_pkg, local_reg)?,
                    GeneratedFile::VcsInfo(ref s) => serde_json::to_string_pretty(s)?,
                };
                header.set_entry_type(EntryType::file());
                header.set_mode(0o644);
                header.set_size(contents.len() as u64);
                // Use a fixed mtime so the generated entries are deterministic.
                header.set_mtime(1);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, contents.as_bytes())
                    .with_context(|| format!("could not archive source file `{}`", rel_str))?;
                uncompressed_size += contents.len() as u64;
            }
        }
    }

    let encoder = ar.into_inner()?;
    encoder.finish()?;
    Ok(uncompressed_size)
}

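/// In verbose mode, notes any packages that appear in the newly generated
/// `Cargo.lock` but not in the workspace's original lockfile.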
fn compare_resolve(
    gctx: &GlobalContext,
    current_pkg: &Package,
    orig_resolve: &Resolve,
    new_resolve: &Resolve,
) -> CargoResult<()> {
    if gctx.shell().verbosity() != Verbosity::Verbose {
        return Ok(());
    }
    let new_set: BTreeSet<PackageId> = new_resolve.iter().collect();
    let orig_set: BTreeSet<PackageId> = orig_resolve.iter().collect();
    let added = new_set.difference(&orig_set);
    // Collect removed entries to use as hints below when explaining changes.
    let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect();
    for pkg_id in added {
        if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() {
            // Skip the package that is being packaged itself.
            continue;
        }
        // Look for the same name and version coming from a different source.
        let removed_candidates: Vec<&PackageId> = removed
            .iter()
            .filter(|orig_pkg_id| {
                orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version()
            })
            .cloned()
            .collect();
        let extra = match removed_candidates.len() {
            0 => {
                // No entry with the same version; list any other versions that were removed.
                let previous_versions: Vec<&PackageId> = removed
                    .iter()
                    .filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name())
                    .cloned()
                    .collect();
                match previous_versions.len() {
                    0 => String::new(),
                    1 => format!(
                        ", previous version was `{}`",
                        previous_versions[0].version()
                    ),
                    _ => format!(
                        ", previous versions were: {}",
                        previous_versions
                            .iter()
                            .map(|pkg_id| format!("`{}`", pkg_id.version()))
                            .collect::<Vec<_>>()
                            .join(", ")
                    ),
                }
            }
            1 => {
                // The same version now comes from a different source.
                format!(
                    ", was originally sourced from `{}`",
                    removed_candidates[0].source_id()
                )
            }
            _ => {
                let comma_list = removed_candidates
                    .iter()
                    .map(|pkg_id| format!("`{}`", pkg_id.source_id()))
                    .collect::<Vec<_>>()
                    .join(", ");
                format!(
                    ", was originally sourced from one of these sources: {}",
                    comma_list
                )
            }
        };
        let msg = format!(
            "package `{}` added to the packaged Cargo.lock file{}",
            pkg_id, extra
        );
        gctx.shell().note(msg)?;
    }
    Ok(())
}

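/// Warns about any resolved packages that have been yanked from their registry.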
pub fn check_yanked(
    gctx: &GlobalContext,
    pkg_set: &PackageSet<'_>,
    resolve: &Resolve,
    hint: &str,
) -> CargoResult<()> {
    // Checking the yanked status involves taking the package cache lock.
    let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?;

    let mut sources = pkg_set.sources_mut();
    let mut pending: Vec<PackageId> = resolve.iter().collect();
    let mut results = Vec::new();
    for (_id, source) in sources.sources_mut() {
        source.invalidate_cache();
    }
    while !pending.is_empty() {
        pending.retain(|pkg_id| {
            if let Some(source) = sources.get_mut(pkg_id.source_id()) {
                match source.is_yanked(*pkg_id) {
                    Poll::Ready(result) => results.push((*pkg_id, result)),
                    Poll::Pending => return true,
                }
            }
            false
        });
        for (_id, source) in sources.sources_mut() {
            source.block_until_ready()?;
        }
    }

    for (pkg_id, is_yanked) in results {
        if is_yanked? {
            gctx.shell().warn(format!(
                "package `{}` in Cargo.lock is yanked in registry `{}`, {}",
                pkg_id,
                pkg_id.source_id().display_registry_name(),
                hint
            ))?;
        }
    }
    Ok(())
}

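/// Checks that a filename can be safely packaged and later unpacked on all
/// supported platforms.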
fn check_filename(file: &Path, shell: &mut Shell) -> CargoResult<()> {
    let Some(name) = file.file_name() else {
        return Ok(());
    };
    let Some(name) = name.to_str() else {
        anyhow::bail!(
            "path does not have a unicode filename which may not unpack \
             on all platforms: {}",
            file.display()
        )
    };
    let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
    if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
        anyhow::bail!(
            "cannot package a filename with a special character `{}`: {}",
            c,
            file.display()
        )
    }
    if restricted_names::is_windows_reserved_path(file) {
        shell.warn(format!(
            "file {} is a reserved Windows filename, \
             it will not work on Windows platforms",
            file.display()
        ))?;
    }
    Ok(())
}

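/// A temporary local registry overlay that lets not-yet-published workspace
/// dependencies be resolved and verified against.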
struct TmpRegistry<'a> {
    gctx: &'a GlobalContext,
    upstream: SourceId,
    root: Filesystem,
    _lock: FileLock,
}

impl<'a> TmpRegistry<'a> {
    fn new(gctx: &'a GlobalContext, root: Filesystem, upstream: SourceId) -> CargoResult<Self> {
        root.create_dir()?;
        let _lock = root.open_rw_exclusive_create(".cargo-lock", gctx, "temporary registry")?;
        let slf = Self {
            gctx,
            root,
            upstream,
            _lock,
        };
        // Clear out any stale index left over from a previous run.
        let index_path = slf.index_path().into_path_unlocked();
        if index_path.exists() {
            paths::remove_dir_all(index_path)?;
        }
        slf.index_path().create_dir()?;
        Ok(slf)
    }

    fn index_path(&self) -> Filesystem {
        self.root.join("index")
    }

    fn add_package(
        &mut self,
        ws: &Workspace<'_>,
        package: &Package,
        tar: &FileLock,
    ) -> CargoResult<()> {
        debug!(
            "adding package {}@{} to local overlay at {}",
            package.name(),
            package.version(),
            self.root.as_path_unlocked().display()
        );
        {
            let mut tar_copy = self.root.open_rw_exclusive_create(
                package.package_id().tarball_name(),
                self.gctx,
                "temporary package registry",
            )?;
            tar.file().seek(SeekFrom::Start(0))?;
            std::io::copy(&mut tar.file(), &mut tar_copy)?;
            tar_copy.flush()?;
        }

        let new_crate = super::registry::prepare_transmit(self.gctx, ws, package, self.upstream)?;

        tar.file().seek(SeekFrom::Start(0))?;
        let cksum = cargo_util::Sha256::new()
            .update_file(tar.file())?
            .finish_hex();

        let deps: Vec<_> = new_crate
            .deps
            .into_iter()
            .map(|dep| {
                let name = dep
                    .explicit_name_in_toml
                    .clone()
                    .unwrap_or_else(|| dep.name.clone())
                    .into();
                let package = dep
                    .explicit_name_in_toml
                    .as_ref()
                    .map(|_| dep.name.clone().into());
                RegistryDependency {
                    name,
                    req: dep.version_req.into(),
                    features: dep.features.into_iter().map(|x| x.into()).collect(),
                    optional: dep.optional,
                    default_features: dep.default_features,
                    target: dep.target.map(|x| x.into()),
                    kind: Some(dep.kind.into()),
                    registry: dep.registry.map(|x| x.into()),
                    package,
                    public: None,
                    artifact: dep
                        .artifact
                        .map(|xs| xs.into_iter().map(|x| x.into()).collect()),
                    bindep_target: dep.bindep_target.map(|x| x.into()),
                    lib: dep.lib,
                }
            })
            .collect();

        let index_line = serde_json::to_string(&IndexPackage {
            name: new_crate.name.into(),
            vers: package.version().clone(),
            deps,
            features: new_crate
                .features
                .into_iter()
                .map(|(k, v)| (k.into(), v.into_iter().map(|x| x.into()).collect()))
                .collect(),
            features2: None,
            cksum,
            yanked: None,
            links: new_crate.links.map(|x| x.into()),
            rust_version: None,
            v: Some(2),
        })?;

        let file =
            cargo_util::registry::make_dep_path(&package.name().as_str().to_lowercase(), false);
        let mut dst = self.index_path().open_rw_exclusive_create(
            file,
            self.gctx,
            "temporary package registry",
        )?;
        dst.write_all(index_line.as_bytes())?;
        Ok(())
    }
}