diff --git a/doc/repository-spec-v2.md b/doc/repository-spec-v2.md index b65b4d2f6..06ad714eb 100644 --- a/doc/repository-spec-v2.md +++ b/doc/repository-spec-v2.md @@ -120,7 +120,7 @@ parse the `<hosted-url>`. The `dart pub` client allows users to save an opaque `<token>` for each `<hosted-url>`. When the `dart pub` client makes a request to a `<hosted-url>` for which it has a `<token>` stored, it will attach an `Authorization` header -as follows: +as follows: * `Authorization: Bearer <token>` @@ -229,6 +229,7 @@ server, this could work in many different ways. "version": "<version>", "retracted": true || false, /* optional field, false if omitted */ "archive_url": "https://.../archive.tar.gz", + "archive_sha256": "95cbaad58e2cf32d1aa852f20af1fcda1820ead92a4b1447ea7ba1ba18195d27", "pubspec": { /* pubspec contents as JSON object */ } @@ -238,6 +239,7 @@ server, this could work in many different ways. "version": "<version>", "retracted": true || false, /* optional field, false if omitted */ "archive_url": "https://.../archive.tar.gz", + "archive_sha256": "95cbaad58e2cf32d1aa852f20af1fcda1820ead92a4b1447ea7ba1ba18195d27", "pubspec": { /* pubspec contents as JSON object */ } @@ -256,6 +258,15 @@ parameters. This allows for the server to return signed-URLs for S3, GCS or other blob storage service. If temporary URLs are returned it is wise to not set expiration to less than 25 minutes (to allow for retries and clock drift). +The `archive_sha256` should be the hex-encoded sha256 checksum of the file at +`archive_url`. It is an optional field that allows the pub client to verify the +integrity of the downloaded archive. + +The `archive_sha256` also provides an easy way for clients to detect if +something has changed on the server. In the absence of this field the client can +still download the archive to obtain a checksum and detect changes to the +archive. + If `<hosted-url>` for the server returning `archive_url` is a prefix of `archive_url`, then the `Authorization: Bearer <token>` is also included when `archive_url` is requested. Example: if `https://pub.example.com/path` returns diff --git a/lib/src/command/dependency_services.dart b/lib/src/command/dependency_services.dart index 193f34513..13c6e7013 100644 --- a/lib/src/command/dependency_services.dart +++ b/lib/src/command/dependency_services.dart @@ -24,6 +24,7 @@ import '../pubspec.dart'; import '../pubspec_utils.dart'; import '../solver.dart'; import '../source/git.dart'; +import '../source/hosted.dart'; import '../system_cache.dart'; import '../utils.dart'; @@ -357,6 +358,7 @@ class DependencyServicesApplyCommand extends PubCommand { : null; final lockFileYaml = lockFile == null ? null : loadYaml(lockFile); final lockFileEditor = lockFile == null ? null : YamlEditor(lockFile); + final hasContentHashes = _lockFileHasContentHashes(lockFileYaml); for (final p in toApply) { final targetPackage = p.name; final targetVersion = p.version; @@ -394,6 +396,16 @@ class DependencyServicesApplyCommand extends PubCommand { lockFileYaml['packages'].containsKey(targetPackage)) { lockFileEditor.update( ['packages', targetPackage, 'version'], targetVersion.toString()); + // Remove the now outdated content-hash - it will be restored below + // after resolution.
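As an illustration of the `archive_sha256` field added to the spec above, here is a minimal, hypothetical Dart sketch of how a client could verify a downloaded archive against the hex-encoded checksum from the version listing. It assumes `package:http` and `package:crypto`; the helper name and error handling are illustrative and not part of this change.

```dart
import 'dart:io';

import 'package:crypto/crypto.dart';
import 'package:http/http.dart' as http;

/// Downloads [archiveUrl] and verifies it against the hex-encoded
/// `archive_sha256` reported by the version listing.
Future<List<int>> fetchAndVerifyArchive(
    Uri archiveUrl, String archiveSha256) async {
  final response = await http.get(archiveUrl);
  if (response.statusCode != 200) {
    throw HttpException('Failed to fetch archive', uri: archiveUrl);
  }
  // Digest.toString() is the lower-case hex encoding of the digest bytes.
  final actualSha256 = sha256.convert(response.bodyBytes).toString();
  if (actualSha256 != archiveSha256.toLowerCase()) {
    throw StateError('Content of $archiveUrl does not match archive_sha256: '
        'expected $archiveSha256, got $actualSha256.');
  }
  return response.bodyBytes;
}
```

A real client would stream the body to disk and hash it incrementally, as the pub implementation further below does, but the comparison itself is the same.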
+ if (lockFileEditor + .parseAt(['packages', targetPackage, 'description']) + .value + .containsKey('sha256')) { + lockFileEditor.remove( + ['packages', targetPackage, 'description', 'sha256'], + ); + } } else if (targetRevision != null && lockFileYaml['packages'].containsKey(targetPackage)) { final ref = entrypoint.lockFile.packages[targetPackage]!.toRef(); @@ -457,8 +469,58 @@ class DependencyServicesApplyCommand extends PubCommand { writeTextFile(entrypoint.pubspecPath, updatedPubspec); } // Only if we originally had a lock-file we write the resulting lockfile back. - if (lockFileEditor != null) { - entrypoint.saveLockFile(solveResult); + if (updatedLockfile != null) { + final updatedPackages = []; + for (var package in solveResult.packages) { + if (package.isRoot) continue; + final description = package.description; + + // Handle content-hashes of hosted dependencies. + if (description is ResolvedHostedDescription) { + // Ensure we get content-hashes if the original lock-file had + // them. + if (hasContentHashes) { + if (description.sha256 == null) { + // We removed the hash above before resolution - as we get the + // locked id back we need to find the content-hash from the + // version listing. + // + // `pub get` gets this version-listing from the downloaded + // archive but we don't want to download all archives - so we + // copy it from the version listing. + package = (await cache.getVersions(package.toRef())) + .firstWhere((id) => id == package, orElse: () => package); + if ((package.description as ResolvedHostedDescription) + .sha256 == + null) { + // This happens when we resolved a package from a legacy + // server not providing archive_sha256. As a side-effect of + // downloading the package we compute and store the sha256. + package = await cache.downloadPackage(package); + } + } + } else { + // The original pubspec.lock did not have content-hashes. Remove + // any content hash, so we don't start adding them. + package = PackageId( + package.name, + package.version, + description.withSha256(null), + ); + } + } + updatedPackages.add(package); + } + + final newLockFile = LockFile( + updatedPackages, + sdkConstraints: updatedLockfile.sdkConstraints, + mainDependencies: pubspec.dependencies.keys.toSet(), + devDependencies: pubspec.devDependencies.keys.toSet(), + overriddenDependencies: pubspec.dependencyOverrides.keys.toSet(), + ); + + newLockFile.writeToFile(entrypoint.lockFilePath, cache); } }, ); @@ -541,3 +603,23 @@ VersionConstraint _compatibleWithIfPossible(VersionRange versionRange) { } return versionRange; } + +/// `true` iff any of the packages described by the [lockfile] has a +/// content-hash. +/// +/// Undefined for invalid lock files, but mostly `true`. +bool _lockFileHasContentHashes(dynamic lockfile) { + if (lockfile is! Map) return true; + final packages = lockfile['packages']; + if (packages is! Map) return true; + + /// We consider an empty lockfile ready to get content-hashes. + if (packages.isEmpty) return true; + for (final package in packages.values) { + if (package is! Map) return true; + final descriptor = package['description']; + if (descriptor is! 
Map) return true; + if (descriptor['sha256'] != null) return true; + } + return false; +} diff --git a/lib/src/command/get.dart b/lib/src/command/get.dart index 0be67a067..251028e36 100644 --- a/lib/src/command/get.dart +++ b/lib/src/command/get.dart @@ -49,6 +49,7 @@ class GetCommand extends PubCommand { log.warning(log.yellow( 'The --packages-dir flag is no longer used and does nothing.')); } + await entrypoint.acquireDependencies( SolveType.get, dryRun: argResults['dry-run'], diff --git a/lib/src/command/outdated.dart b/lib/src/command/outdated.dart index 6f22fd570..e35fa9c2b 100644 --- a/lib/src/command/outdated.dart +++ b/lib/src/command/outdated.dart @@ -204,7 +204,11 @@ class OutdatedCommand extends PubCommand { latestIsOverridden = true; } - final packageStatus = await current?.source.status(current, cache); + final packageStatus = await current?.source.status( + current.toRef(), + current.version, + cache, + ); final discontinued = packageStatus == null ? false : packageStatus.isDiscontinued; final discontinuedReplacedBy = packageStatus?.discontinuedReplacedBy; diff --git a/lib/src/entrypoint.dart b/lib/src/entrypoint.dart index 2683cdfbd..7a1527dc2 100644 --- a/lib/src/entrypoint.dart +++ b/lib/src/entrypoint.dart @@ -8,7 +8,6 @@ import 'dart:io'; import 'dart:math'; import 'package:collection/collection.dart'; -import 'package:meta/meta.dart'; import 'package:path/path.dart' as p; import 'package:pool/pool.dart'; import 'package:pub_semver/pub_semver.dart'; @@ -31,6 +30,7 @@ import 'pub_embeddable_command.dart'; import 'pubspec.dart'; import 'sdk.dart'; import 'solver.dart'; +import 'solver/report.dart'; import 'source/cached.dart'; import 'source/unknown.dart'; import 'system_cache.dart'; @@ -291,11 +291,11 @@ class Entrypoint { /// /// Performs version resolution according to [SolveType]. /// - /// [useLatest], if provided, defines a list of packages that will be - /// unlocked and forced to their latest versions. If [upgradeAll] is - /// true, the previous lockfile is ignored and all packages are re-resolved - /// from scratch. Otherwise, it will attempt to preserve the versions of all - /// previously locked packages. + /// [useLatest], if provided, defines a list of packages that will be unlocked + /// and forced to their latest versions. If [upgradeAll] is true, the previous + /// lockfile is ignored and all packages are re-resolved from scratch. + /// Otherwise, it will attempt to preserve the versions of all previously + /// locked packages. /// /// Shows a report of the changes made relative to the previous lockfile. If /// this is an upgrade or downgrade, all transitive dependencies are shown in @@ -305,8 +305,8 @@ class Entrypoint { /// If [precompile] is `true` (the default), this snapshots dependencies' /// executables. /// - /// if [onlyReportSuccessOrFailure] is `true` only success or failure will be shown --- - /// in case of failure, a reproduction command is shown. + /// if [onlyReportSuccessOrFailure] is `true` only success or failure will be + /// shown --- in case of failure, a reproduction command is shown. /// /// Updates [lockFile] and [packageRoot] accordingly. Future acquireDependencies( @@ -365,17 +365,26 @@ class Entrypoint { } } + // We have to download files also with --dry-run to ensure we know the + // archive hashes for downloaded files. 
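For context on the `_lockFileHasContentHashes` heuristic above: this change serializes the hash into the hosted `description` map of each entry in `pubspec.lock`, and the heuristic simply looks for any `sha256` key there. A small self-contained sketch of the new lockfile shape and the same check; the package name, version and hash below are made-up example values.

```dart
import 'package:yaml/yaml.dart';

const lockFileText = '''
packages:
  retry:
    dependency: "direct main"
    description:
      name: retry
      url: "https://pub.dartlang.org"
      sha256: "95cbaad58e2cf32d1aa852f20af1fcda1820ead92a4b1447ea7ba1ba18195d27"
    source: hosted
    version: "3.1.0"
sdks:
  dart: ">=2.17.0 <3.0.0"
''';

void main() {
  final lockfile = loadYaml(lockFileText) as Map;
  final packages = lockfile['packages'] as Map;
  // Same idea as _lockFileHasContentHashes: a hosted description carrying a
  // `sha256` key means the lock file already uses content-hashes.
  final hasContentHashes = packages.values
      .any((package) => (package['description'] as Map)['sha256'] != null);
  print(hasContentHashes); // true
}
```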
+ final newLockFile = await result.downloadCachedPackages(cache); + + final report = SolveReport( + type, root, lockFile, newLockFile, result.availableVersions, cache, + dryRun: dryRun); if (!onlyReportSuccessOrFailure) { - await result.showReport(type, cache); + await report.show(); } + _lockFile = newLockFile; + if (!dryRun) { - await result.downloadCachedPackages(cache); - saveLockFile(result); + newLockFile.writeToFile(lockFilePath, cache); } + if (onlyReportSuccessOrFailure) { log.message('Got dependencies$suffix.'); } else { - await result.summarizeChanges(type, cache, dryRun: dryRun); + await report.summarize(); } if (!dryRun) { @@ -833,21 +842,6 @@ class Entrypoint { } } - /// Saves a list of concrete package versions to the `pubspec.lock` file. - /// - /// Will use Windows line endings (`\r\n`) if a `pubspec.lock` exists, and - /// uses that. - void saveLockFile(SolveResult result) { - _lockFile = result.lockFile; - - final windowsLineEndings = fileExists(lockFilePath) && - detectWindowsLineEndings(readTextFile(lockFilePath)); - - final serialized = lockFile.serialize(root.dir); - writeTextFile(lockFilePath, - windowsLineEndings ? serialized.replaceAll('\n', '\r\n') : serialized); - } - /// If the entrypoint uses the old-style `.pub` cache directory, migrates it /// to the new-style `.dart_tool/pub` directory. void migrateCache() { @@ -926,22 +920,3 @@ See https://dart.dev/go/sdk-constraint '"pub" version, please run "$topLevelProgram pub get".'); } } - -/// Returns `true` if the [text] looks like it uses windows line endings. -/// -/// The heuristic used is to count all `\n` in the text and if stricly more than -/// half of them are preceded by `\r` we report `true`. -@visibleForTesting -bool detectWindowsLineEndings(String text) { - var index = -1; - var unixNewlines = 0; - var windowsNewlines = 0; - while ((index = text.indexOf('\n', index + 1)) != -1) { - if (index != 0 && text[index - 1] == '\r') { - windowsNewlines++; - } else { - unixNewlines++; - } - } - return windowsNewlines > unixNewlines; -} diff --git a/lib/src/global_packages.dart b/lib/src/global_packages.dart index 9d79a164c..0cbb15a91 100644 --- a/lib/src/global_packages.dart +++ b/lib/src/global_packages.dart @@ -23,6 +23,7 @@ import 'sdk.dart'; import 'sdk/dart.dart'; import 'solver.dart'; import 'solver/incompatibility_cause.dart'; +import 'solver/report.dart'; import 'source/cached.dart'; import 'source/git.dart'; import 'source/hosted.dart'; @@ -178,7 +179,7 @@ class GlobalPackages { final tempDir = cache.createTempDir(); // TODO(rnystrom): Look in "bin" and display list of binaries that // user can run. - _writeLockFile(tempDir, LockFile([id])); + LockFile([id]).writeToFile(p.join(tempDir, 'pubspec.lock'), cache); tryDeleteEntry(_packageDir(name)); tryRenameDir(tempDir, _packageDir(name)); @@ -223,10 +224,11 @@ class GlobalPackages { // We want the entrypoint to be rooted at 'dep' not the dummy-package. result.packages.removeWhere((id) => id.name == 'pub global activate'); - final sameVersions = originalLockFile != null && - originalLockFile.samePackageIds(result.lockFile); + final lockFile = await result.downloadCachedPackages(cache); + final sameVersions = + originalLockFile != null && originalLockFile.samePackageIds(lockFile); - final PackageId id = result.lockFile.packages[name]!; + final PackageId id = lockFile.packages[name]!; if (sameVersions) { log.message(''' The package $name is already activated at newest available version. 
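The refactoring above changes where the lock file comes from: callers no longer read `result.lockFile` and call the removed `Entrypoint.saveLockFile`, but instead take the `LockFile` returned by `downloadCachedPackages` (so the resolved ids carry the content-hashes of what was actually downloaded) and persist it with `LockFile.writeToFile`. A rough sketch of the new call shape, assuming it lives inside `lib/src/` next to the files changed here:

```dart
import 'lock_file.dart';
import 'solver.dart';
import 'system_cache.dart';

/// Sketch of the flow now used by entrypoint.dart and global_packages.dart.
Future<LockFile> applyResolution(
    SolveResult result, SystemCache cache, String lockFilePath) async {
  // Downloading also back-fills archive content-hashes into the PackageIds.
  final lockFile = await result.downloadCachedPackages(cache);
  // Preserves the existing line endings of an already-present pubspec.lock.
  lockFile.writeToFile(lockFilePath, cache);
  return lockFile;
}
```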
@@ -234,13 +236,20 @@ To recompile executables, first run `$topLevelProgram pub global deactivate $nam '''); } else { // Only precompile binaries if we have a new resolution. - if (!silent) await result.showReport(SolveType.get, cache); - - await result.downloadCachedPackages(cache); + if (!silent) { + await SolveReport( + SolveType.get, + root, + originalLockFile ?? LockFile.empty(), + lockFile, + result.availableVersions, + cache, + dryRun: false, + ).show(); + } - final lockFile = result.lockFile; final tempDir = cache.createTempDir(); - _writeLockFile(tempDir, lockFile); + lockFile.writeToFile(p.join(tempDir, 'pubspec.lock'), cache); // Load the package graph from [result] so we don't need to re-parse all // the pubspecs. @@ -263,7 +272,7 @@ To recompile executables, first run `$topLevelProgram pub global deactivate $nam final entrypoint = Entrypoint.global( _packageDir(id.name), cache.loadCached(id), - result.lockFile, + lockFile, cache, solveResult: result, ); @@ -276,11 +285,6 @@ To recompile executables, first run `$topLevelProgram pub global deactivate $nam if (!silent) log.message('Activated ${_formatPackage(id)}.'); } - /// Finishes activating package [package] by saving [lockFile] in the cache. - void _writeLockFile(String dir, LockFile lockFile) { - writeTextFile(p.join(dir, 'pubspec.lock'), lockFile.serialize(null)); - } - /// Shows the user the currently active package with [name], if any. LockFile? _describeActive(String name, SystemCache cache) { late final LockFile lockFile; diff --git a/lib/src/lock_file.dart b/lib/src/lock_file.dart index 6c662249e..343388dc4 100644 --- a/lib/src/lock_file.dart +++ b/lib/src/lock_file.dart @@ -5,6 +5,7 @@ import 'dart:convert'; import 'package:collection/collection.dart' hide mapMap; +import 'package:meta/meta.dart'; import 'package:path/path.dart' as p; import 'package:pub_semver/pub_semver.dart'; import 'package:source_span/source_span.dart'; @@ -29,15 +30,15 @@ class LockFile { /// Dependency names that appeared in the root package's `dependencies` /// section. - final Set _mainDependencies; + final Set mainDependencies; /// Dependency names that appeared in the root package's `dev_dependencies` /// section. - final Set _devDependencies; + final Set devDependencies; /// Dependency names that appeared in the root package's /// `dependency_overrides` section. - final Set _overriddenDependencies; + final Set overriddenDependencies; /// Creates a new lockfile containing [ids]. /// @@ -59,20 +60,16 @@ class LockFile { devDependencies ?? const UnmodifiableSetView.empty(), overriddenDependencies ?? const UnmodifiableSetView.empty()); - LockFile._( - Map packages, - this.sdkConstraints, - this._mainDependencies, - this._devDependencies, - this._overriddenDependencies) + LockFile._(Map packages, this.sdkConstraints, + this.mainDependencies, this.devDependencies, this.overriddenDependencies) : packages = UnmodifiableMapView(packages); LockFile.empty() : packages = const {}, sdkConstraints = {'dart': VersionConstraint.any}, - _mainDependencies = const UnmodifiableSetView.empty(), - _devDependencies = const UnmodifiableSetView.empty(), - _overriddenDependencies = const UnmodifiableSetView.empty(); + mainDependencies = const UnmodifiableSetView.empty(), + devDependencies = const UnmodifiableSetView.empty(), + overriddenDependencies = const UnmodifiableSetView.empty(); /// Loads a lockfile from [filePath]. 
factory LockFile.load(String filePath, SourceRegistry sources) { @@ -274,8 +271,13 @@ class LockFile { var packages = Map.from(this.packages); packages.remove(name); - return LockFile._(packages, sdkConstraints, _mainDependencies, - _devDependencies, _overriddenDependencies); + return LockFile._( + packages, + sdkConstraints, + mainDependencies, + devDependencies, + overriddenDependencies, + ); } /// Returns the contents of the `.dart_tool/package_config` file generated @@ -341,20 +343,18 @@ class LockFile { /// [packageDir] is the containing directory of the root package, used to /// serialize relative path package descriptions. If it is null, they will be /// serialized as absolute. - String serialize(String? packageDir) { + String serialize(String? packageDir, SystemCache cache) { // Convert the dependencies to a simple object. var packageMap = {}; - packages.forEach((name, package) { - var description = - package.description.serializeForLockfile(containingDir: packageDir); - - packageMap[name] = { - 'version': package.version.toString(), - 'source': package.source.name, - 'description': description, - 'dependency': _dependencyType(package.name) + for (final id in packages.values) { + packageMap[id.name] = { + 'version': id.version.toString(), + 'source': id.source.name, + 'description': + id.description.serializeForLockfile(containingDir: packageDir), + 'dependency': _dependencyType(id.name) }; - }); + } var data = { 'sdks': mapMap(sdkConstraints, @@ -368,6 +368,21 @@ ${yamlToString(data)} '''; } + /// Saves the list of concrete package versions to [lockFilePath]. + /// + /// Will use Windows line endings (`\r\n`) if the file already exists, and + /// uses that. + /// + /// Relative paths will be resolved relative to [lockFilePath] + void writeToFile(String lockFilePath, SystemCache cache) { + final windowsLineEndings = fileExists(lockFilePath) && + detectWindowsLineEndings(readTextFile(lockFilePath)); + + final serialized = serialize(p.dirname(lockFilePath), cache); + writeTextFile(lockFilePath, + windowsLineEndings ? serialized.replaceAll('\n', '\r\n') : serialized); + } + static const _directMain = 'direct main'; static const _directDev = 'direct dev'; static const _directOverridden = 'direct overridden'; @@ -375,12 +390,12 @@ ${yamlToString(data)} /// Returns the dependency classification for [package]. String _dependencyType(String package) { - if (_mainDependencies.contains(package)) return _directMain; - if (_devDependencies.contains(package)) return _directDev; + if (mainDependencies.contains(package)) return _directMain; + if (devDependencies.contains(package)) return _directDev; // If a package appears in `dependency_overrides` and another dependency // section, the main section it appears in takes precedence. - if (_overriddenDependencies.contains(package)) { + if (overriddenDependencies.contains(package)) { return _directOverridden; } return _transitive; @@ -399,3 +414,22 @@ ${yamlToString(data)} return true; } } + +/// Returns `true` if the [text] looks like it uses windows line endings. +/// +/// The heuristic used is to count all `\n` in the text and if stricly more than +/// half of them are preceded by `\r` we report `true`. 
+@visibleForTesting +bool detectWindowsLineEndings(String text) { + var index = -1; + var unixNewlines = 0; + var windowsNewlines = 0; + while ((index = text.indexOf('\n', index + 1)) != -1) { + if (index != 0 && text[index - 1] == '\r') { + windowsNewlines++; + } else { + unixNewlines++; + } + } + return windowsNewlines > unixNewlines; +} diff --git a/lib/src/solver/report.dart b/lib/src/solver/report.dart index dab29c63c..176ab671d 100644 --- a/lib/src/solver/report.dart +++ b/lib/src/solver/report.dart @@ -10,6 +10,7 @@ import '../lock_file.dart'; import '../log.dart' as log; import '../package.dart'; import '../package_name.dart'; +import '../source/hosted.dart'; import '../source/root.dart'; import '../system_cache.dart'; import '../utils.dart'; @@ -25,42 +26,114 @@ class SolveReport { final SolveType _type; final Package _root; final LockFile _previousLockFile; - final SolveResult _result; + final LockFile _newLockFile; final SystemCache _cache; + final bool _dryRun; - /// The dependencies in [_result], keyed by package name. - final _dependencies = {}; + /// The available versions of all selected packages from their source. + /// + /// An entry here may not include the full list of versions available if the + /// given package was locked and did not need to be unlocked during the solve. + /// + /// Version list will not contain any retracted package versions. + final Map> _availableVersions; final _output = StringBuffer(); - SolveReport(this._type, this._root, this._previousLockFile, this._result, - this._cache) { - // Fill the map so we can use it later. - for (var id in _result.packages) { - _dependencies[id.name] = id; - } - } - - /// Displays a report of the results of the version resolution relative to - /// the previous lock file. + SolveReport( + this._type, + this._root, + this._previousLockFile, + this._newLockFile, + this._availableVersions, + this._cache, { + required bool dryRun, + }) : _dryRun = dryRun; + + /// Displays a report of the results of the version resolution in + /// [_newLockFile] relative to the [_previousLockFile] file. Future show() async { await _reportChanges(); await _reportOverrides(); + _checkContentHashesMatchOldLockfile(); + } + + void _checkContentHashesMatchOldLockfile() { + final issues = []; + + final newPackageNames = _newLockFile.packages.keys.toSet(); + final oldPackageNames = _previousLockFile.packages.keys.toSet(); + // We only care about packages that exist in both new and old lockfile. + for (final name in newPackageNames.intersection(oldPackageNames)) { + final newId = _newLockFile.packages[name]!; + final oldId = _previousLockFile.packages[name]!; + + // We only care about hosted packages + final newDescription = newId.description; + final oldDescription = oldId.description; + if (newDescription is! ResolvedHostedDescription || + oldDescription is! ResolvedHostedDescription) { + continue; + } + + // We don't care about changes in the hash if the version number changed! + if (newId.version != oldId.version) { + continue; + } + + // Use the cached content-hashes after downloading to ensure that + // content-hashes from legacy servers gets used. 
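Since `detectWindowsLineEndings` now lives next to `LockFile.writeToFile` above (so a rewritten `pubspec.lock` keeps whatever line endings it already had), a few hypothetical inputs illustrate the "strictly more than half" heuristic; assume this runs beside `lib/src/lock_file.dart`.

```dart
import 'lock_file.dart';

void main() {
  // 2 of 3 newlines are preceded by '\r': treated as Windows line endings.
  print(detectWindowsLineEndings('a\r\nb\r\nc\n')); // true
  // Only 1 of 3 newlines is preceded by '\r': treated as Unix line endings.
  print(detectWindowsLineEndings('a\nb\nc\r\n')); // false
  // A tie is not *strictly* more than half: also treated as Unix.
  print(detectWindowsLineEndings('a\r\nb\n')); // false
}
```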
+ final cachedHash = newDescription.sha256; + assert(cachedHash != null); + + // Ignore cases where the old lockfile doesn't have a content-hash + final oldHash = oldDescription.sha256; + if (oldHash == null) { + continue; + } + + if (!fixedTimeBytesEquals(cachedHash, oldHash)) { + issues.add( + '$name-${newId.version} from "${newDescription.description.url}"', + ); + } + } + + if (issues.isNotEmpty) { + log.warning(''' +The existing content-hash from pubspec.lock doesn't match contents for: + * ${issues.join('\n * ')} + +This indicates one of: + * The content has changed on the server since you created the pubspec.lock. + * The pubspec.lock has been corrupted. +${_dryRun ? '' : '\nThe content-hashes in pubspec.lock has been updated.'} + +For more information see: +$contentHashesDocumentationUrl +'''); + } } /// Displays a one-line message summarizing what changes were made (or would /// be made) to the lockfile. /// /// If [dryRun] is true, describes it in terms of what would be done. - void summarize({bool dryRun = false}) { + /// + /// [type] is the type of version resolution that was run. + + /// If [type] is `SolveType.UPGRADE` it also shows the number of packages that + /// are not at the latest available version and the number of outdated + /// packages. + Future summarize() async { // Count how many dependencies actually changed. - var dependencies = _dependencies.keys.toSet(); + var dependencies = _newLockFile.packages.keys.toSet(); dependencies.addAll(_previousLockFile.packages.keys); dependencies.remove(_root.name); var numChanged = dependencies.where((name) { var oldId = _previousLockFile.packages[name]; - var newId = _dependencies[name]; + var newId = _newLockFile.packages[name]; // Added or removed dependencies count. if (oldId == null) return true; @@ -78,7 +151,7 @@ class SolveReport { } } - if (dryRun) { + if (_dryRun) { if (numChanged == 0) { log.message('No dependencies would change$suffix.'); } else if (numChanged == 1) { @@ -99,6 +172,10 @@ class SolveReport { log.message('Changed $numChanged dependencies$suffix!'); } } + if (_type == SolveType.upgrade) { + await reportDiscontinued(); + reportOutdated(); + } } /// Displays a report of all of the previous and current dependencies and @@ -107,7 +184,7 @@ class SolveReport { _output.clear(); // Show the new set of dependencies ordered by name. - var names = _result.packages.map((id) => id.name).toList(); + var names = _newLockFile.packages.keys.toList(); names.remove(_root.name); names.sort(); for (final name in names) { @@ -146,10 +223,10 @@ class SolveReport { /// if discontinued packages are detected. Future reportDiscontinued() async { var numDiscontinued = 0; - for (var id in _result.packages) { + for (var id in _newLockFile.packages.values) { if (id.description is RootDescription) continue; - final status = - await id.source.status(id, _cache, maxAge: Duration(days: 3)); + final status = await id.source + .status(id.toRef(), id.version, _cache, maxAge: Duration(days: 3)); if (status.isDiscontinued && (_root.dependencyType(id.name) == DependencyType.direct || _root.dependencyType(id.name) == DependencyType.dev)) { @@ -168,8 +245,8 @@ class SolveReport { /// Displays a two-line message, number of outdated packages and an /// instruction to run `pub outdated` if outdated packages are detected. 
void reportOutdated() { - final outdatedPackagesCount = _result.packages.where((id) { - final versions = _result.availableVersions[id.name]!; + final outdatedPackagesCount = _newLockFile.packages.values.where((id) { + final versions = _availableVersions[id.name]!; // A version is counted: // - if there is a newer version which is not a pre-release and current // version is also not a pre-release or, @@ -198,7 +275,7 @@ class SolveReport { /// "(override)" next to overridden packages. Future _reportPackage(String name, {bool alwaysShow = false, bool highlightOverride = true}) async { - var newId = _dependencies[name]; + var newId = _newLockFile.packages[name]; var oldId = _previousLockFile.packages[name]; var id = newId ?? oldId!; @@ -218,6 +295,7 @@ class SolveReport { // + The package was added. // > The package was upgraded from a lower version. // < The package was downgraded from a higher version. + // ~ Package contents has changed, but not the version number. // * Any other change between the old and new package. String icon; if (isOverridden) { @@ -228,7 +306,8 @@ class SolveReport { } else if (oldId == null) { icon = log.green('+ '); addedOrRemoved = true; - } else if (oldId.description != newId.description) { + } else if (oldId.description.description != newId.description.description) { + // Eg. a changed source in pubspec.yaml. icon = log.cyan('* '); changed = true; } else if (oldId.version < newId.version) { @@ -237,6 +316,10 @@ class SolveReport { } else if (oldId.version > newId.version) { icon = log.cyan('< '); changed = true; + } else if (oldId.description != newId.description) { + // Eg. a changed hash or revision. + icon = log.cyan('~ '); + changed = true; } else { // Unchanged. icon = ' '; @@ -245,7 +328,7 @@ class SolveReport { // See if there are any newer versions of the package that we were // unable to upgrade to. if (newId != null && _type != SolveType.downgrade) { - var versions = _result.availableVersions[newId.name]!; + var versions = _availableVersions[newId.name]!; var newerStable = false; var newerUnstable = false; @@ -259,8 +342,12 @@ class SolveReport { } } } - final status = - await id.source.status(id, _cache, maxAge: Duration(days: 3)); + final status = await id.source.status( + id.toRef(), + id.version, + _cache, + maxAge: Duration(days: 3), + ); if (status.isRetracted) { if (newerStable) { diff --git a/lib/src/solver/result.dart b/lib/src/solver/result.dart index df3aa4a8e..d958b1355 100644 --- a/lib/src/solver/result.dart +++ b/lib/src/solver/result.dart @@ -16,8 +16,6 @@ import '../pubspec.dart'; import '../source/cached.dart'; import '../source/hosted.dart'; import '../system_cache.dart'; -import 'report.dart'; -import 'type.dart'; /// The result of a successful version resolution. class SolveResult { @@ -50,9 +48,37 @@ class SolveResult { /// The wall clock time the resolution took. final Duration resolutionTime; - /// The [LockFile] representing the packages selected by this version - /// resolution. - LockFile get lockFile { + /// Downloads all the cached packages selected by this version resolution. + /// + /// If some already cached package differs from what is provided by the server + /// (according to the content-hash) a warning is printed and the package is + /// redownloaded. + /// + /// Returns the [LockFile] representing the packages selected by this version + /// resolution. Any resolved [PackageId]s will correspond to those in the + /// cache (and thus to the one provided by the server). 
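The icon legend added to `_reportPackage` above boils down to a small decision order. A condensed sketch using pub's `PackageId`, ignoring the overridden and removed cases handled earlier in the real code:

```dart
import 'package_name.dart';

String changeIcon(PackageId? oldId, PackageId newId) {
  if (oldId == null) return '+ '; // newly added
  if (oldId.description.description != newId.description.description) {
    return '* '; // source or location changed, e.g. a different host
  }
  if (newId.version > oldId.version) return '> '; // upgraded
  if (newId.version < oldId.version) return '< '; // downgraded
  if (oldId.description != newId.description) {
    return '~ '; // same version, but content-hash (or git revision) changed
  }
  return '  '; // unchanged
}
```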
+ /// + /// If there is a mismatch between the previous content-hash from pubspec.lock + /// and the new one a warning will be printed but the new one will be + /// returned. + Future downloadCachedPackages(SystemCache cache) async { + final resolvedPackageIds = await Future.wait( + packages.map((id) async { + if (id.source is CachedSource) { + return await withDependencyType(_root.dependencyType(id.name), + () async { + return await cache.downloadPackage( + id, + ); + }); + } + return id; + }), + ); + + // Invariant: the content-hashes in PUB_CACHE matches those provided by the + // server. + // Don't factor in overridden dependencies' SDK constraints, because we'll // accept those packages even if their constraints don't match. var nonOverrides = pubspecs.values @@ -67,27 +93,17 @@ class SolveResult { .intersect(sdkConstraints[identifier] ?? VersionConstraint.any); }); } - - return LockFile(packages, - sdkConstraints: sdkConstraints, - mainDependencies: MapKeySet(_root.dependencies), - devDependencies: MapKeySet(_root.devDependencies), - overriddenDependencies: MapKeySet(_root.dependencyOverrides)); + return LockFile( + resolvedPackageIds, + sdkConstraints: sdkConstraints, + mainDependencies: MapKeySet(_root.dependencies), + devDependencies: MapKeySet(_root.devDependencies), + overriddenDependencies: MapKeySet(_root.dependencyOverrides), + ); } final LockFile _previousLockFile; - /// Downloads all cached packages in [packages]. - Future downloadCachedPackages(SystemCache cache) async { - await Future.wait(packages.map((id) async { - final source = id.source; - if (source is! CachedSource) return; - return await withDependencyType(_root.dependencyType(id.name), () async { - await source.downloadToSystemCache(id, cache); - }); - })); - } - /// Returns the names of all packages that were changed. /// /// This includes packages that were added or removed. @@ -105,30 +121,6 @@ class SolveResult { SolveResult(this._root, this._previousLockFile, this.packages, this.pubspecs, this.availableVersions, this.attemptedSolutions, this.resolutionTime); - /// Displays a report of what changes were made to the lockfile. - /// - /// [type] is the type of version resolution that was run. - Future showReport(SolveType type, SystemCache cache) async { - await SolveReport(type, _root, _previousLockFile, this, cache).show(); - } - - /// Displays a one-line message summarizing what changes were made (or would - /// be made) to the lockfile. - /// - /// If [type] is `SolveType.UPGRADE` it also shows the number of packages - /// that are not at the latest available version. - /// - /// [type] is the type of version resolution that was run. - Future summarizeChanges(SolveType type, SystemCache cache, - {bool dryRun = false}) async { - final report = SolveReport(type, _root, _previousLockFile, this, cache); - report.summarize(dryRun: dryRun); - if (type == SolveType.upgrade) { - await report.reportDiscontinued(); - report.reportOutdated(); - } - } - /// Send analytics about the package resolution. void sendAnalytics(PubAnalytics pubAnalytics) { ArgumentError.checkNotNull(pubAnalytics); diff --git a/lib/src/source.dart b/lib/src/source.dart index a918c68a8..e7714eb67 100644 --- a/lib/src/source.dart +++ b/lib/src/source.dart @@ -133,7 +133,7 @@ abstract class Source { String doGetDirectory(PackageId id, SystemCache cache, {String? relativeFrom}); - /// Returns metadata about a given package. + /// Returns metadata about a given package-version. 
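As the `outdated.dart` and `report.dart` hunks above show, `Source.status` now takes the `PackageRef` and `Version` separately instead of a `PackageId`. A minimal sketch of a call site under the new signature, assuming pub's internal types are in scope:

```dart
import 'package_name.dart';
import 'system_cache.dart';

Future<bool> isRetracted(PackageId id, SystemCache cache) async {
  final status = await id.source.status(
    id.toRef(),
    id.version,
    cache,
    maxAge: Duration(days: 3),
  );
  return status.isRetracted;
}
```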
/// /// For remotely hosted packages, the information can be cached for up to /// [maxAge]. If [maxAge] is not given, the information is not cached. @@ -141,7 +141,8 @@ abstract class Source { /// In the case of offline sources, [maxAge] is not used, since information is /// per definition cached. Future status( - PackageId id, + PackageRef ref, + Version version, SystemCache cache, { Duration? maxAge, }) async { diff --git a/lib/src/source/cached.dart b/lib/src/source/cached.dart index 16a7313dd..54630c73d 100644 --- a/lib/src/source/cached.dart +++ b/lib/src/source/cached.dart @@ -55,7 +55,7 @@ abstract class CachedSource extends Source { dirExists(getDirectoryInCache(id, cache)); /// Downloads the package identified by [id] to the system cache. - Future downloadToSystemCache(PackageId id, SystemCache cache); + Future downloadToSystemCache(PackageId id, SystemCache cache); /// Returns the [Package]s that have been downloaded to the system cache. List getCachedPackages(SystemCache cache); diff --git a/lib/src/source/git.dart b/lib/src/source/git.dart index efd085ebb..24b1294aa 100644 --- a/lib/src/source/git.dart +++ b/lib/src/source/git.dart @@ -188,7 +188,7 @@ class GitSource extends CachedSource { /// /// This lets us avoid race conditions when getting multiple different /// packages from the same repository. - final _revisionCacheClones = {}; + final _revisionCacheClones = >{}; /// The paths to the canonical clones of repositories for which "git fetch" /// has already been run during this run of pub. @@ -298,7 +298,10 @@ class GitSource extends CachedSource { /// itself; each of the commit-specific directories are clones of a directory /// in `cache/`. @override - Future downloadToSystemCache(PackageId id, SystemCache cache) async { + Future downloadToSystemCache( + PackageId id, + SystemCache cache, + ) async { return await _pool.withResource(() async { final ref = id.toRef(); final description = ref.description; @@ -327,12 +330,7 @@ class GitSource extends CachedSource { _updatePackageList(revisionCachePath, path); } }); - - return Package.load( - id.name, - p.join(revisionCachePath, p.fromUri(path)), - cache.sources, - ); + return id; }); } diff --git a/lib/src/source/hosted.dart b/lib/src/source/hosted.dart index 451dce135..0def1edff 100644 --- a/lib/src/source/hosted.dart +++ b/lib/src/source/hosted.dart @@ -9,7 +9,8 @@ import 'dart:math' as math; import 'dart:typed_data'; import 'package:collection/collection.dart' - show maxBy, IterableNullableExtension; + show IterableExtension, IterableNullableExtension, ListEquality, maxBy; +import 'package:crypto/crypto.dart'; import 'package:http/http.dart' as http; import 'package:meta/meta.dart'; import 'package:path/path.dart' as p; @@ -32,6 +33,8 @@ import '../system_cache.dart'; import '../utils.dart'; import 'cached.dart'; +const contentHashesDocumentationUrl = 'https://dart.dev/go/content-hashes'; + /// Validates and normalizes a [hostedUrl] which is pointing to a pub server. /// /// A [hostedUrl] is a URL pointing to a _hosted pub server_ as defined by the @@ -174,24 +177,6 @@ class HostedSource extends CachedSource { return PackageRef(name, d); } - /// Returns an ID for a hosted package named [name] at [version]. - /// - /// If [url] is passed, it's the URL of the pub server from which the package - /// should be downloaded. [url] most be normalized and validated using - /// [validateAndNormalizeHostedUrl]. - PackageId idFor( - String name, - Version version, { - String? 
url, - }) => - PackageId( - name, - version, - ResolvedHostedDescription( - HostedDescription(name, url ?? defaultUrl.toString()), - ), - ); - /// Ensures that [description] is a valid hosted package description. /// /// Simple hosted dependencies only consist of a plain string, which is @@ -221,7 +206,10 @@ class HostedSource extends CachedSource { return PackageId( name, version, - ResolvedHostedDescription(HostedDescription(name, defaultUrl)), + ResolvedHostedDescription( + HostedDescription(name, defaultUrl), + sha256: null, + ), ); } if (description is! Map) { @@ -231,6 +219,10 @@ class HostedSource extends CachedSource { if (url is! String) { throw FormatException('The url should be a string.'); } + final sha256 = description['sha256']; + if (sha256 != null && sha256 is! String) { + throw FormatException('The sha256 should be a string.'); + } final foundName = description['name']; if (foundName is! String) { throw FormatException('The name should be a string.'); @@ -243,6 +235,7 @@ class HostedSource extends CachedSource { version, ResolvedHostedDescription( HostedDescription(name, Uri.parse(url).toString()), + sha256: sha256 == null ? null : hexDecode(sha256), ), ); } @@ -319,48 +312,53 @@ class HostedSource extends CachedSource { static final RegExp _looksLikePackageName = RegExp(r'^[a-zA-Z_]+[a-zA-Z0-9_]*$'); - late final RateLimitedScheduler<_RefAndCache, Map?> - _scheduler = RateLimitedScheduler( + late final RateLimitedScheduler<_RefAndCache, List<_VersionInfo>> _scheduler = + RateLimitedScheduler( _fetchVersions, maxConcurrentOperations: 10, ); - Map _versionInfoFromPackageListing( + List<_VersionInfo> _versionInfoFromPackageListing( Map body, PackageRef ref, Uri location, SystemCache cache) { final description = ref.description; if (description is! HostedDescription) { throw ArgumentError('Wrong source'); } final versions = body['versions']; - if (versions is List) { - return Map.fromEntries(versions.map((map) { - final pubspecData = map['pubspec']; - if (pubspecData is Map) { - var pubspec = Pubspec.fromMap(pubspecData, cache.sources, - expectedName: ref.name, location: location); - var id = idFor( - ref.name, - pubspec.version, - url: description.url, - ); - var archiveUrl = map['archive_url']; - if (archiveUrl is String) { - final status = PackageStatus( - isDiscontinued: body['isDiscontinued'] ?? false, - discontinuedReplacedBy: body['replacedBy'], - isRetracted: map['retracted'] ?? false); - return MapEntry( - id, _VersionInfo(pubspec, Uri.parse(archiveUrl), status)); - } - throw FormatException('archive_url must be a String'); - } - throw FormatException('pubspec must be a map'); - })); + if (versions is! List) { + throw FormatException('versions must be a list'); } - throw FormatException('versions must be a list'); + return versions.map((map) { + final pubspecData = map['pubspec']; + if (pubspecData is! Map) { + throw FormatException('pubspec must be a map'); + } + var pubspec = Pubspec.fromMap(pubspecData, cache.sources, + expectedName: ref.name, location: location); + final archiveSha256 = map['archive_sha256']; + if (archiveSha256 != null && archiveSha256 is! String) { + throw FormatException('archive_sha256 must be a String'); + } + final archiveUrl = map['archive_url']; + if (archiveUrl is! String) { + throw FormatException('archive_url must be a String'); + } + final status = PackageStatus( + isDiscontinued: body['isDiscontinued'] ?? false, + discontinuedReplacedBy: body['replacedBy'], + isRetracted: map['retracted'] ?? 
false, + ); + return _VersionInfo( + pubspec.version, + pubspec, + Uri.parse(archiveUrl), + status, + archiveSha256 == null ? null : hexDecode(archiveSha256), + ); + }).toList(); } - Future?> _fetchVersionsNoPrefetching( + Future> _fetchVersionsNoPrefetching( PackageRef ref, SystemCache cache) async { final description = ref.description; @@ -371,9 +369,9 @@ class HostedSource extends CachedSource { final url = _listVersionsUrl(ref); log.io('Get versions from $url.'); - late final String bodyText; - late final dynamic body; - late final Map result; + final String bodyText; + final dynamic body; + final List<_VersionInfo> result; try { // TODO(sigurdm): Implement cancellation of requests. This probably // requires resolution of: https://github.com/dart-lang/sdk/issues/22265. @@ -401,8 +399,7 @@ class HostedSource extends CachedSource { return result; } - Future?> _fetchVersions( - _RefAndCache refAndCache) async { + Future> _fetchVersions(_RefAndCache refAndCache) async { final ref = refAndCache.ref; final description = ref.description; if (description is! HostedDescription) { @@ -414,16 +411,13 @@ class HostedSource extends CachedSource { /// Prefetch the dependencies of the latest version, we are likely to need /// them later. void prescheduleDependenciesOfLatest( - Map? listing, + List<_VersionInfo>? listing, SystemCache cache, ) { - if (listing == null) return; + if (listing == null || listing.isEmpty) return; final latestVersion = - maxBy(listing.keys.map((id) => id.version), (e) => e)!; - final latestVersionId = PackageId( - ref.name, latestVersion, ResolvedHostedDescription(description)); - final dependencies = - listing[latestVersionId]?.pubspec.dependencies.values ?? []; + maxBy<_VersionInfo, Version>(listing, (e) => e.version)!; + final dependencies = latestVersion.pubspec.dependencies.values; unawaited(withDependencyType(DependencyType.none, () async { for (final packageRange in dependencies) { if (packageRange.source is HostedSource) { @@ -456,8 +450,7 @@ class HostedSource extends CachedSource { /// Invariant: Entries in this cache are the parsed version of the exact same /// information cached on disk. I.e. if the entry is present in this cache, /// there will not be a newer version on disk. - final Map>> - _responseCache = {}; + final Map>> _responseCache = {}; /// If a cached version listing response for [ref] exists on disk and is less /// than [maxAge] old it is parsed and returned. @@ -466,7 +459,7 @@ class HostedSource extends CachedSource { /// /// If [maxAge] is not given, we will try to get the cached version no matter /// how old it is. - Future?> _cachedVersionListingResponse( + Future?> _cachedVersionListingResponse( PackageRef ref, SystemCache cache, {Duration? maxAge}) async { if (_responseCache.containsKey(ref)) { @@ -544,26 +537,39 @@ class HostedSource extends CachedSource { } @override - Future status(PackageId id, SystemCache cache, - {Duration? maxAge}) async { + Future status( + PackageRef ref, + Version version, + SystemCache cache, { + Duration? maxAge, + }) async { + // If we don't have the specific version we return the empty response, since + // it is more or less harmless.. + // + // This can happen if the connection is broken, or the server is faulty. + // We want to avoid a crash + // + // TODO(sigurdm): Consider representing the non-existence of the + // package-version in the return value. + return (await _versionInfo(ref, version, cache, maxAge: maxAge))?.status ?? 
+ PackageStatus(); + } + + Future<_VersionInfo?> _versionInfo( + PackageRef ref, + Version version, + SystemCache cache, { + Duration? maxAge, + }) async { if (cache.isOffline) { // Do we have a cached version response on disk? - final versionListing = - await _cachedVersionListingResponse(id.toRef(), cache); + final versionListing = await _cachedVersionListingResponse(ref, cache); if (versionListing == null) { - return PackageStatus(); + return null; } - // If we don't have the specific version we return the empty response. - // - // This should not happen. But in production we want to avoid a crash, since - // it is more or less harmless. - // - // TODO(sigurdm): Consider representing the non-existence of the - // package-version in the return value. - return versionListing[id]?.status ?? PackageStatus(); + return versionListing.firstWhereOrNull((l) => l.version == version); } - final ref = id.toRef(); // Did we already get info for this package? var versionListing = _scheduler.peek(_RefAndCache(ref, cache)); if (maxAge != null) { @@ -576,20 +582,11 @@ class HostedSource extends CachedSource { .schedule(_RefAndCache(ref, cache)) // Failures retrieving the listing here should just be ignored. .catchError( - (_) => {}, + (_) async => <_VersionInfo>[], test: (error) => error is Exception, ); - final listing = versionListing![id]; - // If we don't have the specific version we return the empty response, since - // it is more or less harmless.. - // - // This can happen if the connection is broken, or the server is faulty. - // We want to avoid a crash - // - // TODO(sigurdm): Consider representing the non-existence of the - // package-version in the return value. - return listing?.status ?? PackageStatus(); + return versionListing.firstWhereOrNull((l) => l.version == version); } // The path where the response from the package-listing api is cached. @@ -652,7 +649,18 @@ class HostedSource extends CachedSource { await _cachedVersionListingResponse(ref, cache, maxAge: maxAge); } versionListing ??= await _scheduler.schedule(_RefAndCache(ref, cache)); - return versionListing!.keys.toList(); + return versionListing + .map( + (i) => PackageId( + ref.name, + i.version, + ResolvedHostedDescription( + ref.description as HostedDescription, + sha256: i.archiveSha256, + ), + ), + ) + .toList(); } /// Parses [description] into its server and package name components, then @@ -678,23 +686,102 @@ class HostedSource extends CachedSource { } final versions = await _scheduler.schedule(_RefAndCache(id.toRef(), cache)); final url = _listVersionsUrl(id.toRef()); - return versions![id]?.pubspec ?? + return versions.firstWhereOrNull((i) => i.version == id.version)?.pubspec ?? (throw PackageNotFoundException('Could not find package $id at $url')); } - /// Downloads the package identified by [id] to the system cache. + /// Downloads the package identified by [id] to the system cache if needed. + /// + /// Validates that the content hash of [id] corresponds to what is already in + /// cache, if not the file is redownloaded. + /// + /// If [allowOutdatedHashChecks] is `true` we use a cached version listing + /// response if present instead of probing the server. Not probing allows for + /// `pub get` with a filled cache to be a fast case that doesn't require any + /// new version-listings. 
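Downstream of the `downloadToSystemCache` logic described above, callers go through `SystemCache.downloadPackage`, which now returns a `PackageId` whose `ResolvedHostedDescription` carries the verified hash (also persisted as a sidecar file, see `hashPath` further below). A small usage sketch, assuming pub's internals are in scope:

```dart
import 'package_name.dart';
import 'source/hosted.dart';
import 'system_cache.dart';
import 'utils.dart';

Future<void> showContentHash(SystemCache cache, PackageId id) async {
  final downloaded = await cache.downloadPackage(id);
  final description = downloaded.description as ResolvedHostedDescription;
  // The hex encoding matches the hosted-hashes sidecar file written during
  // download.
  print('${downloaded.name}-${downloaded.version}: '
      '${hexEncode(description.sha256!)}');
}
```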
@override - Future downloadToSystemCache(PackageId id, SystemCache cache) async { - if (!isInSystemCache(id, cache)) { + Future downloadToSystemCache( + PackageId id, SystemCache cache) async { + final packageDir = getDirectoryInCache(id, cache); + + // Use the content-hash from the version-info to compare with what we + // already downloaded. + // + // The content-hash from [id] will be compared with that when the lockfile + // is written. + // + // We allow the version-listing to be a few days outdated in order for `pub + // get` with an existing working resolution and everything in cache to be + // fast. + final versionInfo = await _versionInfo( + id.toRef(), + id.version, + cache, + maxAge: Duration(days: 3), + ); + + final expectedContentHash = versionInfo?.archiveSha256 ?? + // Handling of legacy server - we use the hash from the id (typically + // from the lockfile) to compare to the existing download. + (id.description as ResolvedHostedDescription).sha256; + Uint8List? contentHash; + if (!fileExists(hashPath(id, cache))) { + if (dirExists(packageDir) && !cache.isOffline) { + log.fine( + 'Cache entry for ${id.name}-${id.version} has no content-hash - redownloading.'); + deleteEntry(packageDir); + } + } else if (expectedContentHash == null) { + // Can happen with a legacy server combined with a legacy lock file. + log.fine( + 'Content-hash of ${id.name}-${id.version} not known from resolution.'); + } else { + final hashFromCache = sha256FromCache(id, cache); + if (!fixedTimeBytesEquals(hashFromCache, expectedContentHash)) { + log.warning( + 'Cached version of ${id.name}-${id.version} has wrong hash - redownloading.'); + if (cache.isOffline) { + fail('Cannot redownload while offline. Try again without --offline.'); + } + deleteEntry(packageDir); + } else { + contentHash = hashFromCache; + } + } + if (dirExists(packageDir)) { + contentHash ??= sha256FromCache(id, cache); + } else { if (cache.isOffline) { - throw StateError('Cannot download packages when offline.'); + fail( + 'Missing package ${id.name}-${id.version}. Try again without --offline.'); } - var packageDir = getDirectoryInCache(id, cache); - ensureDir(p.dirname(packageDir)); - await _download(id, packageDir, cache); + contentHash = await _download(id, packageDir, cache); } + return PackageId( + id.name, + id.version, + (id.description as ResolvedHostedDescription).withSha256(contentHash), + ); + } - return Package.load(id.name, getDirectoryInCache(id, cache), cache.sources); + /// Determines if the package identified by [id] is already downloaded to the + /// system cache and has the expected content-hash. + @override + bool isInSystemCache(PackageId id, SystemCache cache) { + if ((id.description as ResolvedHostedDescription).sha256 != null) { + try { + final cachedSha256 = readTextFile(hashPath(id, cache)); + if (!const ListEquality().equals(hexDecode(cachedSha256), + (id.description as ResolvedHostedDescription).sha256)) { + return false; + } + } on io.IOException { + // Most likely the hash file was not written, because we had a legacy + // entry. + return false; + } + } + return dirExists(getDirectoryInCache(id, cache)); } /// The system cache directory for the hosted source contains subdirectories @@ -714,6 +801,32 @@ class HostedSource extends CachedSource { return p.join(rootDir, dir, '${id.name}-${id.version}'); } + /// The system cache directory for the hosted source contains subdirectories + /// for each separate repository URL that's used on the system. 
+ /// + /// Parallel to this there is a `hosted-hashes` directory with a stored hash + /// of all downloaded packages. + String hashPath(PackageId id, SystemCache cache) { + final description = id.description.description; + if (description is! HostedDescription) { + throw ArgumentError('Wrong source'); + } + final rootDir = cache.rootDir; + + var serverDir = _urlToDirectory(description.url); + return p.join( + rootDir, 'hosted-hashes', serverDir, '${id.name}-${id.version}.sha256'); + } + + /// Loads the hash at `hashPath(id)`. + Uint8List? sha256FromCache(PackageId id, SystemCache cache) { + try { + return hexDecode(readTextFile(hashPath(id, cache))); + } on io.IOException { + return null; + } + } + /// Re-downloads all packages that have been previously downloaded into the /// system cache from any server. @override @@ -766,7 +879,14 @@ class HostedSource extends CachedSource { return results ..addAll(await Future.wait( packages.map((package) async { - var id = idFor(package.name, package.version, url: url); + var id = PackageId( + package.name, + package.version, + ResolvedHostedDescription( + HostedDescription(package.name, url), + sha256: null, + ), + ); try { deleteEntry(package.dir); await _download(id, package.dir, cache); @@ -803,7 +923,7 @@ class HostedSource extends CachedSource { return PackageId( name, version, - ResolvedHostedDescription(HostedDescription(name, url)), + ResolvedHostedDescription(HostedDescription(name, url), sha256: null), ); } @@ -849,7 +969,9 @@ class HostedSource extends CachedSource { /// If there is no archive_url, try to fetch it from /// `$server/packages/$package/versions/$version.tar.gz` where server comes /// from `id.description`. - Future _download( + /// + /// Returns the content-hash of the downloaded archive. + Future _download( PackageId id, String destPath, SystemCache cache, @@ -868,9 +990,11 @@ class HostedSource extends CachedSource { // query-string as is the case with signed S3 URLs. And we wish to allow for // such URLs to be used. final versions = await _scheduler.schedule(_RefAndCache(id.toRef(), cache)); - final versionInfo = versions![id]; + final versionInfo = + versions.firstWhereOrNull((i) => i.version == id.version); final packageName = id.name; final version = id.version; + late Uint8List contentHash; if (versionInfo == null) { throw PackageNotFoundException( 'Package $packageName has no version $version'); @@ -878,13 +1002,51 @@ class HostedSource extends CachedSource { final archiveUrl = versionInfo.archiveUrl; log.io('Get package from $archiveUrl.'); - log.message('Downloading ${log.bold(id.name)} ${id.version}...'); + log.fine('Downloading ${log.bold(id.name)} ${id.version}...'); // Download and extract the archive to a temp directory. - await withTempDir((tempDirForArchive) async { + return await withTempDir((tempDirForArchive) async { var fileName = '$packageName-$version.tar.gz'; var archivePath = p.join(tempDirForArchive, fileName); + Stream> validateSha256( + Stream> stream, + Digest? expectedHash, + ) async* { + final output = _SingleValueSink(); + final input = sha256.startChunkedConversion(output); + await for (final v in stream) { + input.add(v); + yield v; + } + input.close(); + final actualHash = output.value; + if (expectedHash != null && output.value != expectedHash) { + log.fine( + 'Expected content-hash for ${id.name}-${id.version} $expectedHash actual: ${output.value}.'); + throw PackageIntegrityException(''' +Downloaded archive for ${id.name}-${id.version} had wrong content-hash. 
+ +This indicates a problem on the package repository: `${description.url}`. + +See $contentHashesDocumentationUrl. +'''); + } + final path = hashPath(id, cache); + ensureDir(p.dirname(path)); + writeTextFile( + path, + hexEncode(actualHash.bytes), + ); + contentHash = Uint8List.fromList(actualHash.bytes); + } + + // It is important that we do not compare against id.description.sha256, + // as we need to check against the newly fetched version listing to ensure + // that content changes result in updated lockfiles, not failure to + // download. + final expectedSha256 = versionInfo.archiveSha256; + // The client from `withAuthenticatedClient` will retry HTTP requests. // This wrapper is one layer up and will retry checksum validation errors. await retry( @@ -893,14 +1055,16 @@ class HostedSource extends CachedSource { final request = http.Request('GET', archiveUrl); final response = await withAuthenticatedClient(cache, Uri.parse(description.url), (client) => client.send(request)); - final expectedChecksum = _parseCrc32c(response.headers, fileName); + final expectedCrc32Checksum = + _parseCrc32c(response.headers, fileName); Stream> stream = response.stream; - if (expectedChecksum != null) { - stream = _validateStream( - response.stream, expectedChecksum, id, archiveUrl); + if (expectedCrc32Checksum != null) { + stream = _validateStreamCrc32Checksum( + response.stream, expectedCrc32Checksum, id, archiveUrl); } - + stream = validateSha256( + stream, (expectedSha256 == null) ? null : Digest(expectedSha256)); // We download the archive to disk instead of streaming it directly // into the tar unpacking. This simplifies stream handling. // Package:tar cancels the stream when it reaches end-of-archive, and @@ -924,6 +1088,7 @@ class HostedSource extends CachedSource { var tempDir = cache.createTempDir(); await extractTarGz(readBinaryFileAsStream(archivePath), tempDir); + ensureDir(p.dirname(destPath)); // Now that the get has succeeded, move it to the real location in the // cache. // @@ -931,6 +1096,7 @@ class HostedSource extends CachedSource { // another pub process has installed the same package version while we // downloaded. tryRenameDir(tempDir, destPath); + return contentHash; }); } @@ -1051,7 +1217,22 @@ class ResolvedHostedDescription extends ResolvedDescription { @override HostedDescription get description => super.description as HostedDescription; - ResolvedHostedDescription(HostedDescription description) : super(description); + /// The content hash of the package archive (the `tar.gz` file) of the + /// PackageId described by this. + /// + /// This can be obtained in several ways: + /// * Reported from a server in the archive_sha256 field. + /// (will be null if the server does not report this.) + /// * Obtained from a pubspec.lock + /// (will be null for legacy lock-files). + /// * Read from the /hosted-hashes//-.sha256 file. + /// (will be null if the file doesn't exist for corrupt or legacy caches). + final Uint8List? sha256; + + ResolvedHostedDescription( + HostedDescription description, { + required this.sha256, + }) : super(description); @override Object? serializeForLockfile({required String? 
containingDir}) { @@ -1061,26 +1242,46 @@ class ResolvedHostedDescription extends ResolvedDescription { } on FormatException catch (e) { throw ArgumentError.value(url, 'url', 'url must be normalized: $e'); } - return {'name': description.packageName, 'url': url.toString()}; + final hash = sha256; + return { + 'name': description.packageName, + 'url': url.toString(), + if (hash != null) 'sha256': hexEncode(hash), + }; } @override + // We do not include the sha256 in the hashCode because of the equality + // semantics. int get hashCode => description.hashCode; @override bool operator ==(Object other) { return other is ResolvedHostedDescription && - other.description == description; + other.description == description && + // A [sha256] of `null` means that we don't know the hash yet. + // Therefore we have to assume it is equal to any known value. + (sha256 == null || + other.sha256 == null || + fixedTimeBytesEquals(sha256, other.sha256)); } + + ResolvedHostedDescription withSha256(Uint8List? newSha256) => + ResolvedHostedDescription(description, sha256: newSha256); } /// Information about a package version retrieved from /api/packages/$package< class _VersionInfo { final Pubspec pubspec; final Uri archiveUrl; + final Version version; + + /// The sha256 digest of the archive according to the package-repository. + final Uint8List? archiveSha256; final PackageStatus status; - _VersionInfo(this.pubspec, this.archiveUrl, this.status); + _VersionInfo(this.version, this.pubspec, this.archiveUrl, this.status, + this.archiveSha256); } /// Given a URL, returns a "normalized" string to be used as a directory name @@ -1152,6 +1353,20 @@ class _RefAndCache { bool operator ==(Object other) => other is _RefAndCache && other.ref == ref; } +/// A sink that can only have `add` called once, and that can retrieve the +/// value. +class _SingleValueSink implements Sink { + late final T value; + + @override + void add(T data) { + value = data; + } + + @override + void close() {} +} + @visibleForTesting const checksumHeaderName = 'x-goog-hash'; @@ -1163,7 +1378,7 @@ const checksumHeaderName = 'x-goog-hash'; /// the one present in the checksum response header. /// /// Throws [PackageIntegrityException] if there is a checksum mismatch. -Stream> _validateStream(Stream> stream, +Stream> _validateStreamCrc32Checksum(Stream> stream, int expectedChecksum, PackageId id, Uri archiveUrl) async* { final crc32c = Crc32c(); diff --git a/lib/src/system_cache.dart b/lib/src/system_cache.dart index ebbaa9792..11a10a174 100644 --- a/lib/src/system_cache.dart +++ b/lib/src/system_cache.dart @@ -186,7 +186,12 @@ class SystemCache { var versions = await ref.source.doGetVersions(ref, maxAge, this); versions = (await Future.wait(versions.map((id) async { - final packageStatus = await ref.source.status(id, this, maxAge: maxAge); + final packageStatus = await ref.source.status( + id.toRef(), + id.version, + this, + maxAge: maxAge, + ); if (!packageStatus.isRetracted || id.version == allowedRetractedVersion) { return id; } @@ -208,10 +213,24 @@ class SystemCache { return id.source.doGetDirectory(id, this, relativeFrom: relativeFrom); } - Future downloadPackage(PackageId id) async { + /// Downloads a cached package identified by [id] to the cache. + /// + /// [id] must refer to a cached package. + /// + /// If [allowOutdatedHashChecks] is `true` we use a cached version listing + /// response if present instead of probing the server. 
Not probing allows for
+  /// `pub get` with a filled cache to be a fast case that doesn't require any
+  /// new version-listings.
+  ///
+  /// Returns [id] with an updated [ResolvedDescription], which can be
+  /// different if the content-hash changed while downloading.
+  Future<PackageId> downloadPackage(PackageId id) async {
     final source = id.source;
     assert(source is CachedSource);
-    await (source as CachedSource).downloadToSystemCache(id, this);
+    return await (source as CachedSource).downloadToSystemCache(
+      id,
+      this,
+    );
   }
 
   /// Get the latest version of [package].
diff --git a/lib/src/utils.dart b/lib/src/utils.dart
index dbc762255..0f1bf5172 100644
--- a/lib/src/utils.dart
+++ b/lib/src/utils.dart
@@ -7,7 +7,9 @@ import 'dart:async';
 import 'dart:convert';
 import 'dart:io';
 import 'dart:math' as math;
+import 'dart:typed_data';
 
+import 'package:convert/convert.dart';
 import 'package:crypto/crypto.dart' as crypto;
 import 'package:pub_semver/pub_semver.dart';
 import 'package:stack_trace/stack_trace.dart';
@@ -328,6 +330,10 @@ String replace(String source, Pattern matcher, String Function(Match) fn) {
 String sha1(String source) =>
     crypto.sha1.convert(utf8.encode(source)).toString();
 
+String hexEncode(List<int> bytes) => hex.encode(bytes);
+
+Uint8List hexDecode(String string) => hex.decode(string) as Uint8List;
+
 /// A regular expression matching a trailing CR character.
 final _trailingCR = RegExp(r'\r$');
 
@@ -639,6 +645,19 @@ Map mapMap(
   };
 }
 
+/// Compares two lists. If the lists have equal length this comparison will
+/// iterate all elements, thus taking a fixed amount of time, making timing
+/// attacks harder.
+bool fixedTimeBytesEquals(List<int>? a, List<int>? b) {
+  if (a == null || b == null) return a == b;
+  if (a.length != b.length) return false;
+  var e = 0;
+  for (var i = 0; i < a.length; i++) {
+    e |= a[i] ^ b[i];
+  }
+  return e == 0;
+}
+
 /// Call [fn] retrying so long as [retryIf] return `true` for the exception
 /// thrown, up-to [maxAttempts] times.
/// diff --git a/pubspec.yaml b/pubspec.yaml index eb2983fbb..f7c10776b 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -11,6 +11,7 @@ dependencies: async: ^2.6.1 cli_util: ^0.3.5 collection: ^1.15.0 + convert: ^3.0.2 crypto: ^3.0.1 frontend_server_client: ^3.0.0 http: ^0.13.3 diff --git a/test/cache/add/adds_latest_matching_version_test.dart b/test/cache/add/adds_latest_matching_version_test.dart index 7bf962569..9e06bf792 100644 --- a/test/cache/add/adds_latest_matching_version_test.dart +++ b/test/cache/add/adds_latest_matching_version_test.dart @@ -21,8 +21,8 @@ void main() { await runPub( args: ['cache', 'add', 'foo', '-v', '>=1.0.0 <2.0.0'], - output: 'Downloading foo 1.2.3...', silent: allOf([ + contains('Downloading foo 1.2.3...'), contains('X-Pub-OS: ${Platform.operatingSystem}'), contains('X-Pub-Command: cache add'), contains('X-Pub-Session-ID:'), diff --git a/test/cache/add/adds_latest_version_test.dart b/test/cache/add/adds_latest_version_test.dart index cf34857e2..d3020399c 100644 --- a/test/cache/add/adds_latest_version_test.dart +++ b/test/cache/add/adds_latest_version_test.dart @@ -15,7 +15,9 @@ void main() { ..serve('foo', '1.2.4-dev'); await runPub( - args: ['cache', 'add', 'foo'], output: 'Downloading foo 1.2.3...'); + args: ['cache', 'add', 'foo'], + silent: contains('Downloading foo 1.2.3...'), + ); await d.cacheDir({'foo': '1.2.3'}).validate(); }); diff --git a/test/cache/add/all_adds_all_matching_versions_test.dart b/test/cache/add/all_adds_all_matching_versions_test.dart index a05e8dca5..b7c884f97 100644 --- a/test/cache/add/all_adds_all_matching_versions_test.dart +++ b/test/cache/add/all_adds_all_matching_versions_test.dart @@ -16,11 +16,13 @@ void main() { ..serve('foo', '2.0.0'); await runPub( - args: ['cache', 'add', 'foo', '-v', '>=1.0.0 <2.0.0', '--all'], - output: ''' - Downloading foo 1.2.2... - Downloading foo 1.2.3-dev... - Downloading foo 1.2.3...'''); + args: ['cache', 'add', 'foo', '-v', '>=1.0.0 <2.0.0', '--all'], + silent: allOf([ + contains('Downloading foo 1.2.2...'), + contains('Downloading foo 1.2.3-dev...'), + contains('Downloading foo 1.2.3...'), + ]), + ); await d.cacheDir({'foo': '1.2.2'}).validate(); await d.cacheDir({'foo': '1.2.3-dev'}).validate(); diff --git a/test/cache/add/all_with_some_versions_present_test.dart b/test/cache/add/all_with_some_versions_present_test.dart index 7b5b36d60..82d9fd584 100644 --- a/test/cache/add/all_with_some_versions_present_test.dart +++ b/test/cache/add/all_with_some_versions_present_test.dart @@ -18,18 +18,22 @@ void main() { // Install a couple of versions first. await runPub( args: ['cache', 'add', 'foo', '-v', '1.2.1'], - output: 'Downloading foo 1.2.1...'); + silent: contains('Downloading foo 1.2.1...')); await runPub( args: ['cache', 'add', 'foo', '-v', '1.2.3'], - output: 'Downloading foo 1.2.3...'); + silent: contains('Downloading foo 1.2.3...')); // They should show up as already installed now. - await runPub(args: ['cache', 'add', 'foo', '--all'], output: ''' - Already cached foo 1.2.1. - Downloading foo 1.2.2... - Already cached foo 1.2.3. - Downloading foo 2.0.0...'''); + await runPub( + args: ['cache', 'add', 'foo', '--all'], + silent: allOf([ + contains('Downloading foo 1.2.2...'), + contains('Downloading foo 2.0.0...') + ]), + output: ''' +Already cached foo 1.2.1. 
+Already cached foo 1.2.3.'''); await d.cacheDir({'foo': '1.2.1'}).validate(); await d.cacheDir({'foo': '1.2.2'}).validate(); diff --git a/test/cache/add/already_cached_test.dart b/test/cache/add/already_cached_test.dart index 8c74da9d4..ef9452e1e 100644 --- a/test/cache/add/already_cached_test.dart +++ b/test/cache/add/already_cached_test.dart @@ -14,7 +14,8 @@ void main() { // Run once to put it in the cache. await runPub( - args: ['cache', 'add', 'foo'], output: 'Downloading foo 1.2.3...'); + args: ['cache', 'add', 'foo'], + silent: contains('Downloading foo 1.2.3...')); // Should be in the cache now. await runPub( diff --git a/test/cache/repair/handles_failure_test.dart b/test/cache/repair/handles_failure_test.dart index d5637caf0..f44a42dbb 100644 --- a/test/cache/repair/handles_failure_test.dart +++ b/test/cache/repair/handles_failure_test.dart @@ -32,9 +32,6 @@ void main() { // Repair them. var pub = await startPub(args: ['cache', 'repair']); - expect(pub.stdout, emits('Downloading foo 1.2.3...')); - expect(pub.stdout, emits('Downloading foo 1.2.5...')); - expect(pub.stderr, emits(startsWith('Failed to repair foo 1.2.4. Error:'))); expect( pub.stderr, diff --git a/test/cache/repair/hosted.dart b/test/cache/repair/hosted.dart index ec3786be3..a826fad73 100644 --- a/test/cache/repair/hosted.dart +++ b/test/cache/repair/hosted.dart @@ -39,11 +39,12 @@ void main() { await runPub( args: ['cache', 'repair'], output: ''' - Downloading bar 1.2.4... - Downloading foo 1.2.3... - Downloading foo 1.2.5... + Reinstalled 3 packages.''', silent: allOf([ + contains('Downloading bar 1.2.4...'), + contains('Downloading foo 1.2.3...'), + contains('Downloading foo 1.2.5...'), contains('X-Pub-OS: ${Platform.operatingSystem}'), contains('X-Pub-Command: cache repair'), contains('X-Pub-Session-ID:'), diff --git a/test/cache/repair/recompiles_snapshots_test.dart b/test/cache/repair/recompiles_snapshots_test.dart index f3c3d6ca9..6fa4ca234 100644 --- a/test/cache/repair/recompiles_snapshots_test.dart +++ b/test/cache/repair/recompiles_snapshots_test.dart @@ -21,7 +21,6 @@ void main() { ]).create(); await runPub(args: ['cache', 'repair'], output: ''' - Downloading foo 1.0.0... Reinstalled 1 package. Reactivating foo 1.0.0... Building package executables... diff --git a/test/cache/repair/updates_binstubs_test.dart b/test/cache/repair/updates_binstubs_test.dart index e8cbfb483..65a574c04 100644 --- a/test/cache/repair/updates_binstubs_test.dart +++ b/test/cache/repair/updates_binstubs_test.dart @@ -34,7 +34,6 @@ void main() { // Repair them. await runPub(args: ['cache', 'repair'], output: ''' - Downloading foo 1.0.0... Reinstalled 1 package. Reactivating foo 1.0.0... Building package executables... diff --git a/test/content_hash_test.dart b/test/content_hash_test.dart new file mode 100644 index 000000000..6a0725bf7 --- /dev/null +++ b/test/content_hash_test.dart @@ -0,0 +1,221 @@ +// Copyright (c) 2022, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. 
+ +import 'dart:io'; + +import 'package:path/path.dart' as p; +import 'package:pub/src/exit_codes.dart' as exit_codes; +import 'package:test/test.dart'; +import 'package:yaml/yaml.dart'; +import 'package:yaml_edit/yaml_edit.dart'; + +import 'descriptor.dart'; +import 'test_pub.dart'; + +Future main() async { + test('archive_sha256 is stored in lockfile and cache upon download', + () async { + final server = await servePackages(); + server.serve('foo', '1.0.0'); + server.serveContentHashes = true; + await appDir({'foo': 'any'}).create(); + await pubGet(); + final lockfile = loadYaml( + File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync()); + final sha256 = lockfile['packages']['foo']['description']['sha256']; + expect(sha256, hasLength(64)); + await hostedHashesCache([ + file('foo-1.0.0.sha256', sha256), + ]).validate(); + }); + + test( + 'archive_sha256 is stored in lockfile upon download on legacy server without content hashes', + () async { + final server = await servePackages(); + server.serveContentHashes = false; + server.serve('foo', '1.0.0'); + await appDir({'foo': 'any'}).create(); + await pubGet(); + final lockfile = loadYaml( + File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync()); + final sha256 = lockfile['packages']['foo']['description']['sha256']; + expect(sha256, hasLength(64)); + await hostedHashesCache([ + file('foo-1.0.0.sha256', sha256), + ]).validate(); + }); + + test('archive_sha256 is checked on download', () async { + final server = await servePackages(); + server.serve('foo', '1.0.0'); + server.overrideArchiveSha256('foo', '1.0.0', + 'e7a7a0f6d9873e4c40cf68cc3cc9ca5b6c8cef6a2220241bdada4b9cb0083279'); + await appDir({'foo': 'any'}).create(); + await pubGet( + silent: contains('Retry #2'), + error: + contains('Downloaded archive for foo-1.0.0 had wrong content-hash.'), + environment: { + 'PUB_MAX_HTTP_RETRIES': '2', + }, + ); + }); + + test('If content is updated on server we warn and update the lockfile', + () async { + final server = await servePackages(); + server.serveContentHashes = true; + server.serve('foo', '1.0.0'); + await appDir({'foo': 'any'}).create(); + await pubGet(); + server.serve('foo', '1.0.0', + contents: [file('new_file.txt', 'This file could be malicious.')]); + // Pub get will not revisit the file-listing if everything resolves, and only compare with a cached value. + await pubGet(); + // Deleting the version-listing cache will cause it to be refetched, and the + // warning will happen. 
+ File(p.join(globalServer.cachingPath, '.cache', 'foo-versions.json')) + .deleteSync(); + await pubGet( + warning: allOf( + contains('Cached version of foo-1.0.0 has wrong hash - redownloading.'), + contains( + 'The existing content-hash from pubspec.lock doesn\'t match contents for:'), + contains('* foo-1.0.0 from "${server.url}"\n'), + ), + exitCode: exit_codes.SUCCESS, + ); + final lockfile = loadYaml( + File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync()); + final newHash = lockfile['packages']['foo']['description']['sha256']; + expect(newHash, await server.peekArchiveSha256('foo', '1.0.0')); + }); + + test( + 'If content is updated on legacy server, and the download needs refreshing we warn and update the lockfile', + () async { + final server = await servePackages(); + server.serveContentHashes = false; + server.serve('foo', '1.0.0'); + await appDir({'foo': 'any'}).create(); + await pubGet(); + server.serve('foo', '1.0.0', + contents: [file('new_file.txt', 'This file could be malicious.')]); + // Deleting the hash-file cache will cause it to be refetched, and the + // warning will happen. + File(p.join(globalServer.hashesCachingPath, 'foo-1.0.0.sha256')) + .deleteSync(); + + await pubGet( + warning: allOf([ + contains( + 'The existing content-hash from pubspec.lock doesn\'t match contents for:', + ), + contains('* foo-1.0.0 from "${globalServer.url}"'), + ]), + exitCode: exit_codes.SUCCESS, + ); + final lockfile = loadYaml( + File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync()); + final newHash = lockfile['packages']['foo']['description']['sha256']; + expect(newHash, await server.peekArchiveSha256('foo', '1.0.0')); + }); + + test( + 'sha256 in cache is checked on pub get - warning and redownload on legacy server without content-hashes', + () async { + final server = await servePackages(); + server.serveContentHashes = false; + server.serve('foo', '1.0.0'); + await appDir({'foo': 'any'}).create(); + await pubGet(); + final lockfile = loadYaml( + File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync()); + final originalHash = lockfile['packages']['foo']['description']['sha256']; + // Create wrong hash on disk. 
+ await hostedHashesCache([ + file('foo-1.0.0.sha256', + 'e7a7a0f6d9873e4c40cf68cc3cc9ca5b6c8cef6a2220241bdada4b9cb0083279'), + ]).create(); + + await pubGet( + warning: 'Cached version of foo-1.0.0 has wrong hash - redownloading.'); + await hostedHashesCache([ + file('foo-1.0.0.sha256', originalHash), + ]).validate(); + }); + + test('sha256 in cache is checked on pub get - warning and redownload', + () async { + final server = await servePackages(); + server.serveContentHashes = true; + server.serve('foo', '1.0.0'); + await appDir({'foo': 'any'}).create(); + await pubGet(); + final lockfile = loadYaml( + File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync()); + final originalHash = lockfile['packages']['foo']['description']['sha256']; + await hostedHashesCache([ + file('foo-1.0.0.sha256', + 'e7a7a0f6d9873e4c40cf68cc3cc9ca5b6c8cef6a2220241bdada4b9cb0083279'), + ]).create(); + + await pubGet( + warning: 'Cached version of foo-1.0.0 has wrong hash - redownloading.'); + await hostedHashesCache([ + file('foo-1.0.0.sha256', originalHash), + ]).validate(); + }); + + test( + 'Legacy lockfile without content-hashes is updated with the hash on pub get on legacy server without content-hashes', + () async { + final server = await servePackages(); + server.serve('foo', '1.0.0'); + server.serveContentHashes = false; + await appDir({'foo': 'any'}).create(); + await pubGet(); + // Pretend we had no hash in the lockfile. + final lockfile = YamlEditor( + File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync()); + final originalContentHash = lockfile + .remove(['packages', 'foo', 'description', 'sha256']).value as String; + File(p.join(sandbox, appPath, 'pubspec.lock')).writeAsStringSync( + lockfile.toString(), + ); + await pubGet(); + final lockfile2 = YamlEditor( + File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync()); + expect( + lockfile2.parseAt(['packages', 'foo', 'description', 'sha256']).value, + originalContentHash, + ); + }); + + test( + 'Legacy lockfile without content-hashes is updated with the hash on pub get', + () async { + final server = await servePackages(); + server.serve('foo', '1.0.0'); + server.serveContentHashes = true; + await appDir({'foo': 'any'}).create(); + await pubGet(); + // Pretend we had no hash in the lockfile. 
+ final lockfile = YamlEditor( + File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync()); + final originalContentHash = lockfile + .remove(['packages', 'foo', 'description', 'sha256']).value as String; + File(p.join(sandbox, appPath, 'pubspec.lock')).writeAsStringSync( + lockfile.toString(), + ); + await pubGet(); + final lockfile2 = YamlEditor( + File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync()); + expect( + lockfile2.parseAt(['packages', 'foo', 'description', 'sha256']).value, + originalContentHash, + ); + }); +} diff --git a/test/dependency_services/dependency_services_test.dart b/test/dependency_services/dependency_services_test.dart index 7228b36a8..02a32a098 100644 --- a/test/dependency_services/dependency_services_test.dart +++ b/test/dependency_services/dependency_services_test.dart @@ -10,8 +10,10 @@ import 'package:pub/src/io.dart'; import 'package:pub_semver/pub_semver.dart'; import 'package:shelf/shelf.dart' as shelf; import 'package:test/test.dart'; +import 'package:yaml_edit/yaml_edit.dart'; import '../descriptor.dart' as d; +import '../descriptor.dart'; import '../golden_file.dart'; import '../test_pub.dart'; @@ -49,6 +51,7 @@ extension on GoldenTestContext { Platform.resolvedExecutable, [ snapshot, + '--verbose', ...args, ], environment: getPubTestEnvironment(), @@ -120,7 +123,8 @@ Future main() async { final server = (await servePackages()) ..serve('foo', '1.2.3', deps: {'transitive': '^1.0.0'}) ..serve('foo', '2.2.3') - ..serve('transitive', '1.0.0'); + ..serve('transitive', '1.0.0') + ..serveContentHashes = true; await d.dir(appPath, [ d.pubspec({ @@ -151,7 +155,8 @@ Future main() async { final server = (await servePackages()) ..serve('foo', '1.2.3', deps: {'transitive': '^1.0.0'}) ..serve('foo', '2.2.3') - ..serve('transitive', '1.0.0'); + ..serve('transitive', '1.0.0') + ..serveContentHashes = true; await d.git('bar.git', [d.libPubspec('bar', '1.0.0')]).create(); @@ -183,7 +188,8 @@ Future main() async { ..serve('foo', '2.2.3') ..serve('bar', '1.2.3') ..serve('bar', '2.2.3') - ..serve('boo', '1.2.3'); + ..serve('boo', '1.2.3') + ..serveContentHashes = true; await d.dir(appPath, [ d.pubspec({ @@ -211,11 +217,51 @@ Future main() async { }); }); + testWithGolden('Preserves no content-hashes', (context) async { + final server = (await servePackages()) + ..serve('foo', '1.2.3') + ..serve('foo', '2.2.3') + ..serve('bar', '1.2.3') + ..serve('bar', '2.2.3') + ..serve('boo', '1.2.3') + ..serveContentHashes = true; + + await d.dir(appPath, [ + d.pubspec({ + 'name': 'app', + 'dependencies': { + 'foo': '^1.0.0', + 'bar': '^1.0.0', + 'boo': '^1.0.0', + }, + }) + ]).create(); + await pubGet(); + final lockFile = File(path(p.join(appPath, 'pubspec.lock'))); + final lockFileYaml = YamlEditor( + lockFile.readAsStringSync(), + ); + for (final p in lockFileYaml.parseAt(['packages']).value.entries) { + lockFileYaml.remove(['packages', p.key, 'description', 'sha256']); + } + lockFile.writeAsStringSync(lockFileYaml.toString()); + + server.serve('foo', '1.2.4'); + server.serve('boo', '1.2.4'); + + server.dontAllowDownloads(); + + await _listReportApply(context, [ + _PackageVersion('foo', '1.2.4'), + ]); + }); + testWithGolden('Adding transitive', (context) async { final server = (await servePackages()) ..serve('foo', '1.2.3') ..serve('foo', '2.2.3', deps: {'transitive': '^1.0.0'}) - ..serve('transitive', '1.0.0'); + ..serve('transitive', '1.0.0') + ..serveContentHashes = true; await d.dir(appPath, [ d.pubspec({ @@ -247,7 +293,8 @@ Future main() async { final 
server = (await servePackages()) ..serve('foo', '1.0.0') ..serve('bar', '1.0.0') - ..serve('baz', '1.0.0'); + ..serve('baz', '1.0.0') + ..serveContentHashes = true; await d.dir(appPath, [ d.pubspec({ diff --git a/test/descriptor.dart b/test/descriptor.dart index 669d8efef..4ced393e4 100644 --- a/test/descriptor.dart +++ b/test/descriptor.dart @@ -191,6 +191,20 @@ Descriptor hostedCache(Iterable contents, {int? port}) { return dir(hostedCachePath(port: port), contents); } +/// Describes the hosted-hashes cache directory containing hashes of the hosted +/// packages downloaded from the mock package server. +/// +/// If [port] is passed, it's used as the port number of the local hosted server +/// that this cache represents. It defaults to [globalServer.port]. +Descriptor hostedHashesCache(Iterable contents, {int? port}) { + return dir(cachePath, [ + dir( + 'hosted-hashes', + [dir('localhost%58${port ?? globalServer.port}', contents)], + ) + ]); +} + String hostedCachePath({int? port}) => p.join(cachePath, 'hosted', 'localhost%58${port ?? globalServer.port}'); diff --git a/test/get/preserve_lock_file_line_endings_test.dart b/test/get/preserve_lock_file_line_endings_test.dart index bfd015a83..4484fc55e 100644 --- a/test/get/preserve_lock_file_line_endings_test.dart +++ b/test/get/preserve_lock_file_line_endings_test.dart @@ -3,7 +3,7 @@ // BSD-style license that can be found in the LICENSE file. import 'package:path/path.dart' as path; -import 'package:pub/src/entrypoint.dart'; +import 'package:pub/src/lock_file.dart'; import 'package:test/test.dart'; import '../descriptor.dart' as d; diff --git a/test/global/activate/activate_git_after_hosted_test.dart b/test/global/activate/activate_git_after_hosted_test.dart index 390aa8772..99f2fcb54 100644 --- a/test/global/activate/activate_git_after_hosted_test.dart +++ b/test/global/activate/activate_git_after_hosted_test.dart @@ -29,7 +29,7 @@ void main() { output: allOf( startsWith('Package foo is currently active at version 1.0.0.\n' 'Resolving dependencies...\n' - '+ foo 1.0.0 from git ..${separator}foo.git at '), + '* foo 1.0.0 from git ..${separator}foo.git at '), // Specific revision number goes here. endsWith('Building package executables...\n' 'Built foo:foo.\n' diff --git a/test/global/activate/activate_hosted_after_git_test.dart b/test/global/activate/activate_hosted_after_git_test.dart index 7ac174a85..15819ca28 100644 --- a/test/global/activate/activate_hosted_after_git_test.dart +++ b/test/global/activate/activate_hosted_after_git_test.dart @@ -22,14 +22,14 @@ void main() { await runPub(args: ['global', 'activate', '-sgit', '../foo.git']); - await runPub(args: ['global', 'activate', 'foo'], output: ''' - Package foo is currently active from Git repository "..${separator}foo.git". - Resolving dependencies... - + foo 2.0.0 - Downloading foo 2.0.0... - Building package executables... - Built foo:foo. - Activated foo 2.0.0.'''); + await runPub( + args: ['global', 'activate', 'foo'], + output: allOf([ + contains( + 'Package foo is currently active from Git repository "..${separator}foo.git".'), + contains('* foo 2.0.0 (was 1.0.0 from git ..${separator}foo.git at'), + contains('Activated foo 2.0.0.') + ])); // Should now run the hosted one. 
var pub = await pubRun(global: true, args: ['foo']); diff --git a/test/global/activate/activate_hosted_after_path_test.dart b/test/global/activate/activate_hosted_after_path_test.dart index 4cd753f09..8f91a1f7e 100644 --- a/test/global/activate/activate_hosted_after_path_test.dart +++ b/test/global/activate/activate_hosted_after_path_test.dart @@ -27,8 +27,7 @@ void main() { await runPub(args: ['global', 'activate', 'foo'], output: ''' Package foo is currently active at path "$path". Resolving dependencies... - + foo 2.0.0 - Downloading foo 2.0.0... + * foo 2.0.0 (was 1.0.0 from path $path) Building package executables... Built foo:foo. Activated foo 2.0.0.'''); diff --git a/test/global/activate/activate_hosted_twice_test.dart b/test/global/activate/activate_hosted_twice_test.dart index a4d1337ef..fb4ce928e 100644 --- a/test/global/activate/activate_hosted_twice_test.dart +++ b/test/global/activate/activate_hosted_twice_test.dart @@ -46,9 +46,7 @@ Activated foo 1.0.0.'''); await runPub(args: ['global', 'activate', 'foo'], output: ''' Package foo is currently active at version 1.0.0. Resolving dependencies... -+ bar 2.0.0 -+ foo 1.0.0 -Downloading bar 2.0.0... +> bar 2.0.0 (was 1.0.0) Building package executables... Built foo:foo. Activated foo 1.0.0.'''); diff --git a/test/global/activate/custom_hosted_url_test.dart b/test/global/activate/custom_hosted_url_test.dart index 1c0bc041e..cd5c2dc81 100644 --- a/test/global/activate/custom_hosted_url_test.dart +++ b/test/global/activate/custom_hosted_url_test.dart @@ -26,12 +26,13 @@ void main() { customServer.serve('bar', '1.0.0', deps: {'baz': 'any'}); await runPub( - args: ['global', 'activate', 'foo', '-u', customServer.url], - output: allOf([ - contains('Downloading bar 1.0.0...'), - contains('Downloading baz 1.0.0...'), - contains('Downloading foo 1.0.0...'), - contains('Activated foo 1.0.0') - ])); + args: ['global', 'activate', 'foo', '-u', customServer.url], + silent: allOf([ + contains('Downloading bar 1.0.0...'), + contains('Downloading baz 1.0.0...'), + contains('Downloading foo 1.0.0...'), + ]), + output: contains('Activated foo 1.0.0'), + ); }); } diff --git a/test/global/activate/different_version_test.dart b/test/global/activate/different_version_test.dart index 8406b9269..d84ed8cbf 100644 --- a/test/global/activate/different_version_test.dart +++ b/test/global/activate/different_version_test.dart @@ -26,8 +26,7 @@ void main() { await runPub(args: ['global', 'activate', 'foo', '>1.0.0'], output: ''' Package foo is currently active at version 1.0.0. Resolving dependencies... - + foo 2.0.0 - Downloading foo 2.0.0... + > foo 2.0.0 (was 1.0.0) Building package executables... Built foo:foo. Activated foo 2.0.0.'''); diff --git a/test/global/activate/ignores_active_version_test.dart b/test/global/activate/ignores_active_version_test.dart index 777a63127..d3e609498 100644 --- a/test/global/activate/ignores_active_version_test.dart +++ b/test/global/activate/ignores_active_version_test.dart @@ -25,8 +25,7 @@ void main() { await runPub(args: ['global', 'activate', 'foo', '>1.0.0'], output: ''' Package foo is currently active at version 1.2.3. Resolving dependencies... - + foo 1.3.0 - Downloading foo 1.3.0... + > foo 1.3.0 (was 1.2.3) Building package executables... Built foo:foo. 
Activated foo 1.3.0.'''); diff --git a/test/global/activate/installs_dependencies_for_git_test.dart b/test/global/activate/installs_dependencies_for_git_test.dart index 6018f5061..04bdc5364 100644 --- a/test/global/activate/installs_dependencies_for_git_test.dart +++ b/test/global/activate/installs_dependencies_for_git_test.dart @@ -20,7 +20,7 @@ void main() { await runPub( args: ['global', 'activate', '-sgit', '../foo.git'], - output: allOf([ + silent: allOf([ contains('Downloading bar 1.0.0...'), contains('Downloading baz 1.0.0...') ])); diff --git a/test/global/activate/installs_dependencies_for_path_test.dart b/test/global/activate/installs_dependencies_for_path_test.dart index 4688f0749..ae9024244 100644 --- a/test/global/activate/installs_dependencies_for_path_test.dart +++ b/test/global/activate/installs_dependencies_for_path_test.dart @@ -20,8 +20,6 @@ void main() { var pub = await startPub(args: ['global', 'activate', '-spath', '../foo']); expect(pub.stdout, emitsThrough('Resolving dependencies in ../foo...')); - expect(pub.stdout, emitsThrough('Downloading bar 1.0.0...')); - expect(pub.stdout, emitsThrough('Downloading baz 2.0.0...')); expect(pub.stdout, emitsThrough(startsWith('Activated foo 0.0.0 at path'))); await pub.shouldExit(); diff --git a/test/global/activate/installs_dependencies_test.dart b/test/global/activate/installs_dependencies_test.dart index 770ebcfa9..9cb235480 100644 --- a/test/global/activate/installs_dependencies_test.dart +++ b/test/global/activate/installs_dependencies_test.dart @@ -15,7 +15,7 @@ void main() { await runPub( args: ['global', 'activate', 'foo'], - output: allOf([ + silent: allOf([ contains('Downloading bar 1.0.0...'), contains('Downloading baz 1.0.0...') ])); diff --git a/test/global/activate/reactivating_git_upgrades_test.dart b/test/global/activate/reactivating_git_upgrades_test.dart index 5eb8d5f88..6ce628de3 100644 --- a/test/global/activate/reactivating_git_upgrades_test.dart +++ b/test/global/activate/reactivating_git_upgrades_test.dart @@ -36,7 +36,7 @@ void main() { startsWith('Package foo is currently active from Git repository ' '"..${separator}foo.git".\n' 'Resolving dependencies...\n' - '+ foo 1.0.1 from git ..${separator}foo.git at '), + '> foo 1.0.1 from git ..${separator}foo.git at '), // Specific revision number goes here. endsWith('Building package executables...\n' 'Built foo:foo.\n' diff --git a/test/global/activate/uncached_package_test.dart b/test/global/activate/uncached_package_test.dart index 1a18d70d5..cb8d26de7 100644 --- a/test/global/activate/uncached_package_test.dart +++ b/test/global/activate/uncached_package_test.dart @@ -23,7 +23,6 @@ void main() { await runPub(args: ['global', 'activate', 'foo'], output: ''' Resolving dependencies... + foo 1.2.3 - Downloading foo 1.2.3... Building package executables... Built foo:foo. Activated foo 1.2.3.'''); diff --git a/test/global/deactivate/deactivate_and_reactivate_package_test.dart b/test/global/deactivate/deactivate_and_reactivate_package_test.dart index da3348647..5a6f6e34d 100644 --- a/test/global/deactivate/deactivate_and_reactivate_package_test.dart +++ b/test/global/deactivate/deactivate_and_reactivate_package_test.dart @@ -20,10 +20,12 @@ void main() { output: 'Deactivated package foo 1.0.0.'); // Activating again should forget the old version. - await runPub(args: ['global', 'activate', 'foo'], output: ''' + await runPub( + args: ['global', 'activate', 'foo'], + silent: contains('Downloading foo 2.0.0...'), + output: ''' Resolving dependencies... 
+ foo 2.0.0 - Downloading foo 2.0.0... Activated foo 2.0.0.'''); }); } diff --git a/test/hosted/short_syntax_test.dart b/test/hosted/short_syntax_test.dart index 5d4cf281f..da6d75434 100644 --- a/test/hosted/short_syntax_test.dart +++ b/test/hosted/short_syntax_test.dart @@ -45,6 +45,7 @@ void main() { 'description': { 'name': 'foo', 'url': globalServer.url, + 'sha256': matches(RegExp(r'[0-9a-f]{64}')) }, 'version': '1.2.3', }); diff --git a/test/lock_file_test.dart b/test/lock_file_test.dart index 1bfac5e0f..20e397686 100644 --- a/test/lock_file_test.dart +++ b/test/lock_file_test.dart @@ -222,21 +222,27 @@ packages: test('serialize() dumps the lockfile to YAML', () { var lockfile = LockFile([ PackageId( - 'foo', - Version.parse('1.2.3'), - ResolvedHostedDescription( - HostedDescription('foo', 'https://foo.com'))), + 'foo', + Version.parse('1.2.3'), + ResolvedHostedDescription( + HostedDescription('foo', 'https://foo.com'), + sha256: null, + ), + ), PackageId( - 'bar', - Version.parse('3.2.1'), - ResolvedHostedDescription( - HostedDescription('bar', 'https://bar.com'))), + 'bar', + Version.parse('3.2.1'), + ResolvedHostedDescription( + HostedDescription('bar', 'https://bar.com'), + sha256: null, + ), + ), ], devDependencies: { 'bar' }); expect( - loadYaml(lockfile.serialize('')), + loadYaml(lockfile.serialize('', cache)), equals({ 'sdks': {'dart': 'any'}, 'packages': { diff --git a/test/package_server.dart b/test/package_server.dart index b7587e73d..06a8d1bf3 100644 --- a/test/package_server.dart +++ b/test/package_server.dart @@ -7,10 +7,12 @@ import 'dart:convert'; import 'dart:io'; import 'dart:typed_data'; +import 'package:crypto/crypto.dart'; import 'package:path/path.dart' as p; import 'package:pub/src/crc32c.dart'; import 'package:pub/src/source/hosted.dart'; import 'package:pub/src/third_party/tar/tar.dart'; +import 'package:pub/src/utils.dart' show hexEncode; import 'package:pub_semver/pub_semver.dart'; import 'package:shelf/shelf.dart' as shelf; import 'package:shelf/shelf_io.dart' as shelf_io; @@ -27,9 +29,12 @@ class PackageServer { /// Handlers of requests. Last matching handler will be used. final List<_PatternAndHandler> _handlers = []; - // A list of all the requests recieved up till now. + // A list of all the requests received up till now. final List requestedPaths = []; + // Setting this to false will disable automatic calculation of content-hashes. + bool serveContentHashes = true; + /// Whether the [IOServer] should compress the content, if possible. /// The default value is `false` (compression disabled). /// See [HttpServer.autoCompress] for details. 
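The recurring operation in this file, and in `lib/src/source/hosted.dart` earlier in this diff, is hashing an archive's byte stream without buffering it all in memory: the mock server hashes `version.contents()` to fill in `archive_sha256`, while the client-side `validateSha256` wrapper hashes the download as it forwards it. Below is a minimal standalone sketch of both shapes, assuming `package:crypto` and `package:convert`; the helper names `sha256HexOfStream` and `hashWhileForwarding` are illustrative only and not identifiers from this patch.

```dart
import 'dart:convert' show ChunkedConversionSink;

import 'package:convert/convert.dart' show hex;
import 'package:crypto/crypto.dart' show Digest, sha256;

/// Hex-encoded sha256 of a whole byte stream; the shape the mock server (and
/// `peekArchiveSha256`) uses to fill in `archive_sha256`.
Future<String> sha256HexOfStream(Stream<List<int>> bytes) async {
  final digest = await sha256.bind(bytes).first;
  return hex.encode(digest.bytes);
}

/// Forwards [source] unchanged while hashing it, reporting the digest once the
/// stream is done; the same tee-and-hash shape as the client's
/// `validateSha256` wrapper, minus the comparison with the expected hash.
Stream<List<int>> hashWhileForwarding(
  Stream<List<int>> source,
  void Function(Digest digest) onDigest,
) async* {
  late Digest digest;
  final output = ChunkedConversionSink<Digest>.withCallback(
    (digests) => digest = digests.single,
  );
  final input = sha256.startChunkedConversion(output);
  await for (final chunk in source) {
    input.add(chunk);
    yield chunk;
  }
  input.close();
  onDigest(digest);
}
```

The tee variant is what allows the downloaded bytes to be verified and written to disk in a single pass.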
@@ -66,7 +71,7 @@ class PackageServer {
         PackageServer._(await shelf_io.IOServer.bind('localhost', 0));
     server.handle(
       _versionInfoPattern,
-      (shelf.Request request) {
+      (shelf.Request request) async {
         final parts = request.url.pathSegments;
         assert(parts[0] == 'api');
         assert(parts[1] == 'packages');
@@ -76,17 +81,26 @@ class PackageServer {
         if (package == null) {
           return shelf.Response.notFound('No package named $name');
         }
+
         return shelf.Response.ok(
           jsonEncode({
             'name': name,
             'uploaders': ['nweiz@google.com'],
-            'versions': package.versions.values
-                .map((version) => packageVersionApiMap(
-                      server._inner.url.toString(),
-                      version.pubspec,
-                      retracted: version.isRetracted,
-                    ))
-                .toList(),
+            'versions': [
+              for (final version in package.versions.values)
+                {
+                  'pubspec': version.pubspec,
+                  'version': version.version.toString(),
+                  'archive_url':
+                      '${server.url}/packages/$name/versions/${version.version}.tar.gz',
+                  if (version.isRetracted) 'retracted': true,
+                  if (version.sha256 != null || server.serveContentHashes)
+                    'archive_sha256': version.sha256 ??
+                        hexEncode(
+                            (await sha256.bind(version.contents()).first)
+                                .bytes)
+                }
+            ],
             if (package.isDiscontinued) 'isDiscontinued': true,
             if (package.discontinuedReplacementText != null)
               'replacedBy': package.discontinuedReplacementText,
@@ -196,6 +210,9 @@ class PackageServer {
   String get cachingPath =>
       p.join(d.sandbox, cachePath, 'hosted', 'localhost%58$port');
 
+  String get hashesCachingPath =>
+      p.join(d.sandbox, cachePath, 'hosted-hashes', 'localhost%58$port');
+
   /// A map from package names to the concrete packages to serve.
   final _packages = {};
 
@@ -240,7 +257,7 @@ class PackageServer {
               // file mode
               mode: 420,
               // size: 100,
-              modified: DateTime.now(),
+              modified: DateTime.fromMicrosecondsSinceEpoch(0),
               userName: 'pub',
               groupName: 'pub',
             ),
@@ -253,13 +270,29 @@ class PackageServer {
         for (final e in contents ?? []) {
           addDescriptor(e, '');
         }
-        return Stream.fromIterable(entries)
+        return _replaceOs(Stream.fromIterable(entries)
             .transform(tarWriterWith(format: OutputFormat.gnuLongName))
-            .transform(gzip.encoder);
+            .transform(gzip.encoder));
       },
     );
   }
 
+  /// Replaces the entry at index 9 in [stream] with a 0. This replaces the OS
+  /// entry of a gzip stream, giving us the same stream and thus stable testing
+  /// on all platforms.
+  ///
+  /// See https://www.rfc-editor.org/rfc/rfc1952 section 2.3 for information
+  /// about the OS header.
+  Stream<List<int>> _replaceOs(Stream<List<int>> stream) async* {
+    final bytesBuilder = BytesBuilder();
+    await for (final t in stream) {
+      bytesBuilder.add(t);
+    }
+    final result = bytesBuilder.toBytes();
+    result[9] = 0;
+    yield result;
+  }
+
   // Mark a package discontinued.
   void discontinue(String name,
       {bool isDiscontinued = true, String? replacementText}) {
@@ -277,6 +310,16 @@ class PackageServer {
     _packages[name]!.versions[version]!.isRetracted = true;
   }
 
+  /// Useful for testing handling of a wrong hash.
+  void overrideArchiveSha256(String name, String version, String sha256) {
+    _packages[name]!.versions[version]!.sha256 = sha256;
+  }
+
+  Future<String> peekArchiveSha256(String name, String version) async {
+    final v = _packages[name]!.versions[version]!;
+    return v.sha256 ?? hexEncode((await sha256.bind(v.contents()).first).bytes);
+  }
+
   Future peekArchiveChecksumHeader(String name, String version) async {
     final v = _packages[name]!.versions[version]!;
 
@@ -321,6 +364,8 @@ class _ServedPackageVersion {
   final Stream<List<int>> Function() contents;
   final Map<String, List<String>>? headers;
   bool isRetracted = false;
+  // Overrides the calculated sha256.
+  String?
sha256; Version get version => Version.parse(pubspec['version']); diff --git a/test/pubspec_test.dart b/test/pubspec_test.dart index 23079c587..a75022911 100644 --- a/test/pubspec_test.dart +++ b/test/pubspec_test.dart @@ -292,8 +292,10 @@ dependencies: expect(foo.name, equals('foo')); expect(foo.source.name, 'hosted'); expect( - ResolvedHostedDescription(foo.description as HostedDescription) - .serializeForLockfile(containingDir: null), + ResolvedHostedDescription( + foo.description as HostedDescription, + sha256: null, + ).serializeForLockfile(containingDir: null), { 'url': 'https://example.org/pub/', 'name': 'bar', @@ -318,8 +320,10 @@ dependencies: expect(foo.name, equals('foo')); expect(foo.source.name, 'hosted'); expect( - ResolvedHostedDescription(foo.description as HostedDescription) - .serializeForLockfile(containingDir: null), + ResolvedHostedDescription( + foo.description as HostedDescription, + sha256: null, + ).serializeForLockfile(containingDir: null), { 'url': 'https://example.org/pub/', 'name': 'foo', @@ -343,8 +347,10 @@ dependencies: expect(foo.name, equals('foo')); expect(foo.source.name, 'hosted'); expect( - ResolvedHostedDescription(foo.description as HostedDescription) - .serializeForLockfile(containingDir: null), + ResolvedHostedDescription( + foo.description as HostedDescription, + sha256: null, + ).serializeForLockfile(containingDir: null), { 'url': 'https://example.org/pub/', 'name': 'foo', @@ -368,8 +374,10 @@ dependencies: expect(foo.name, equals('foo')); expect(foo.source.name, 'hosted'); expect( - ResolvedHostedDescription(foo.description as HostedDescription) - .serializeForLockfile(containingDir: null), + ResolvedHostedDescription( + foo.description as HostedDescription, + sha256: null, + ).serializeForLockfile(containingDir: null), { 'url': 'https://pub.dartlang.org', 'name': 'bar', @@ -412,8 +420,10 @@ dependencies: expect(foo.name, equals('foo')); expect(foo.source.name, 'hosted'); expect( - ResolvedHostedDescription(foo.description as HostedDescription) - .serializeForLockfile(containingDir: null), + ResolvedHostedDescription( + foo.description as HostedDescription, + sha256: null, + ).serializeForLockfile(containingDir: null), { 'url': 'https://pub.dartlang.org', 'name': 'foo', diff --git a/test/reformat_ranges_test.dart b/test/reformat_ranges_test.dart index d2c91ca75..2ead04dd0 100644 --- a/test/reformat_ranges_test.dart +++ b/test/reformat_ranges_test.dart @@ -12,6 +12,7 @@ import 'package:test/test.dart'; void main() { final description = ResolvedHostedDescription( HostedDescription('foo', 'https://pub.dev'), + sha256: null, ); test('reformatMax when max has a build identifier', () { expect( diff --git a/test/test_pub.dart b/test/test_pub.dart index be89bdaf1..58da455a6 100644 --- a/test/test_pub.dart +++ b/test/test_pub.dart @@ -24,6 +24,7 @@ import 'package:pub/src/io.dart'; import 'package:pub/src/lock_file.dart'; import 'package:pub/src/log.dart' as log; import 'package:pub/src/package_name.dart'; +import 'package:pub/src/source/hosted.dart'; import 'package:pub/src/system_cache.dart'; import 'package:pub/src/utils.dart'; import 'package:pub/src/validator.dart'; @@ -207,6 +208,7 @@ Future pubUpgrade( Object? output, Object? error, Object? warning, + Object? silent, int? exitCode, Map? environment, String? 
workingDirectory}) async => @@ -216,6 +218,7 @@ Future pubUpgrade( output: output, error: error, warning: warning, + silent: silent, exitCode: exitCode, environment: environment, workingDirectory: workingDirectory, @@ -626,7 +629,8 @@ Future createLockFile(String package, _createLockFile(cache, sandbox: dependenciesInSandBox, hosted: hosted); await d.dir(package, [ - d.file('pubspec.lock', lockFile.serialize(p.join(d.sandbox, package))) + d.file( + 'pubspec.lock', lockFile.serialize(p.join(d.sandbox, package), cache)) ]).create(); } @@ -653,7 +657,17 @@ LockFile _createLockFile(SystemCache cache, containingDir: p.join(d.sandbox, appPath))), if (hosted != null) ...hosted.entries.map( - (entry) => cache.hosted.idFor(entry.key, Version.parse(entry.value))) + (entry) => PackageId( + entry.key, + Version.parse(entry.value), + ResolvedHostedDescription( + HostedDescription( + entry.key, + 'https://pub.dev', + ), + sha256: null), + ), + ) ]; return LockFile(packages); @@ -693,38 +707,6 @@ Map packageMap( return package; } -/// Returns a Map in the format used by the pub.dev API to represent a -/// package version. -/// -/// [pubspec] is the parsed pubspec of the package version. If [full] is true, -/// this returns the complete map, including metadata that's only included when -/// requesting the package version directly. -Map packageVersionApiMap(String hostedUrl, Map pubspec, - {bool retracted = false, bool full = false}) { - var name = pubspec['name']; - var version = pubspec['version']; - var map = { - 'pubspec': pubspec, - 'version': version, - 'archive_url': '$hostedUrl/packages/$name/versions/$version.tar.gz', - }; - - if (retracted) { - map['retracted'] = true; - } - - if (full) { - map.addAll({ - 'downloads': 0, - 'created': '2012-09-25T18:38:28.685260', - 'libraries': ['$name.dart'], - 'uploader': ['nweiz@google.com'] - }); - } - - return map; -} - /// Returns the name of the shell script for a binstub named [name]. /// /// Adds a ".bat" extension on Windows. @@ -884,9 +866,6 @@ StreamMatcher emitsLines(String output) => emitsInOrder(output.split('\n')); /// Removes output from pub known to be unstable. Iterable filterUnstableLines(List input) { return input - // Downloading order is not deterministic, so to avoid flakiness we filter - // out these lines. - .where((line) => !line.startsWith('Downloading ')) // Any paths in output should be relative to the sandbox and with forward // slashes to be stable across platforms. 
.map((line) { diff --git a/test/testdata/goldens/dependency_services/dependency_services_test/Adding transitive.txt b/test/testdata/goldens/dependency_services/dependency_services_test/Adding transitive.txt index 7b01ac06f..3b16db157 100644 --- a/test/testdata/goldens/dependency_services/dependency_services_test/Adding transitive.txt +++ b/test/testdata/goldens/dependency_services/dependency_services_test/Adding transitive.txt @@ -10,6 +10,7 @@ packages: dependency: "direct main" description: name: foo + sha256: "1614d63c0867d0994f75a231be7ee394a4f30cdeede4c7ea471fcad354c23d1f" url: "http://localhost:$PORT" source: hosted version: "1.2.3" @@ -30,7 +31,8 @@ $ dependency_services list "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "1614d63c0867d0994f75a231be7ee394a4f30cdeede4c7ea471fcad354c23d1f" } } } @@ -51,7 +53,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "1614d63c0867d0994f75a231be7ee394a4f30cdeede4c7ea471fcad354c23d1f" } }, "latest": "2.2.3", @@ -66,7 +69,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "fc06d01652f7b73f789abeb5b61aeb68b13cd472f87610cb8fb80e402a9139ff" } }, "constraintBumped": "^2.2.3", @@ -78,7 +82,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "1614d63c0867d0994f75a231be7ee394a4f30cdeede4c7ea471fcad354c23d1f" } } }, @@ -90,7 +95,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "transitive", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "8d245de5cde3ab3293e4cdea516c6a0395e24d338688279bab5f6c97bffa0915" } }, "constraintBumped": null, @@ -110,7 +116,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "fc06d01652f7b73f789abeb5b61aeb68b13cd472f87610cb8fb80e402a9139ff" } }, "constraintBumped": "^2.2.3", @@ -122,7 +129,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "1614d63c0867d0994f75a231be7ee394a4f30cdeede4c7ea471fcad354c23d1f" } } }, @@ -134,7 +142,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "transitive", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "8d245de5cde3ab3293e4cdea516c6a0395e24d338688279bab5f6c97bffa0915" } }, "constraintBumped": null, @@ -167,6 +176,7 @@ packages: dependency: "direct main" description: name: foo + sha256: fc06d01652f7b73f789abeb5b61aeb68b13cd472f87610cb8fb80e402a9139ff url: "http://localhost:$PORT" source: hosted version: "2.2.3" @@ -174,6 +184,7 @@ packages: dependency: transitive description: name: transitive + sha256: "8d245de5cde3ab3293e4cdea516c6a0395e24d338688279bab5f6c97bffa0915" url: "http://localhost:$PORT" source: hosted version: "1.0.0" diff --git a/test/testdata/goldens/dependency_services/dependency_services_test/Compatible.txt b/test/testdata/goldens/dependency_services/dependency_services_test/Compatible.txt index b4726c5b3..4fbaffbd0 100644 --- a/test/testdata/goldens/dependency_services/dependency_services_test/Compatible.txt +++ 
b/test/testdata/goldens/dependency_services/dependency_services_test/Compatible.txt @@ -10,6 +10,7 @@ packages: dependency: "direct main" description: name: bar + sha256: ea004e8b0069df9e9827b101b64aaad455cc358849f1801dc48a41111cabbe20 url: "http://localhost:$PORT" source: hosted version: "1.2.3" @@ -17,6 +18,7 @@ packages: dependency: "direct main" description: name: boo + sha256: "7971e197614f18130070007a54f446366c6e594f0ed159ae2c4e2b42972c426b" url: "http://localhost:$PORT" source: hosted version: "1.2.3" @@ -24,6 +26,7 @@ packages: dependency: "direct main" description: name: foo + sha256: "1614d63c0867d0994f75a231be7ee394a4f30cdeede4c7ea471fcad354c23d1f" url: "http://localhost:$PORT" source: hosted version: "1.2.3" @@ -44,7 +47,8 @@ $ dependency_services list "type": "hosted", "description": { "name": "bar", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "ea004e8b0069df9e9827b101b64aaad455cc358849f1801dc48a41111cabbe20" } } }, @@ -57,7 +61,8 @@ $ dependency_services list "type": "hosted", "description": { "name": "boo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "7971e197614f18130070007a54f446366c6e594f0ed159ae2c4e2b42972c426b" } } }, @@ -70,7 +75,8 @@ $ dependency_services list "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "1614d63c0867d0994f75a231be7ee394a4f30cdeede4c7ea471fcad354c23d1f" } } } @@ -91,7 +97,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "bar", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "ea004e8b0069df9e9827b101b64aaad455cc358849f1801dc48a41111cabbe20" } }, "latest": "2.2.3", @@ -106,7 +113,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "bar", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "adcfe9ac3d6955fd4332f29f47bf3e814e388e2da7c2bc55d4561971bf8b5335" } }, "constraintBumped": "^2.2.3", @@ -118,7 +126,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "bar", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "ea004e8b0069df9e9827b101b64aaad455cc358849f1801dc48a41111cabbe20" } } } @@ -132,7 +141,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "bar", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "adcfe9ac3d6955fd4332f29f47bf3e814e388e2da7c2bc55d4561971bf8b5335" } }, "constraintBumped": "^2.2.3", @@ -144,7 +154,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "bar", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "ea004e8b0069df9e9827b101b64aaad455cc358849f1801dc48a41111cabbe20" } } } @@ -158,7 +169,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "boo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "7971e197614f18130070007a54f446366c6e594f0ed159ae2c4e2b42972c426b" } }, "latest": "1.2.4", @@ -172,7 +184,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "boo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "b060c0315b77c8383da5f9a7eee7667dbdc8108969e0a7855e294e35e7f42230" } }, "constraintBumped": "^1.0.0", @@ -184,7 +197,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "boo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": 
"7971e197614f18130070007a54f446366c6e594f0ed159ae2c4e2b42972c426b" } } } @@ -198,7 +212,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "boo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "b060c0315b77c8383da5f9a7eee7667dbdc8108969e0a7855e294e35e7f42230" } }, "constraintBumped": "^1.2.4", @@ -210,7 +225,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "boo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "7971e197614f18130070007a54f446366c6e594f0ed159ae2c4e2b42972c426b" } } } @@ -224,7 +240,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "boo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "b060c0315b77c8383da5f9a7eee7667dbdc8108969e0a7855e294e35e7f42230" } }, "constraintBumped": "^1.2.4", @@ -236,7 +253,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "boo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "7971e197614f18130070007a54f446366c6e594f0ed159ae2c4e2b42972c426b" } } } @@ -250,7 +268,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "1614d63c0867d0994f75a231be7ee394a4f30cdeede4c7ea471fcad354c23d1f" } }, "latest": "2.2.3", @@ -264,7 +283,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "88f2f9251967bf04bd478873f074b9d8df9f1c959afc150ba3b0ea813d48161e" } }, "constraintBumped": "^1.0.0", @@ -276,7 +296,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "1614d63c0867d0994f75a231be7ee394a4f30cdeede4c7ea471fcad354c23d1f" } } } @@ -290,7 +311,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "bf378a3f6c4840f911d66ab375f6d3eae78a015a41f0b8b202c31d4af010892e" } }, "constraintBumped": "^2.2.3", @@ -302,7 +324,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "1614d63c0867d0994f75a231be7ee394a4f30cdeede4c7ea471fcad354c23d1f" } } } @@ -316,7 +339,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "bf378a3f6c4840f911d66ab375f6d3eae78a015a41f0b8b202c31d4af010892e" } }, "constraintBumped": "^2.2.3", @@ -328,7 +352,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "1614d63c0867d0994f75a231be7ee394a4f30cdeede4c7ea471fcad354c23d1f" } } } @@ -355,6 +380,7 @@ packages: dependency: "direct main" description: name: bar + sha256: ea004e8b0069df9e9827b101b64aaad455cc358849f1801dc48a41111cabbe20 url: "http://localhost:$PORT" source: hosted version: "1.2.3" @@ -362,6 +388,7 @@ packages: dependency: "direct main" description: name: boo + sha256: "7971e197614f18130070007a54f446366c6e594f0ed159ae2c4e2b42972c426b" url: "http://localhost:$PORT" source: hosted version: "1.2.3" @@ -369,6 +396,7 @@ packages: dependency: "direct main" description: name: foo + sha256: 
"88f2f9251967bf04bd478873f074b9d8df9f1c959afc150ba3b0ea813d48161e" url: "http://localhost:$PORT" source: hosted version: "1.2.4" diff --git a/test/testdata/goldens/dependency_services/dependency_services_test/No pubspec.lock.txt b/test/testdata/goldens/dependency_services/dependency_services_test/No pubspec.lock.txt index e5ab484fd..a6709fc40 100644 --- a/test/testdata/goldens/dependency_services/dependency_services_test/No pubspec.lock.txt +++ b/test/testdata/goldens/dependency_services/dependency_services_test/No pubspec.lock.txt @@ -34,7 +34,8 @@ $ dependency_services list "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "72f6a04c4af0d78e4f1a1e2eb00a850843e6c0c5233ac2ca911aa061cbd5f8f1" } } }, @@ -47,7 +48,8 @@ $ dependency_services list "type": "hosted", "description": { "name": "transitive", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "8d245de5cde3ab3293e4cdea516c6a0395e24d338688279bab5f6c97bffa0915" } } } @@ -86,7 +88,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "bf378a3f6c4840f911d66ab375f6d3eae78a015a41f0b8b202c31d4af010892e" } }, "constraintBumped": "^2.2.3", @@ -98,7 +101,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "72f6a04c4af0d78e4f1a1e2eb00a850843e6c0c5233ac2ca911aa061cbd5f8f1" } } } @@ -112,7 +116,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "72f6a04c4af0d78e4f1a1e2eb00a850843e6c0c5233ac2ca911aa061cbd5f8f1" } }, "latest": "2.2.3", @@ -127,7 +132,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "bf378a3f6c4840f911d66ab375f6d3eae78a015a41f0b8b202c31d4af010892e" } }, "constraintBumped": "^2.2.3", @@ -139,7 +145,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "72f6a04c4af0d78e4f1a1e2eb00a850843e6c0c5233ac2ca911aa061cbd5f8f1" } } } @@ -153,7 +160,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "bf378a3f6c4840f911d66ab375f6d3eae78a015a41f0b8b202c31d4af010892e" } }, "constraintBumped": "^2.2.3", @@ -165,7 +173,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "72f6a04c4af0d78e4f1a1e2eb00a850843e6c0c5233ac2ca911aa061cbd5f8f1" } } } @@ -179,7 +188,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "transitive", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "8d245de5cde3ab3293e4cdea516c6a0395e24d338688279bab5f6c97bffa0915" } }, "latest": "1.0.0", diff --git a/test/testdata/goldens/dependency_services/dependency_services_test/Preserves no content-hashes.txt b/test/testdata/goldens/dependency_services/dependency_services_test/Preserves no content-hashes.txt new file mode 100644 index 000000000..bd707b488 --- /dev/null +++ b/test/testdata/goldens/dependency_services/dependency_services_test/Preserves 
no content-hashes.txt @@ -0,0 +1,384 @@ +# GENERATED BY: test/dependency_services/dependency_services_test.dart + +$ cat pubspec.yaml +{"name":"app","dependencies":{"foo":"^1.0.0","bar":"^1.0.0","boo":"^1.0.0"},"environment":{"sdk":">=0.1.2 <1.0.0"}} +$ cat pubspec.lock +# Generated by pub +# See https://dart.dev/tools/pub/glossary#lockfile +packages: + bar: + dependency: "direct main" + description: + name: bar + url: "http://localhost:$PORT" + source: hosted + version: "1.2.3" + boo: + dependency: "direct main" + description: + name: boo + url: "http://localhost:$PORT" + source: hosted + version: "1.2.3" + foo: + dependency: "direct main" + description: + name: foo + url: "http://localhost:$PORT" + source: hosted + version: "1.2.3" +sdks: + dart: ">=0.1.2 <1.0.0" +-------------------------------- END OF OUTPUT --------------------------------- + +## Section list +$ dependency_services list +{ + "dependencies": [ + { + "name": "bar", + "version": "1.2.3", + "kind": "direct", + "constraint": "^1.0.0", + "source": { + "type": "hosted", + "description": { + "name": "bar", + "url": "http://localhost:$PORT" + } + } + }, + { + "name": "boo", + "version": "1.2.3", + "kind": "direct", + "constraint": "^1.0.0", + "source": { + "type": "hosted", + "description": { + "name": "boo", + "url": "http://localhost:$PORT" + } + } + }, + { + "name": "foo", + "version": "1.2.3", + "kind": "direct", + "constraint": "^1.0.0", + "source": { + "type": "hosted", + "description": { + "name": "foo", + "url": "http://localhost:$PORT" + } + } + } + ] +} + +-------------------------------- END OF OUTPUT --------------------------------- + +## Section report +$ dependency_services report +{ + "dependencies": [ + { + "name": "bar", + "version": "1.2.3", + "kind": "direct", + "source": { + "type": "hosted", + "description": { + "name": "bar", + "url": "http://localhost:$PORT" + } + }, + "latest": "2.2.3", + "constraint": "^1.0.0", + "compatible": [], + "singleBreaking": [ + { + "name": "bar", + "version": "2.2.3", + "kind": "direct", + "source": { + "type": "hosted", + "description": { + "name": "bar", + "url": "http://localhost:$PORT", + "sha256": "adcfe9ac3d6955fd4332f29f47bf3e814e388e2da7c2bc55d4561971bf8b5335" + } + }, + "constraintBumped": "^2.2.3", + "constraintWidened": ">=1.0.0 <3.0.0", + "constraintBumpedIfNeeded": "^2.2.3", + "previousVersion": "1.2.3", + "previousConstraint": "^1.0.0", + "previousSource": { + "type": "hosted", + "description": { + "name": "bar", + "url": "http://localhost:$PORT" + } + } + } + ], + "multiBreaking": [ + { + "name": "bar", + "version": "2.2.3", + "kind": "direct", + "source": { + "type": "hosted", + "description": { + "name": "bar", + "url": "http://localhost:$PORT", + "sha256": "adcfe9ac3d6955fd4332f29f47bf3e814e388e2da7c2bc55d4561971bf8b5335" + } + }, + "constraintBumped": "^2.2.3", + "constraintWidened": ">=1.0.0 <3.0.0", + "constraintBumpedIfNeeded": "^2.2.3", + "previousVersion": "1.2.3", + "previousConstraint": "^1.0.0", + "previousSource": { + "type": "hosted", + "description": { + "name": "bar", + "url": "http://localhost:$PORT" + } + } + } + ] + }, + { + "name": "boo", + "version": "1.2.3", + "kind": "direct", + "source": { + "type": "hosted", + "description": { + "name": "boo", + "url": "http://localhost:$PORT" + } + }, + "latest": "1.2.4", + "constraint": "^1.0.0", + "compatible": [ + { + "name": "boo", + "version": "1.2.4", + "kind": "direct", + "source": { + "type": "hosted", + "description": { + "name": "boo", + "url": "http://localhost:$PORT", + "sha256": 
"b060c0315b77c8383da5f9a7eee7667dbdc8108969e0a7855e294e35e7f42230" + } + }, + "constraintBumped": "^1.0.0", + "constraintWidened": "^1.0.0", + "constraintBumpedIfNeeded": "^1.0.0", + "previousVersion": "1.2.3", + "previousConstraint": "^1.0.0", + "previousSource": { + "type": "hosted", + "description": { + "name": "boo", + "url": "http://localhost:$PORT" + } + } + } + ], + "singleBreaking": [ + { + "name": "boo", + "version": "1.2.4", + "kind": "direct", + "source": { + "type": "hosted", + "description": { + "name": "boo", + "url": "http://localhost:$PORT", + "sha256": "b060c0315b77c8383da5f9a7eee7667dbdc8108969e0a7855e294e35e7f42230" + } + }, + "constraintBumped": "^1.2.4", + "constraintWidened": "^1.0.0", + "constraintBumpedIfNeeded": "^1.0.0", + "previousVersion": "1.2.3", + "previousConstraint": "^1.0.0", + "previousSource": { + "type": "hosted", + "description": { + "name": "boo", + "url": "http://localhost:$PORT" + } + } + } + ], + "multiBreaking": [ + { + "name": "boo", + "version": "1.2.4", + "kind": "direct", + "source": { + "type": "hosted", + "description": { + "name": "boo", + "url": "http://localhost:$PORT", + "sha256": "b060c0315b77c8383da5f9a7eee7667dbdc8108969e0a7855e294e35e7f42230" + } + }, + "constraintBumped": "^1.2.4", + "constraintWidened": "^1.0.0", + "constraintBumpedIfNeeded": "^1.0.0", + "previousVersion": "1.2.3", + "previousConstraint": "^1.0.0", + "previousSource": { + "type": "hosted", + "description": { + "name": "boo", + "url": "http://localhost:$PORT" + } + } + } + ] + }, + { + "name": "foo", + "version": "1.2.3", + "kind": "direct", + "source": { + "type": "hosted", + "description": { + "name": "foo", + "url": "http://localhost:$PORT" + } + }, + "latest": "2.2.3", + "constraint": "^1.0.0", + "compatible": [ + { + "name": "foo", + "version": "1.2.4", + "kind": "direct", + "source": { + "type": "hosted", + "description": { + "name": "foo", + "url": "http://localhost:$PORT", + "sha256": "88f2f9251967bf04bd478873f074b9d8df9f1c959afc150ba3b0ea813d48161e" + } + }, + "constraintBumped": "^1.0.0", + "constraintWidened": "^1.0.0", + "constraintBumpedIfNeeded": "^1.0.0", + "previousVersion": "1.2.3", + "previousConstraint": "^1.0.0", + "previousSource": { + "type": "hosted", + "description": { + "name": "foo", + "url": "http://localhost:$PORT" + } + } + } + ], + "singleBreaking": [ + { + "name": "foo", + "version": "2.2.3", + "kind": "direct", + "source": { + "type": "hosted", + "description": { + "name": "foo", + "url": "http://localhost:$PORT", + "sha256": "bf378a3f6c4840f911d66ab375f6d3eae78a015a41f0b8b202c31d4af010892e" + } + }, + "constraintBumped": "^2.2.3", + "constraintWidened": ">=1.0.0 <3.0.0", + "constraintBumpedIfNeeded": "^2.2.3", + "previousVersion": "1.2.3", + "previousConstraint": "^1.0.0", + "previousSource": { + "type": "hosted", + "description": { + "name": "foo", + "url": "http://localhost:$PORT" + } + } + } + ], + "multiBreaking": [ + { + "name": "foo", + "version": "2.2.3", + "kind": "direct", + "source": { + "type": "hosted", + "description": { + "name": "foo", + "url": "http://localhost:$PORT", + "sha256": "bf378a3f6c4840f911d66ab375f6d3eae78a015a41f0b8b202c31d4af010892e" + } + }, + "constraintBumped": "^2.2.3", + "constraintWidened": ">=1.0.0 <3.0.0", + "constraintBumpedIfNeeded": "^2.2.3", + "previousVersion": "1.2.3", + "previousConstraint": "^1.0.0", + "previousSource": { + "type": "hosted", + "description": { + "name": "foo", + "url": "http://localhost:$PORT" + } + } + } + ] + } + ] +} + +-------------------------------- END OF OUTPUT 
--------------------------------- + +## Section apply +$ echo '{"dependencyChanges":[{"name":"foo","version":"1.2.4"}]}' | dependency_services apply +{"dependencies":[]} + +-------------------------------- END OF OUTPUT --------------------------------- + +$ cat pubspec.yaml +{"name":"app","dependencies":{"foo":"^1.0.0","bar":"^1.0.0","boo":"^1.0.0"},"environment":{"sdk":">=0.1.2 <1.0.0"}} +$ cat pubspec.lock +# Generated by pub +# See https://dart.dev/tools/pub/glossary#lockfile +packages: + bar: + dependency: "direct main" + description: + name: bar + url: "http://localhost:$PORT" + source: hosted + version: "1.2.3" + boo: + dependency: "direct main" + description: + name: boo + url: "http://localhost:$PORT" + source: hosted + version: "1.2.3" + foo: + dependency: "direct main" + description: + name: foo + url: "http://localhost:$PORT" + source: hosted + version: "1.2.4" +sdks: + dart: ">=0.1.2 <1.0.0" diff --git a/test/testdata/goldens/dependency_services/dependency_services_test/Relative paths are allowed.txt b/test/testdata/goldens/dependency_services/dependency_services_test/Relative paths are allowed.txt index 69a06d4c4..2e3877339 100644 --- a/test/testdata/goldens/dependency_services/dependency_services_test/Relative paths are allowed.txt +++ b/test/testdata/goldens/dependency_services/dependency_services_test/Relative paths are allowed.txt @@ -17,6 +17,7 @@ packages: dependency: "direct main" description: name: foo + sha256: "439814f59cbc73e1c28ca5ac6e437d5f2af10dfd18db786ce46fe0663e605ccb" url: "http://localhost:$PORT" source: hosted version: "1.0.0" @@ -50,7 +51,8 @@ $ dependency_services list "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "439814f59cbc73e1c28ca5ac6e437d5f2af10dfd18db786ce46fe0663e605ccb" } } } @@ -88,7 +90,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "439814f59cbc73e1c28ca5ac6e437d5f2af10dfd18db786ce46fe0663e605ccb" } }, "latest": "2.0.0", @@ -103,7 +106,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "c3bda774737102f799574749076544dea1a4745b5c38d590d4f206f997bfe8a0" } }, "constraintBumped": "^2.0.0", @@ -115,7 +119,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "439814f59cbc73e1c28ca5ac6e437d5f2af10dfd18db786ce46fe0663e605ccb" } } } @@ -129,7 +134,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "c3bda774737102f799574749076544dea1a4745b5c38d590d4f206f997bfe8a0" } }, "constraintBumped": "^2.0.0", @@ -141,7 +147,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "439814f59cbc73e1c28ca5ac6e437d5f2af10dfd18db786ce46fe0663e605ccb" } } } @@ -175,6 +182,7 @@ packages: dependency: "direct main" description: name: foo + sha256: c3bda774737102f799574749076544dea1a4745b5c38d590d4f206f997bfe8a0 url: "http://localhost:$PORT" source: hosted version: "2.0.0" diff --git a/test/testdata/goldens/dependency_services/dependency_services_test/Removing transitive.txt 
b/test/testdata/goldens/dependency_services/dependency_services_test/Removing transitive.txt index 50ee46fbe..04986112b 100644 --- a/test/testdata/goldens/dependency_services/dependency_services_test/Removing transitive.txt +++ b/test/testdata/goldens/dependency_services/dependency_services_test/Removing transitive.txt @@ -10,6 +10,7 @@ packages: dependency: "direct main" description: name: foo + sha256: "72f6a04c4af0d78e4f1a1e2eb00a850843e6c0c5233ac2ca911aa061cbd5f8f1" url: "http://localhost:$PORT" source: hosted version: "1.2.3" @@ -17,6 +18,7 @@ packages: dependency: transitive description: name: transitive + sha256: "8d245de5cde3ab3293e4cdea516c6a0395e24d338688279bab5f6c97bffa0915" url: "http://localhost:$PORT" source: hosted version: "1.0.0" @@ -37,7 +39,8 @@ $ dependency_services list "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "72f6a04c4af0d78e4f1a1e2eb00a850843e6c0c5233ac2ca911aa061cbd5f8f1" } } }, @@ -50,7 +53,8 @@ $ dependency_services list "type": "hosted", "description": { "name": "transitive", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "8d245de5cde3ab3293e4cdea516c6a0395e24d338688279bab5f6c97bffa0915" } } } @@ -71,7 +75,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "72f6a04c4af0d78e4f1a1e2eb00a850843e6c0c5233ac2ca911aa061cbd5f8f1" } }, "latest": "2.2.3", @@ -86,7 +91,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "bf378a3f6c4840f911d66ab375f6d3eae78a015a41f0b8b202c31d4af010892e" } }, "constraintBumped": "^2.2.3", @@ -98,7 +104,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "72f6a04c4af0d78e4f1a1e2eb00a850843e6c0c5233ac2ca911aa061cbd5f8f1" } } }, @@ -115,7 +122,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "transitive", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "8d245de5cde3ab3293e4cdea516c6a0395e24d338688279bab5f6c97bffa0915" } } } @@ -129,7 +137,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "bf378a3f6c4840f911d66ab375f6d3eae78a015a41f0b8b202c31d4af010892e" } }, "constraintBumped": "^2.2.3", @@ -141,7 +150,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "72f6a04c4af0d78e4f1a1e2eb00a850843e6c0c5233ac2ca911aa061cbd5f8f1" } } }, @@ -158,7 +168,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "transitive", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "8d245de5cde3ab3293e4cdea516c6a0395e24d338688279bab5f6c97bffa0915" } } } @@ -172,7 +183,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "transitive", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "8d245de5cde3ab3293e4cdea516c6a0395e24d338688279bab5f6c97bffa0915" } }, "latest": "1.0.0", @@ -202,6 +214,7 @@ packages: dependency: "direct main" description: name: foo + sha256: bf378a3f6c4840f911d66ab375f6d3eae78a015a41f0b8b202c31d4af010892e 
url: "http://localhost:$PORT" source: hosted version: "2.2.3" diff --git a/test/testdata/goldens/dependency_services/dependency_services_test/multibreaking.txt b/test/testdata/goldens/dependency_services/dependency_services_test/multibreaking.txt index da9222d6c..3767075d5 100644 --- a/test/testdata/goldens/dependency_services/dependency_services_test/multibreaking.txt +++ b/test/testdata/goldens/dependency_services/dependency_services_test/multibreaking.txt @@ -10,6 +10,7 @@ packages: dependency: "direct main" description: name: bar + sha256: "4de00552ae3719481f5f0e30b82ecb8b14a62907553b217e7ca178e80625329a" url: "http://localhost:$PORT" source: hosted version: "1.0.0" @@ -17,6 +18,7 @@ packages: dependency: "direct main" description: name: baz + sha256: "377433f0e0aff092191e57de97f5869cad0dd0779ee6d31e7096b84878ca41e8" url: "http://localhost:$PORT" source: hosted version: "1.0.0" @@ -24,6 +26,7 @@ packages: dependency: "direct main" description: name: foo + sha256: "439814f59cbc73e1c28ca5ac6e437d5f2af10dfd18db786ce46fe0663e605ccb" url: "http://localhost:$PORT" source: hosted version: "1.0.0" @@ -44,7 +47,8 @@ $ dependency_services list "type": "hosted", "description": { "name": "bar", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "4de00552ae3719481f5f0e30b82ecb8b14a62907553b217e7ca178e80625329a" } } }, @@ -57,7 +61,8 @@ $ dependency_services list "type": "hosted", "description": { "name": "baz", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "377433f0e0aff092191e57de97f5869cad0dd0779ee6d31e7096b84878ca41e8" } } }, @@ -70,7 +75,8 @@ $ dependency_services list "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "439814f59cbc73e1c28ca5ac6e437d5f2af10dfd18db786ce46fe0663e605ccb" } } } @@ -91,7 +97,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "bar", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "4de00552ae3719481f5f0e30b82ecb8b14a62907553b217e7ca178e80625329a" } }, "latest": "2.0.0", @@ -107,7 +114,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "bar", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "b8187621010649d6385788d7630adcd88d6548a7938899b6f18820961df3b879" } }, "constraintBumped": "^2.0.0", @@ -119,7 +127,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "bar", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "4de00552ae3719481f5f0e30b82ecb8b14a62907553b217e7ca178e80625329a" } } }, @@ -131,7 +140,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "2347a7792f73d0f8cc8aa41d4895317bd1745724b8bc77d8c03faf821c9059b7" } }, "constraintBumped": "^3.0.1", @@ -143,7 +153,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "439814f59cbc73e1c28ca5ac6e437d5f2af10dfd18db786ce46fe0663e605ccb" } } } @@ -157,7 +168,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "baz", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "377433f0e0aff092191e57de97f5869cad0dd0779ee6d31e7096b84878ca41e8" } }, "latest": "1.1.0", @@ -172,7 +184,8 @@ $ dependency_services report "type": "hosted", "description": { 
"name": "baz", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "7474da026b513eafecba9d1c79a8a3b4a9ef5158730e0968383063b3237c5dec" } }, "constraintBumped": "^1.1.0", @@ -184,7 +197,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "baz", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "377433f0e0aff092191e57de97f5869cad0dd0779ee6d31e7096b84878ca41e8" } } } @@ -198,7 +212,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "baz", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "7474da026b513eafecba9d1c79a8a3b4a9ef5158730e0968383063b3237c5dec" } }, "constraintBumped": "^1.1.0", @@ -210,7 +225,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "baz", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "377433f0e0aff092191e57de97f5869cad0dd0779ee6d31e7096b84878ca41e8" } } } @@ -224,7 +240,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "439814f59cbc73e1c28ca5ac6e437d5f2af10dfd18db786ce46fe0663e605ccb" } }, "latest": "3.0.1", @@ -238,7 +255,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "efa386ac7cc7698525e2e820a90e6bcee5d6c071de4315051a0fb2f3aff5d084" } }, "constraintBumped": "^1.0.0", @@ -250,7 +268,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "439814f59cbc73e1c28ca5ac6e437d5f2af10dfd18db786ce46fe0663e605ccb" } } } @@ -264,7 +283,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "c3bda774737102f799574749076544dea1a4745b5c38d590d4f206f997bfe8a0" } }, "constraintBumped": "^2.0.0", @@ -276,7 +296,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "439814f59cbc73e1c28ca5ac6e437d5f2af10dfd18db786ce46fe0663e605ccb" } } } @@ -290,7 +311,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "2347a7792f73d0f8cc8aa41d4895317bd1745724b8bc77d8c03faf821c9059b7" } }, "constraintBumped": "^3.0.1", @@ -302,7 +324,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "foo", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "439814f59cbc73e1c28ca5ac6e437d5f2af10dfd18db786ce46fe0663e605ccb" } } }, @@ -314,7 +337,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "bar", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "b8187621010649d6385788d7630adcd88d6548a7938899b6f18820961df3b879" } }, "constraintBumped": "^2.0.0", @@ -326,7 +350,8 @@ $ dependency_services report "type": "hosted", "description": { "name": "bar", - "url": "http://localhost:$PORT" + "url": "http://localhost:$PORT", + "sha256": "4de00552ae3719481f5f0e30b82ecb8b14a62907553b217e7ca178e80625329a" } } } @@ -353,6 +378,7 @@ packages: dependency: "direct main" description: name: bar + sha256: b8187621010649d6385788d7630adcd88d6548a7938899b6f18820961df3b879 
url: "http://localhost:$PORT" source: hosted version: "2.0.0" @@ -360,6 +386,7 @@ packages: dependency: "direct main" description: name: baz + sha256: "377433f0e0aff092191e57de97f5869cad0dd0779ee6d31e7096b84878ca41e8" url: "http://localhost:$PORT" source: hosted version: "1.0.0" @@ -367,6 +394,7 @@ packages: dependency: "direct main" description: name: foo + sha256: "2347a7792f73d0f8cc8aa41d4895317bd1745724b8bc77d8c03faf821c9059b7" url: "http://localhost:$PORT" source: hosted version: "3.0.1" diff --git a/test/testdata/goldens/embedding/embedding_test/--color forces colors.txt b/test/testdata/goldens/embedding/embedding_test/--color forces colors.txt index d468889b0..e4ad1d17c 100644 --- a/test/testdata/goldens/embedding/embedding_test/--color forces colors.txt +++ b/test/testdata/goldens/embedding/embedding_test/--color forces colors.txt @@ -3,7 +3,6 @@ $ tool/test-bin/pub_command_runner.dart pub --no-color get Resolving dependencies... + foo 1.0.0 (2.0.0 available) -Downloading foo 1.0.0... Changed 1 dependency! -------------------------------- END OF OUTPUT --------------------------------- diff --git a/test/testdata/goldens/embedding/embedding_test/logfile is written with --verbose and on unexpected exceptions.txt b/test/testdata/goldens/embedding/embedding_test/logfile is written with --verbose and on unexpected exceptions.txt index dc09988f5..219820e29 100644 --- a/test/testdata/goldens/embedding/embedding_test/logfile is written with --verbose and on unexpected exceptions.txt +++ b/test/testdata/goldens/embedding/embedding_test/logfile is written with --verbose and on unexpected exceptions.txt @@ -3,7 +3,6 @@ $ tool/test-bin/pub_command_runner.dart pub --verbose get MSG : Resolving dependencies... MSG : + foo 1.0.0 -MSG : Downloading foo 1.0.0... MSG : Changed 1 dependency! MSG : Logs written to $SANDBOX/cache/log/pub_log.txt. [E] FINE: Pub 0.1.2+3 @@ -25,19 +24,20 @@ MSG : Logs written to $SANDBOX/cache/log/pub_log.txt. [E] | took: $TIME [E] | x-powered-by: Dart with package:shelf [E] | date: $TIME -[E] | content-length: 197 +[E] | content-length: 281 [E] | x-frame-options: SAMEORIGIN [E] | content-type: application/vnd.pub.v2+json [E] | x-xss-protection: 1; mode=block [E] | x-content-type-options: nosniff [E] IO : Writing $N characters to text file $SANDBOX/cache/hosted/localhost%58$PORT/.cache/foo-versions.json. [E] FINE: Contents: -[E] | {"name":"foo","uploaders":["nweiz@google.com"],"versions":[{"pubspec":{"name":"foo","version":"1.0.0"},"version":"1.0.0","archive_url":"http://localhost:$PORT/packages/foo/versions/1.0.0.tar.gz"}],"_fetchedAt": "$TIME"} +[E] | {"name":"foo","uploaders":["nweiz@google.com"],"versions":[{"pubspec":{"name":"foo","version":"1.0.0"},"version":"1.0.0","archive_url":"http://localhost:$PORT/packages/foo/versions/1.0.0.tar.gz","archive_sha256":"439814f59cbc73e1c28ca5ac6e437d5f2af10dfd18db786ce46fe0663e605ccb"}],"_fetchedAt": "$TIME"} [E] SLVR: selecting foo 1.0.0 [E] SLVR: Version solving took: $TIME [E] | Tried 1 solutions. [E] FINE: Resolving dependencies finished ($TIME) [E] IO : Get package from http://localhost:$PORT/packages/foo/versions/1.0.0.tar.gz. +[E] FINE: Downloading foo 1.0.0... [E] IO : Created temp directory $DIR [E] IO : HTTP GET http://localhost:$PORT/packages/foo/versions/1.0.0.tar.gz [E] | X-Pub-OS: $OS @@ -58,6 +58,9 @@ MSG : Logs written to $SANDBOX/cache/log/pub_log.txt. [E] | x-content-type-options: nosniff [E] IO : Creating $FILE from stream [E] FINE: Computed checksum $CRC32C for foo 1.0.0 with expected CRC32C of $CRC32C. 
+[E] IO : Writing $N characters to text file $SANDBOX/cache/hosted-hashes/localhost%58$PORT/foo-1.0.0.sha256.
+[E] FINE: Contents:
+[E] | 439814f59cbc73e1c28ca5ac6e437d5f2af10dfd18db786ce46fe0663e605ccb
 [E] FINE: Created $FILE from stream
 [E] IO : Created temp directory $DIR
 [E] IO : Reading binary file $FILE.
@@ -78,6 +81,7 @@ MSG : Logs written to $SANDBOX/cache/log/pub_log.txt.
 [E] | dependency: "direct main"
 [E] | description:
 [E] | name: foo
+[E] | sha256: "439814f59cbc73e1c28ca5ac6e437d5f2af10dfd18db786ce46fe0663e605ccb"
 [E] | url: "http://localhost:$PORT"
 [E] | source: hosted
 [E] | version: "1.0.0"
@@ -135,6 +139,7 @@ packages:
 dependency: "direct main"
 description:
 name: foo
+ sha256: "439814f59cbc73e1c28ca5ac6e437d5f2af10dfd18db786ce46fe0663e605ccb"
 url: "http://localhost:$PORT"
 source: hosted
 version: "1.0.0"
@@ -163,21 +168,20 @@ IO : HTTP response 200 OK for GET http://localhost:$PORT/api/packages/foo
 | took: $TIME
 | x-powered-by: Dart with package:shelf
 | date: $TIME
- | content-length: 197
+ | content-length: 281
 | x-frame-options: SAMEORIGIN
 | content-type: application/vnd.pub.v2+json
 | x-xss-protection: 1; mode=block
 | x-content-type-options: nosniff
 IO : Writing $N characters to text file $SANDBOX/cache/hosted/localhost%58$PORT/.cache/foo-versions.json.
 FINE: Contents:
- | {"name":"foo","uploaders":["nweiz@google.com"],"versions":[{"pubspec":{"name":"foo","version":"1.0.0"},"version":"1.0.0","archive_url":"http://localhost:$PORT/packages/foo/versions/1.0.0.tar.gz"}],"_fetchedAt": "$TIME"}
+ | {"name":"foo","uploaders":["nweiz@google.com"],"versions":[{"pubspec":{"name":"foo","version":"1.0.0"},"version":"1.0.0","archive_url":"http://localhost:$PORT/packages/foo/versions/1.0.0.tar.gz","archive_sha256":"439814f59cbc73e1c28ca5ac6e437d5f2af10dfd18db786ce46fe0663e605ccb"}],"_fetchedAt": "$TIME"}
 SLVR: selecting foo 1.0.0
 SLVR: Version solving took: $TIME
 | Tried 1 solutions.
 FINE: Resolving dependencies finished ($TIME)
-MSG : + foo 1.0.0
 IO : Get package from http://localhost:$PORT/packages/foo/versions/1.0.0.tar.gz.
-MSG : Downloading foo 1.0.0...
+FINE: Downloading foo 1.0.0...
 IO : Created temp directory $DIR
 IO : HTTP GET http://localhost:$PORT/packages/foo/versions/1.0.0.tar.gz
 | X-Pub-OS: $OS
@@ -198,6 +202,9 @@ IO : HTTP response 200 OK for GET http://localhost:$PORT/packages/foo/versions/
 | x-content-type-options: nosniff
 IO : Creating $FILE from stream
 FINE: Computed checksum $CRC32C for foo 1.0.0 with expected CRC32C of $CRC32C.
+IO : Writing $N characters to text file $SANDBOX/cache/hosted-hashes/localhost%58$PORT/foo-1.0.0.sha256.
+FINE: Contents:
+ | 439814f59cbc73e1c28ca5ac6e437d5f2af10dfd18db786ce46fe0663e605ccb
 FINE: Created $FILE from stream
 IO : Created temp directory $DIR
 IO : Reading binary file $FILE.
@@ -209,6 +216,7 @@ FINE: Created $FILE from stream
 FINE: Extracted .tar.gz to $DIR
 IO : Renaming directory $A to $B
 IO : Deleting directory $DIR
+MSG : + foo 1.0.0
 IO : Writing $N characters to text file pubspec.lock.
 FINE: Contents:
 | # Generated by pub
@@ -218,6 +226,7 @@ FINE: Contents:
 | dependency: "direct main"
 | description:
 | name: foo
+ | sha256: "439814f59cbc73e1c28ca5ac6e437d5f2af10dfd18db786ce46fe0663e605ccb"
 | url: "http://localhost:$PORT"
 | source: hosted
 | version: "1.0.0"
diff --git a/test/upgrade/dry_run_does_not_apply_changes_test.dart b/test/upgrade/dry_run_does_not_apply_changes_test.dart
index 8c942ca79..4f514f998 100644
--- a/test/upgrade/dry_run_does_not_apply_changes_test.dart
+++ b/test/upgrade/dry_run_does_not_apply_changes_test.dart
@@ -68,6 +68,7 @@ void main() {
     // Do the dry run.
     await pubUpgrade(
       args: ['--dry-run', '--major-versions'],
+      silent: contains('Downloading foo 2.0.0...'),
       output: allOf([
         contains('Resolving dependencies...'),
         contains('> foo 2.0.0 (was 1.0.0)'),
@@ -92,7 +93,6 @@ void main() {
       output: allOf([
         contains('Resolving dependencies...'),
         contains('> foo 2.0.0 (was 1.0.0)'),
-        contains('Downloading foo 2.0.0...'),
         contains('Changed 1 dependency!'),
         contains('Changed 1 constraint in pubspec.yaml:'),
         contains('foo: ^1.0.0 -> ^2.0.0'),
diff --git a/test/upgrade/report/describes_change_test.dart b/test/upgrade/report/describes_change_test.dart
index 96410cb67..2f926183e 100644
--- a/test/upgrade/report/describes_change_test.dart
+++ b/test/upgrade/report/describes_change_test.dart
@@ -2,6 +2,7 @@
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
+import 'package:path/path.dart';
 import 'package:test/test.dart';
 
 import '../../descriptor.dart' as d;
@@ -33,11 +34,17 @@ void main() {
   });
 
   test('shows how package changed from previous lockfile', () async {
-    await servePackages()
-      ..serve('unchanged', '1.0.0')
-      ..serve('version_changed', '1.0.0')
-      ..serve('version_changed', '2.0.0')
-      ..serve('source_changed', '1.0.0');
+    final server = await servePackages();
+
+    server.serve('unchanged', '1.0.0');
+    server.serve('version_upgraded', '1.0.0');
+    server.serve('version_upgraded', '2.0.0');
+    server.serve('version_downgraded', '1.0.0');
+    server.serve('version_downgraded', '2.0.0');
+    server.serve('contents_changed', '1.0.0');
+    server.serve('source_changed', '1.0.0');
+    server.serve('package_added', '1.0.0');
+    server.serve('package_removed', '1.0.0');
 
     await d.dir('source_changed', [
       d.libDir('source_changed'),
@@ -57,28 +64,47 @@ void main() {
     // Create the first lockfile.
     await d.appDir({
       'unchanged': 'any',
-      'version_changed': '1.0.0',
+      'contents_changed': '1.0.0',
+      'version_upgraded': '1.0.0',
+      'version_downgraded': '2.0.0',
       'source_changed': 'any',
+      'package_removed': 'any',
       'description_changed': {'path': '../description_changed_1'}
     }).create();
 
     await pubGet();
 
+    server.serve(
+      'contents_changed',
+      '1.0.0',
+      contents: [d.file('Sneaky.txt', 'Very sneaky attack on integrity.')],
+    );
+
     // Change the pubspec.
     await d.appDir({
       'unchanged': 'any',
-      'version_changed': 'any',
+      'version_upgraded': 'any',
+      'version_downgraded': '1.0.0',
       'source_changed': {'path': '../source_changed'},
-      'description_changed': {'path': '../description_changed_2'}
+      'package_added': 'any',
+      'description_changed': {'path': '../description_changed_2'},
+      'contents_changed': '1.0.0',
     }).create();
 
     // Upgrade everything.
-    await pubUpgrade(output: RegExp(r'''
-Resolving dependencies\.\.\..*
-. description_changed 1\.0\.0 from path \.\.[/\\]description_changed_2 \(was 1\.0\.0 from path \.\.[/\\]description_changed_1\)
-. source_changed 2\.0\.0 from path \.\.[/\\]source_changed \(was 1\.0\.0\)
-. unchanged 1\.0\.0
-. version_changed 2\.0\.0 \(was 1\.0\.0\)
-''', multiLine: true), environment: {'PUB_ALLOW_PRERELEASE_SDK': 'false'});
+    await pubUpgrade(
+        output: allOf([
+          contains('Resolving dependencies...'),
+          contains(
+              '* description_changed 1.0.0 from path ..${separator}description_changed_2 (was 1.0.0 from path ..${separator}description_changed_1)'),
+          contains(' unchanged 1.0.0'),
+          contains(
+              '* source_changed 2.0.0 from path ..${separator}source_changed (was 1.0.0)'),
+          contains('> version_upgraded 2.0.0 (was 1.0.0'),
+          contains('< version_downgraded 1.0.0 (was 2.0.0'),
+          contains('+ package_added 1.0.0'),
+          contains('- package_removed 1.0.0'),
+          contains('~ contents_changed 1.0.0 (was 1.0.0)'),
+        ]),
+        environment: {'PUB_ALLOW_PRERELEASE_SDK': 'false'});
   });
 }
diff --git a/test/utils_test.dart b/test/utils_test.dart
index 78ccbafbc..cc51066fc 100644
--- a/test/utils_test.dart
+++ b/test/utils_test.dart
@@ -162,4 +162,18 @@ b: {}'''));
       }
     });
   });
+
+  test('hexEncode', () {
+    expect(hexEncode([]), '');
+    expect(hexEncode([255, 0, 1, 240]), 'ff0001f0');
+    expect(() => hexEncode([256, 0, 1]), throwsA(isA<FormatException>()));
+  });
+  test('hexDecode', () {
+    expect(hexDecode(''), []);
+    expect(hexDecode('ff0001f0abcdef'), [255, 0, 1, 240, 171, 205, 239]);
+    expect(hexDecode('FF0001F0ABCDEF'), [255, 0, 1, 240, 171, 205, 239]);
+    expect(() => hexDecode('F'), throwsA(isA<FormatException>()));
+    expect(() => hexDecode('0p'), throwsA(isA<FormatException>()));
+    expect(() => hexDecode('p0'), throwsA(isA<FormatException>()));
+  });
 }
diff --git a/tool/test.dart b/tool/test.dart
index 57ab55464..e0ef4b073 100755
--- a/tool/test.dart
+++ b/tool/test.dart
@@ -39,7 +39,7 @@ Future<void> main(List<String> args) async {
         packageConfigPath: path.join('.dart_tool', 'package_config.json'));
     testProcess = await Process.start(
       Platform.resolvedExecutable,
-      ['run', 'test', '--chain-stack-traces', ...args],
+      ['run', 'test', ...args],
       environment: {'_PUB_TEST_SNAPSHOT': pubSnapshotFilename},
       mode: ProcessStartMode.inheritStdio,
    );
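
As a reference for the behaviour these tests exercise, the sketch below shows how a client could check a downloaded archive against the hex-encoded sha256 a repository advertises as `archive_sha256`. This is an illustrative sketch only, not pub's implementation: the helper name `verifyArchiveSha256` is invented for this example, and it assumes the `crypto` package is available.

// Illustrative sketch only (not pub's implementation). Assumes the
// `crypto` package; `verifyArchiveSha256` is a hypothetical helper name.
import 'dart:io';

import 'package:crypto/crypto.dart' show sha256;

/// Returns `true` if the file at [archivePath] hashes to [expectedHex],
/// the hex-encoded sha256 the server advertised as `archive_sha256`.
Future<bool> verifyArchiveSha256(String archivePath, String expectedHex) async {
  // Stream the archive through the sha256 converter so large archives are
  // not buffered in memory; the bound stream emits a single Digest.
  final digest = await sha256.bind(File(archivePath).openRead()).first;
  // Digest.toString() is the lower-case hex encoding of the hash bytes.
  return digest.toString() == expectedHex.toLowerCase();
}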