diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..ab8774e --- /dev/null +++ b/.github/dependabot.yml
@@ -0,0 +1,7 @@ +version: 2 +enable-beta-ecosystems: true +updates: + - package-ecosystem: "pub" + directory: "/" + schedule: + interval: "monthly"
diff --git a/analysis_options.yaml b/analysis_options.yaml index f07a060..6177eba 100644 --- a/analysis_options.yaml +++ b/analysis_options.yaml
@@ -31,6 +31,7 @@ - sort_pub_dependencies - test_types_in_equals - throw_in_finally + - unawaited_futures - unnecessary_lambdas - unnecessary_null_aware_assignments - unnecessary_parenthesis
diff --git a/bin/dependency_services.dart b/bin/dependency_services.dart new file mode 100644 index 0000000..3dabf5d --- /dev/null +++ b/bin/dependency_services.dart
@@ -0,0 +1,84 @@ +// Copyright (c) 2021, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +/// Support for automated upgrades. +/// +/// For now this is not a finalized interface. Don't rely on this. +library dependency_services; + +import 'dart:async'; + +import 'package:args/args.dart'; +import 'package:args/command_runner.dart'; +import 'package:pub/src/command.dart'; +import 'package:pub/src/command/dependency_services.dart'; +import 'package:pub/src/exit_codes.dart' as exit_codes; +import 'package:pub/src/io.dart'; +import 'package:pub/src/log.dart' as log; + +class _DependencyServicesCommandRunner extends CommandRunner<int> + implements PubTopLevel { + @override + String? get directory => argResults['directory']; + + @override + bool get captureStackChains => argResults['verbose']; + + @override + bool get trace => argResults['verbose']; + + ArgResults? _argResults; + + /// The top-level options parsed by the command runner. + @override + ArgResults get argResults { + final a = _argResults; + if (a == null) { + throw StateError( + 'argResults cannot be used before Command.run is called.'); + } + return a; + } + + _DependencyServicesCommandRunner() + : super('dependency_services', 'Support for automatic upgrades', + usageLineLength: lineLength) { + argParser.addFlag('verbose', + abbr: 'v', negatable: false, help: 'Shortcut for "--verbosity=all".'); + argParser.addOption( + 'directory', + abbr: 'C', + help: 'Run the subcommand in the directory <dir>.', + defaultsTo: '.', + valueHelp: 'dir', + ); + + addCommand(DependencyServicesListCommand()); + addCommand(DependencyServicesReportCommand()); + addCommand(DependencyServicesApplyCommand()); + } + + @override + Future<int> run(Iterable<String> args) async { + try { + _argResults = parse(args); + return await runCommand(argResults) ?? 
exit_codes.SUCCESS; + } on UsageException catch (error) { + log.exception(error); + return exit_codes.USAGE; + } + } + + @override + void printUsage() { + log.message(usage); + } + + @override + log.Verbosity get verbosity => log.Verbosity.normal; +} + +Future<void> main(List<String> arguments) async { + await flushThenExit(await _DependencyServicesCommandRunner().run(arguments)); +}
diff --git a/doc/repository-spec-v2.md b/doc/repository-spec-v2.md index 6ddfd0f..55ccbb0 100644 --- a/doc/repository-spec-v2.md +++ b/doc/repository-spec-v2.md
@@ -91,8 +91,8 @@ overloading servers that are partially failing. Clients are strongly encouraged to employ exponential backoff starting at 200ms, -400ms, etc. stopping after 5-7 retries. Excessive can have negative impact on -servers and network performance. +400ms, etc. stopping after 5-7 retries. Excessive retries can have a negative impact +on servers and network performance. ## Rejecting Requests
diff --git a/lib/src/command.dart b/lib/src/command.dart index 556c04f..ee62392 100644 --- a/lib/src/command.dart +++ b/lib/src/command.dart
@@ -13,7 +13,6 @@ import 'package:path/path.dart' as p; import 'authentication/token_store.dart'; -import 'command_runner.dart'; import 'entrypoint.dart'; import 'exceptions.dart'; import 'exit_codes.dart' as exit_codes; @@ -127,7 +126,7 @@ } PubTopLevel get _pubTopLevel => - _pubEmbeddableCommand ?? runner as PubCommandRunner; + _pubEmbeddableCommand ?? runner as PubTopLevel; PubAnalytics? get analytics => _pubEmbeddableCommand?.analytics;
diff --git a/lib/src/command/add.dart b/lib/src/command/add.dart index d540978..0980b81 100644 --- a/lib/src/command/add.dart +++ b/lib/src/command/add.dart
@@ -19,6 +19,8 @@ import '../package_name.dart'; import '../pubspec.dart'; import '../solver.dart'; +import '../source/git.dart'; +import '../source/hosted.dart'; import '../source/path.dart'; import '../utils.dart'; @@ -110,8 +112,7 @@ var updatedPubSpec = entrypoint.root.pubspec; for (final update in updates) { /// Perform version resolution in-memory. - updatedPubSpec = - await _addPackageToPubspec(updatedPubSpec, update.packageRange); + updatedPubSpec = await _addPackageToPubspec(updatedPubSpec, update); } late SolveResult solveResult; @@ -125,9 +126,8 @@ solveResult = await resolveVersions( SolveType.upgrade, cache, Package.inMemory(updatedPubSpec)); } on GitException { - final packageRange = updates.first.packageRange; - dataError( - 'Unable to resolve package "${packageRange.name}" with the given ' + final name = updates.first.ref.name; + dataError('Unable to resolve package "$name" with the given ' 'git parameters.'); } on SolveFailure catch (e) { dataError(e.message); @@ -138,22 +138,20 @@ /// Verify the results for each package. for (final update in updates) { - final packageRange = update.packageRange; - final name = packageRange.name; + final ref = update.ref; + final name = ref.name; final resultPackage = solveResult.packages .firstWhere((packageId) => packageId.name == name); /// Assert that [resultPackage] is within the original user's expectations. - var constraint = packageRange.constraint; - if (!constraint.allows(resultPackage.version)) { - var dependencyOverrides = updatedPubSpec.dependencyOverrides; + final constraint = update.constraint; + if (constraint != null && !constraint.allows(resultPackage.version)) { + final dependencyOverrides = updatedPubSpec.dependencyOverrides; if (dependencyOverrides.isNotEmpty) { dataError('"$name" resolved to "${resultPackage.version}" which ' 'does not satisfy constraint "$constraint". 
This could be ' 'caused by "dependency_overrides".'); } - dataError('"$name" resolved to "${resultPackage.version}" which ' - 'does not satisfy constraint "$constraint".'); } } if (isDryRun) { @@ -162,9 +160,8 @@ /// to this new dependency. final newRoot = Package.inMemory(updatedPubSpec); - // TODO(jonasfj): Stop abusing Entrypoint.global for dry-run output - await Entrypoint.global(newRoot, entrypoint.lockFile, cache, - solveResult: solveResult) + await Entrypoint.inMemory(newRoot, cache, + solveResult: solveResult, lockFile: entrypoint.lockFile) .acquireDependencies(SolveType.get, dryRun: true, precompile: argResults['precompile'], @@ -204,54 +201,52 @@ /// Creates a new in-memory [Pubspec] by adding [package] to the /// dependencies of [original]. Future<Pubspec> _addPackageToPubspec( - Pubspec original, PackageRange package) async { - ArgumentError.checkNotNull(original, 'original'); - ArgumentError.checkNotNull(package, 'package'); - + Pubspec original, _ParseResult package) async { + final name = package.ref.name; final dependencies = [...original.dependencies.values]; var devDependencies = [...original.devDependencies.values]; final dependencyNames = dependencies.map((dependency) => dependency.name); final devDependencyNames = devDependencies.map((devDependency) => devDependency.name); - + final range = + package.ref.withConstraint(package.constraint ?? VersionConstraint.any); if (isDev) { /// TODO(walnut): Change the error message once pub upgrade --bump is /// released - if (devDependencyNames.contains(package.name)) { - dataError('"${package.name}" is already in "dev_dependencies". ' - 'Use "pub upgrade ${package.name}" to upgrade to a later version!'); + if (devDependencyNames.contains(name)) { + dataError('"$name" is already in "dev_dependencies". 
' + 'Use "pub upgrade $name" to upgrade to a later version!'); } /// If package is originally in dependencies and we wish to add it to /// dev_dependencies, this is a redundant change, and we should not /// remove the package from dependencies, since it might cause the user's /// code to break. - if (dependencyNames.contains(package.name)) { - dataError('"${package.name}" is already in "dependencies". ' - 'Use "pub remove ${package.name}" to remove it before adding it ' + if (dependencyNames.contains(name)) { + dataError('"$name" is already in "dependencies". ' + 'Use "pub remove $name" to remove it before adding it ' 'to "dev_dependencies"'); } - devDependencies.add(package); + devDependencies.add(range); } else { /// TODO(walnut): Change the error message once pub upgrade --bump is /// released - if (dependencyNames.contains(package.name)) { - dataError('"${package.name}" is already in "dependencies". ' - 'Use "pub upgrade ${package.name}" to upgrade to a later version!'); + if (dependencyNames.contains(name)) { + dataError('"$name" is already in "dependencies". ' + 'Use "pub upgrade $name" to upgrade to a later version!'); } /// If package is originally in dev_dependencies and we wish to add it to /// dependencies, we remove the package from dev_dependencies, since it is /// now redundant. - if (devDependencyNames.contains(package.name)) { - log.message('"${package.name}" was found in dev_dependencies. ' - 'Removing "${package.name}" and adding it to dependencies instead.'); - devDependencies = - devDependencies.where((d) => d.name != package.name).toList(); + if (devDependencyNames.contains(name)) { + log.message('"$name" was found in dev_dependencies. 
' + 'Removing "$name" and adding it to dependencies instead.'); + devDependencies = devDependencies.where((d) => d.name != name).toList(); } - dependencies.add(package); + dependencies.add(range); } return Pubspec( @@ -311,15 +306,6 @@ } } - /// The package to be added, along with the user-defined package constraints - /// if present. - PackageRange packageRange; - - /// The entry to be added to the pubspec. Assigned dynamic because it can - /// take on either a string for simple version constraints or a map for - /// more complicated hosted/git options. - dynamic pubspecInformation; - final splitPackage = package.split(':'); final packageName = splitPackage[0]; @@ -340,94 +326,48 @@ usageException('Invalid version constraint: ${e.message}'); } - /// Determine the relevant [packageRange] and [pubspecInformation] depending - /// on the type of package. - var path = this.path; + /// The package to be added. + late final PackageRef ref; + final path = this.path; if (hasGitOptions) { - dynamic git; - + final gitUrl = this.gitUrl; if (gitUrl == null) { usageException('The `--git-url` is required for git dependencies.'); } Uri parsed; try { - parsed = Uri.parse(gitUrl!); + parsed = Uri.parse(gitUrl); } on FormatException catch (e) { usageException('The --git-url must be a valid url: ${e.message}.'); } - final urlRelativeToEntrypoint = parsed.isAbsolute - ? parsed.toString() - : - // Turn the relative url from current working directory into a relative - // url from the entrypoint. - p.url.relative( - p.url.join(Uri.file(p.absolute(p.current)).toString(), - parsed.toString()), - from: p.toUri(p.absolute(entrypoint.root.dir)).toString()); /// Process the git options to return the simplest representation to be /// added to the pubspec. 
- if (gitRef == null && gitPath == null) { - git = urlRelativeToEntrypoint; - } else { - git = {'url': urlRelativeToEntrypoint, 'ref': gitRef, 'path': gitPath}; - git.removeWhere((key, value) => value == null); - } - packageRange = cache.sources.git - .parseRef(packageName, git, containingPath: entrypoint.pubspecPath) - .withConstraint(constraint ?? VersionConstraint.any); - pubspecInformation = {'git': git}; + ref = PackageRef( + packageName, + GitDescription( + url: parsed.toString(), + containingDir: p.current, + ref: gitRef, + path: gitPath, + ), + ); } else if (path != null) { - final relativeToEntryPoint = p.isRelative(path) - ? PathSource.relativePathWithPosixSeparators( - p.relative(path, from: entrypoint.root.dir)) - : path; - packageRange = cache.sources.path - .parseRef(packageName, relativeToEntryPoint, - containingPath: entrypoint.pubspecPath) - .withConstraint(constraint ?? VersionConstraint.any); - pubspecInformation = {'path': relativeToEntryPoint}; + ref = PackageRef( + packageName, PathDescription(p.absolute(path), p.isRelative(path))); } else if (sdk != null) { - packageRange = cache.sources.sdk - .parseRef(packageName, sdk) - .withConstraint(constraint ?? VersionConstraint.any); - pubspecInformation = {'sdk': sdk}; + ref = cache.sdk.parseRef(packageName, sdk); } else { - // Hosted - final Object? hostInfo; - if (hasHostOptions) { - hostInfo = languageVersion.supportsShorterHostedSyntax - ? hostUrl - : {'url': hostUrl, 'name': packageName}; - pubspecInformation = { - 'hosted': hostInfo, - }; - } else { - hostInfo = null; - pubspecInformation = constraint?.toString(); - } - - packageRange = cache.hosted.source - .parseRef( - packageName, - hostInfo, - languageVersion: entrypoint.root.pubspec.languageVersion, - ) - .withConstraint(constraint ?? VersionConstraint.any); + ref = PackageRef( + packageName, + HostedDescription( + packageName, + hostUrl ?? 
cache.hosted.defaultUrl, + ), + ); } - - if (pubspecInformation is Map && constraint != null) { - /// We cannot simply assign the value of version since it is likely that - /// [pubspecInformation] takes on the type - /// [Map<String, Map<String, String>>] - pubspecInformation = { - ...pubspecInformation, - 'version': constraint.toString() - }; - } - - return _ParseResult(packageRange, pubspecInformation); + return _ParseResult(ref, constraint); } /// Writes the changes to the pubspec file. @@ -437,37 +377,40 @@ log.io('Reading ${entrypoint.pubspecPath}.'); log.fine('Contents:\n$yamlEditor'); + final dependencyKey = isDevelopment ? 'dev_dependencies' : 'dependencies'; + for (final update in updates) { - final packageRange = update.packageRange; - final name = packageRange.name; + final constraint = update.constraint; + final ref = update.ref; + final name = ref.name; final resultId = resultPackages.firstWhere((id) => id.name == name); - var description = update.description; - - if (isHosted) { - final inferredConstraint = - VersionConstraint.compatibleWith(resultId.version).toString(); - if (description == null) { - description = inferredConstraint; - } else if (description is Map && description['version'] == null) { - /// We cannot simply assign the value of version since it is likely that - /// [description] takes on the type - /// [Map<String, Map<String, String>>] - description = {...description, 'version': '^${resultId.version}'}; - } + var description = ref.description; + final versionConstraintString = + constraint == null ? '^${resultId.version}' : constraint.toString(); + late Object? 
pubspecInformation; + if (description is HostedDescription && + description.url == cache.hosted.defaultUrl) { + pubspecInformation = versionConstraintString; + } else { + pubspecInformation = { + ref.source.name: ref.description.serializeForPubspec( + containingDir: entrypoint.root.dir, + languageVersion: entrypoint.root.pubspec.languageVersion), + if (description is HostedDescription || constraint != null) + 'version': versionConstraintString + }; } - - final dependencyKey = isDevelopment ? 'dev_dependencies' : 'dependencies'; final packagePath = [dependencyKey, name]; - /// Ensure we have a [dependencyKey] map in the `pubspec.yaml`. - if (yamlEditor.parseAt([dependencyKey], - orElse: () => YamlScalar.wrap(null)).value == + if (yamlEditor.parseAt( + [dependencyKey], + orElse: () => YamlScalar.wrap(null), + ).value == null) { + // Insert dependencyKey: {} if it did not exist. yamlEditor.update([dependencyKey], {}); } - yamlEditor.update(packagePath, description); - - log.fine('Added ${packageRange.name} to "$dependencyKey".'); + yamlEditor.update(packagePath, pubspecInformation); /// Remove the package from dev_dependencies if we are adding it to /// dependencies. Refer to [_addPackageToPubspec] for additional discussion. @@ -494,7 +437,7 @@ } class _ParseResult { - PackageRange packageRange; - Object? description; - _ParseResult(this.packageRange, this.description); + PackageRef ref; + VersionConstraint? constraint; + _ParseResult(this.ref, this.constraint); }
diff --git a/lib/src/command/cache_add.dart b/lib/src/command/cache_add.dart index e3e64e4..ea8a8c1 100644 --- a/lib/src/command/cache_add.dart +++ b/lib/src/command/cache_add.dart
@@ -59,7 +59,7 @@ var source = cache.hosted; // TODO(rnystrom): Allow specifying the server. - var ids = (await source.getVersions(cache.sources.hosted.refFor(package))) + var ids = (await cache.getVersions(source.refFor(package))) .where((id) => constraint.allows(id.version)) .toList(); @@ -77,7 +77,7 @@ } // Download it. - await source.downloadToSystemCache(id); + await cache.downloadPackage(id); } if (argResults['all']) {
diff --git a/lib/src/command/cache_list.dart b/lib/src/command/cache_list.dart index f5ec62e..76faaf0 100644 --- a/lib/src/command/cache_list.dart +++ b/lib/src/command/cache_list.dart
@@ -25,7 +25,7 @@ var packagesObj = <String, Map>{}; var source = cache.defaultSource as CachedSource; - for (var package in source.getCachedPackages()) { + for (var package in source.getCachedPackages(cache)) { var packageInfo = packagesObj.putIfAbsent(package.name, () => {}); packageInfo[package.version.toString()] = {'location': package.dir}; }
diff --git a/lib/src/command/cache_repair.dart b/lib/src/command/cache_repair.dart index 74968be..850b782 100644 --- a/lib/src/command/cache_repair.dart +++ b/lib/src/command/cache_repair.dart
@@ -26,22 +26,14 @@ // Delete any eventual temp-files left in the cache. cache.deleteTempDir(); // Repair every cached source. - final repairResults = (await Future.wait( - cache.sources.all.map(cache.source).map((source) async { - return source is CachedSource - ? await source.repairCachedPackages() - : <RepairResult>[]; - }))) - .expand((x) => x); + final repairResults = + (await Future.wait(<CachedSource>[cache.hosted, cache.git].map( + (source) => source.repairCachedPackages(cache), + ))) + .expand((x) => x); - final successes = [ - for (final result in repairResults) - if (result.success) result.package - ]; - final failures = [ - for (final result in repairResults) - if (!result.success) result.package - ]; + final successes = repairResults.where((result) => result.success); + final failures = repairResults.where((result) => !result.success); if (successes.isNotEmpty) { var packages = pluralize('package', successes.length); @@ -53,10 +45,10 @@ var buffer = StringBuffer( 'Failed to reinstall ${log.red(failures.length)} $packages:\n'); - for (var id in failures) { - buffer.write('- ${log.bold(id.name)} ${id.version}'); - if (id.source != cache.sources.defaultSource) { - buffer.write(' from ${id.source}'); + for (var failure in failures) { + buffer.write('- ${log.bold(failure.packageName)} ${failure.version}'); + if (failure.source != cache.defaultSource) { + buffer.write(' from ${failure.source}'); } buffer.writeln(); }
diff --git a/lib/src/command/dependency_services.dart b/lib/src/command/dependency_services.dart new file mode 100644 index 0000000..45833d4 --- /dev/null +++ b/lib/src/command/dependency_services.dart
@@ -0,0 +1,454 @@ +// Copyright (c) 2021, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +/// This implements support for dependency-bot style automated upgrades. +/// It is still a work in progress - do not rely on the current output. +import 'dart:convert'; +import 'dart:io'; + +import 'package:collection/collection.dart'; +import 'package:pub_semver/pub_semver.dart'; +import 'package:yaml/yaml.dart'; +import 'package:yaml_edit/yaml_edit.dart'; + +import '../command.dart'; +import '../exceptions.dart'; +import '../io.dart'; +import '../lock_file.dart'; +import '../log.dart' as log; +import '../package.dart'; +import '../package_name.dart'; +import '../pubspec.dart'; +import '../pubspec_utils.dart'; +import '../solver.dart'; +import '../system_cache.dart'; +import '../utils.dart'; + +class DependencyServicesReportCommand extends PubCommand { + @override + String get name => 'report'; + @override + String get description => + 'Output a machine-digestible report of the upgrade options for each dependency.'; + @override + String get argumentsDescription => '[options]'; + + @override + bool get takesArguments => false; + + DependencyServicesReportCommand() { + argParser.addOption('directory', + abbr: 'C', help: 'Run this in the directory <dir>.', valueHelp: 'dir'); + } + + @override + Future<void> runProtected() async { + final compatiblePubspec = stripDependencyOverrides(entrypoint.root.pubspec); + + final breakingPubspec = stripVersionUpperBounds(compatiblePubspec); + + final compatiblePackagesResult = + await _tryResolve(compatiblePubspec, cache); + + final breakingPackagesResult = await _tryResolve(breakingPubspec, cache); + + // The packages in the current lockfile or resolved from current pubspec.yaml. 
+ late Map<String, PackageId> currentPackages; + + if (fileExists(entrypoint.lockFilePath)) { + currentPackages = + Map<String, PackageId>.from(entrypoint.lockFile.packages); + } else { + final resolution = await _tryResolve(entrypoint.root.pubspec, cache) ?? + (throw DataException('Failed to resolve pubspec')); + currentPackages = + Map<String, PackageId>.fromIterable(resolution, key: (e) => e.name); + } + currentPackages.remove(entrypoint.root.name); + + final dependencies = <Object>[]; + final result = <String, Object>{'dependencies': dependencies}; + + Future<List<Object>> _computeUpgradeSet( + Pubspec rootPubspec, + PackageId? package, { + required UpgradeType upgradeType, + }) async { + if (package == null) return []; + final lockFile = entrypoint.lockFile; + final pubspec = upgradeType == UpgradeType.multiBreaking + ? stripVersionUpperBounds(rootPubspec) + : Pubspec( + rootPubspec.name, + dependencies: rootPubspec.dependencies.values, + devDependencies: rootPubspec.devDependencies.values, + sdkConstraints: rootPubspec.sdkConstraints, + ); + + final dependencySet = dependencySetOfPackage(pubspec, package); + if (dependencySet != null) { + // Force the version to be the new version. + dependencySet[package.name] = + package.toRef().withConstraint(package.toRange().constraint); + } + + final resolution = await tryResolveVersions( + SolveType.get, + cache, + Package.inMemory(pubspec), + lockFile: lockFile, + ); + + // TODO(sigurdm): improve error messages. 
+ if (resolution == null) { + throw DataException('Failed resolving'); + } + + return [ + ...resolution.packages.where((r) { + if (r.name == rootPubspec.name) return false; + final originalVersion = currentPackages[r.name]; + return originalVersion == null || + r.version != originalVersion.version; + }).map((p) { + final depset = dependencySetOfPackage(rootPubspec, p); + final originalConstraint = depset?[p.name]?.constraint; + return { + 'name': p.name, + 'version': p.version.toString(), + 'kind': _kindString(pubspec, p.name), + 'constraintBumped': originalConstraint == null + ? null + : upgradeType == UpgradeType.compatible + ? originalConstraint.toString() + : VersionConstraint.compatibleWith(p.version).toString(), + 'constraintWidened': originalConstraint == null + ? null + : upgradeType == UpgradeType.compatible + ? originalConstraint.toString() + : _widenConstraint(originalConstraint, p.version) + .toString(), + 'constraintBumpedIfNeeded': originalConstraint == null + ? null + : upgradeType == UpgradeType.compatible + ? originalConstraint.toString() + : originalConstraint.allows(p.version) + ? originalConstraint.toString() + : VersionConstraint.compatibleWith(p.version) + .toString(), + 'previousVersion': currentPackages[p.name]?.version.toString(), + 'previousConstraint': originalConstraint?.toString(), + }; + }), + for (final oldPackageName in lockFile.packages.keys) + if (!resolution.packages + .any((newPackage) => newPackage.name == oldPackageName)) + { + 'name': oldPackageName, + 'version': null, + 'kind': + 'transitive', // Only transitive constraints can be removed. 
+ 'constraintBumped': null, + 'constraintWidened': null, + 'constraintBumpedIfNeeded': null, + 'previousVersion': + currentPackages[oldPackageName]?.version.toString(), + 'previousConstraint': null, + }, + ]; + } + + for (final package in currentPackages.values) { + final compatibleVersion = compatiblePackagesResult + ?.firstWhereOrNull((element) => element.name == package.name); + final multiBreakingVersion = breakingPackagesResult + ?.firstWhereOrNull((element) => element.name == package.name); + final singleBreakingPubspec = Pubspec( + compatiblePubspec.name, + version: compatiblePubspec.version, + sdkConstraints: compatiblePubspec.sdkConstraints, + dependencies: compatiblePubspec.dependencies.values, + devDependencies: compatiblePubspec.devDependencies.values, + ); + final dependencySet = + dependencySetOfPackage(singleBreakingPubspec, package); + final kind = _kindString(compatiblePubspec, package.name); + PackageId? singleBreakingVersion; + if (dependencySet != null) { + dependencySet[package.name] = package + .toRef() + .withConstraint(stripUpperBound(package.toRange().constraint)); + final singleBreakingPackagesResult = + await _tryResolve(singleBreakingPubspec, cache); + singleBreakingVersion = singleBreakingPackagesResult + ?.firstWhereOrNull((element) => element.name == package.name); + } + dependencies.add({ + 'name': package.name, + 'version': package.version.toString(), + 'kind': kind, + 'latest': + (await cache.getLatest(package.toRef(), version: package.version)) + ?.version + .toString(), + 'constraint': + _constraintOf(compatiblePubspec, package.name)?.toString(), + if (compatibleVersion != null) + 'compatible': await _computeUpgradeSet( + compatiblePubspec, compatibleVersion, + upgradeType: UpgradeType.compatible), + 'singleBreaking': kind != 'transitive' && singleBreakingVersion == null + ? 
[] + : await _computeUpgradeSet(compatiblePubspec, singleBreakingVersion, + upgradeType: UpgradeType.singleBreaking), + 'multiBreaking': kind != 'transitive' && multiBreakingVersion != null + ? await _computeUpgradeSet(compatiblePubspec, multiBreakingVersion, + upgradeType: UpgradeType.multiBreaking) + : [], + }); + } + log.message(JsonEncoder.withIndent(' ').convert(result)); + } +} + +VersionConstraint? _constraintOf(Pubspec pubspec, String packageName) { + return (pubspec.dependencies[packageName] ?? + pubspec.devDependencies[packageName]) + ?.constraint; +} + +String _kindString(Pubspec pubspec, String packageName) { + return pubspec.dependencies.containsKey(packageName) + ? 'direct' + : pubspec.devDependencies.containsKey(packageName) + ? 'dev' + : 'transitive'; +} + +/// Try to solve [pubspec], returning the [PackageId]s in the resolution, or `null` if no +/// resolution was found. +Future<List<PackageId>?> _tryResolve(Pubspec pubspec, SystemCache cache) async { + final solveResult = await tryResolveVersions( + SolveType.upgrade, + cache, + Package.inMemory(pubspec), + ); + + return solveResult?.packages; +} + +class DependencyServicesListCommand extends PubCommand { + @override + String get name => 'list'; + + @override + String get description => + 'Output a machine-digestible listing of all dependencies'; + + @override + bool get takesArguments => false; + + DependencyServicesListCommand() { + argParser.addOption('directory', + abbr: 'C', help: 'Run this in the directory <dir>.', valueHelp: 'dir'); + } + + @override + Future<void> runProtected() async { + final pubspec = entrypoint.root.pubspec; + + final currentPackages = fileExists(entrypoint.lockFilePath) + ? entrypoint.lockFile.packages.values.toList() + : (await _tryResolve(pubspec, cache) ?? 
<PackageId>[]); + + final dependencies = <Object>[]; + final result = <String, Object>{'dependencies': dependencies}; + + for (final package in currentPackages) { + dependencies.add({ + 'name': package.name, + 'version': package.version.toString(), + 'kind': _kindString(pubspec, package.name), + 'constraint': _constraintOf(pubspec, package.name).toString(), + }); + } + log.message(JsonEncoder.withIndent(' ').convert(result)); + } +} + +enum UpgradeType { + /// Only upgrade pubspec.lock. + compatible, + + /// Unlock at most one dependency in pubspec.yaml. + singleBreaking, + + /// Unlock any dependencies in pubspec.yaml needed for getting the + /// latest resolvable version. + multiBreaking, +} + +class DependencyServicesApplyCommand extends PubCommand { + @override + String get name => 'apply'; + + @override + String get description => + 'Updates pubspec.yaml and pubspec.lock according to input.'; + + @override + bool get takesArguments => true; + + DependencyServicesApplyCommand() { + argParser.addOption('directory', + abbr: 'C', help: 'Run this in the directory <dir>.', valueHelp: 'dir'); + } + + @override + Future<void> runProtected() async { + YamlEditor(readTextFile(entrypoint.pubspecPath)); + final toApply = <_PackageVersion>[]; + final input = json.decode(await utf8.decodeStream(stdin)); + for (final change in input['dependencyChanges']) { + toApply.add( + _PackageVersion( + change['name'], + change['version'] != null ? Version.parse(change['version']) : null, + change['constraint'] != null + ? VersionConstraint.parse(change['constraint']) + : null, + ), + ); + } + + final pubspec = entrypoint.root.pubspec; + final pubspecEditor = YamlEditor(readTextFile(entrypoint.pubspecPath)); + final lockFile = fileExists(entrypoint.lockFilePath) + ? readTextFile(entrypoint.lockFilePath) + : null; + final lockFileYaml = lockFile == null ? null : loadYaml(lockFile); + final lockFileEditor = lockFile == null ? 
null : YamlEditor(lockFile); + for (final p in toApply) { + final targetPackage = p.name; + final targetVersion = p.version; + final targetConstraint = p.constraint; + + if (targetConstraint != null) { + final section = pubspec.dependencies[targetPackage] != null + ? 'dependencies' + : 'dev_dependencies'; + pubspecEditor + .update([section, targetPackage], targetConstraint.toString()); + } else if (targetVersion != null) { + final constraint = _constraintOf(pubspec, targetPackage); + if (constraint != null && !constraint.allows(targetVersion)) { + final section = pubspec.dependencies[targetPackage] != null + ? 'dependencies' + : 'dev_dependencies'; + pubspecEditor.update([section, targetPackage], + VersionConstraint.compatibleWith(targetVersion).toString()); + } + } + if (targetVersion != null && + lockFileEditor != null && + lockFileYaml['packages'].containsKey(targetPackage)) { + lockFileEditor.update( + ['packages', targetPackage, 'version'], targetVersion.toString()); + } + if (targetVersion == null && + lockFileEditor != null && + !lockFileYaml['packages'].containsKey(targetPackage)) { + dataError( + 'Trying to remove non-existing transitive dependency $targetPackage.', + ); + } + } + + final updatedLockfile = lockFileEditor == null + ? null + : LockFile.parse(lockFileEditor.toString(), cache.sources); + await log.warningsOnlyUnlessTerminal( + () async { + final updatedPubspec = pubspecEditor.toString(); + // Resolve versions, this will update transitive dependencies that were + // not passed in the input. And also counts as a validation of the input + // by ensuring the resolution is valid. + // + // We don't use `acquireDependencies` as that downloads all the archives + // to cache. + // TODO: Handle HTTP exceptions gracefully! 
+ final solveResult = await resolveVersions( + SolveType.get, + cache, + Package.inMemory(Pubspec.parse(updatedPubspec, cache.sources)), + lockFile: updatedLockfile, + ); + if (pubspecEditor.edits.isNotEmpty) { + writeTextFile(entrypoint.pubspecPath, updatedPubspec); + } + // Only if we originally had a lock-file we write the resulting lockfile back. + if (lockFileEditor != null) { + entrypoint.saveLockFile(solveResult); + } + }, + ); + // Dummy message. + log.message(json.encode({'dependencies': []})); + } +} + +class _PackageVersion { + String name; + Version? version; + VersionConstraint? constraint; + _PackageVersion(this.name, this.version, this.constraint); +} + +Map<String, PackageRange>? dependencySetOfPackage( + Pubspec pubspec, PackageId package) { + return pubspec.dependencies.containsKey(package.name) + ? pubspec.dependencies + : pubspec.devDependencies.containsKey(package.name) + ? pubspec.devDependencies + : null; +} + +VersionConstraint _widenConstraint( + VersionConstraint original, Version newVersion) { + if (original.allows(newVersion)) return original; + if (original is VersionRange) { + final min = original.min; + final max = original.max; + if (max != null && newVersion >= max) { + return compatibleWithIfPossible( + VersionRange( + min: min, + includeMin: original.includeMin, + max: newVersion.nextBreaking.firstPreRelease, + ), + ); + } + if (min != null && newVersion <= min) { + return compatibleWithIfPossible( + VersionRange( + min: newVersion, + includeMin: true, + max: max, + includeMax: original.includeMax), + ); + } + } + + if (original.isEmpty) return newVersion; + throw ArgumentError.value( + original, 'original', 'Must be a Version range or empty'); +} + +VersionConstraint compatibleWithIfPossible(VersionRange versionRange) { + final min = versionRange.min; + if (min != null && min.nextBreaking.firstPreRelease == versionRange.max) { + return VersionConstraint.compatibleWith(min); + } + return versionRange; +}
diff --git a/lib/src/command/deps.dart b/lib/src/command/deps.dart index 79463fc..678aafa 100644 --- a/lib/src/command/deps.dart +++ b/lib/src/command/deps.dart
@@ -102,7 +102,7 @@ ? 'dev' : 'transitive')); final source = - entrypoint.packageGraph.lockFile.packages[current]?.source?.name ?? + entrypoint.packageGraph.lockFile.packages[current]?.source.name ?? 'root'; packagesJson.add({ 'name': current,
diff --git a/lib/src/command/global_activate.dart b/lib/src/command/global_activate.dart index a21386a..53353ae 100644 --- a/lib/src/command/global_activate.dart +++ b/lib/src/command/global_activate.dart
@@ -7,7 +7,6 @@ import 'package:pub_semver/pub_semver.dart'; import '../command.dart'; -import '../package_name.dart'; import '../source/hosted.dart'; import '../utils.dart'; @@ -27,6 +26,11 @@ allowed: ['git', 'hosted', 'path'], defaultsTo: 'hosted'); + argParser.addOption('git-path', help: 'Path of git package in repository'); + + argParser.addOption('git-ref', + help: 'Git branch or commit to be retrieved'); + argParser.addMultiOption('features', abbr: 'f', help: 'Feature(s) to enable.', hide: true); @@ -64,18 +68,6 @@ executables = []; } - var features = <String, FeatureDependency>{}; - for (var feature in argResults['features'] ?? []) { - features[feature] = FeatureDependency.required; - } - for (var feature in argResults['omit-features'] ?? []) { - if (features.containsKey(feature)) { - usageException('Cannot both enable and disable $feature.'); - } - - features[feature] = FeatureDependency.unused; - } - final overwrite = argResults['overwrite'] as bool; Uri? hostedUrl; if (argResults.wasParsed('hosted-url')) { @@ -102,13 +94,23 @@ usageException('Unexpected $arguments ${toSentence(unexpected)}.'); } + if (argResults['source'] != 'git' && + (argResults['git-path'] != null || argResults['git-ref'] != null)) { + usageException( + 'Options `--git-path` and `--git-ref` can only be used with --source=git.'); + } + switch (argResults['source']) { case 'git': var repo = readArg('No Git repository given.'); - // TODO(rnystrom): Allow passing in a Git ref too. 
validateNoExtraArgs(); - return globals.activateGit(repo, executables, - features: features, overwriteBinStubs: overwrite); + return globals.activateGit( + repo, + executables, + overwriteBinStubs: overwrite, + path: argResults['git-path'], + ref: argResults['git-ref'], + ); case 'hosted': var package = readArg('No package to activate given.'); @@ -124,17 +126,15 @@ } validateNoExtraArgs(); - return globals.activateHosted(package, constraint, executables, - features: features, overwriteBinStubs: overwrite, url: hostedUrl); + return globals.activateHosted( + package, + constraint, + executables, + overwriteBinStubs: overwrite, + url: hostedUrl?.toString(), + ); case 'path': - if (features.isNotEmpty) { - // Globally-activated path packages just use the existing lockfile, so - // we can't change the feature selection. - usageException('--features and --omit-features may not be used with ' - 'the path source.'); - } - var path = readArg('No package to activate given.'); validateNoExtraArgs(); return globals.activatePath(
diff --git a/lib/src/command/lish.dart b/lib/src/command/lish.dart index c927e07..a20fbdf 100644 --- a/lib/src/command/lish.dart +++ b/lib/src/command/lish.dart
@@ -34,9 +34,7 @@ bool get takesArguments => false; /// The URL of the server to which to upload the package. - late final Uri server = _createServer(); - - Uri _createServer() { + late final Uri host = () { // An explicit argument takes precedence. if (argResults.wasParsed('server')) { try { @@ -57,8 +55,8 @@ } // Use the default server if nothing else is specified - return cache.sources.hosted.defaultUrl; - } + return Uri.parse(cache.hosted.defaultUrl); + }(); /// Whether the publish is just a preview. bool get dryRun => argResults['dry-run']; @@ -91,7 +89,7 @@ try { await log.progress('Uploading', () async { - var newUri = server.resolve('api/packages/versions/new'); + var newUri = host.resolve('api/packages/versions/new'); var response = await client.get(newUri, headers: pubApiHeaders); var parameters = parseJsonResponse(response); @@ -123,14 +121,14 @@ } on AuthenticationException catch (error) { var msg = ''; if (error.statusCode == 401) { - msg += '$server package repository requested authentication!\n' + msg += '$host package repository requested authentication!\n' 'You can provide credentials using:\n' - ' pub token add $server\n'; + ' pub token add $host\n'; } if (error.statusCode == 403) { - msg += 'Insufficient permissions to the resource at the $server ' + msg += 'Insufficient permissions to the resource at the $host ' 'package repository.\nYou can modify credentials using:\n' - ' pub token add $server\n'; + ' pub token add $host\n'; } if (error.serverMessage != null) { msg += '\n' + error.serverMessage! + '\n'; @@ -143,7 +141,7 @@ // the error. Try to parse that out once we have an easily-accessible // XML parser. 
fail(log.red('Failed to upload the package.')); - } else if (Uri.parse(url.origin) == Uri.parse(server.origin)) { + } else if (Uri.parse(url.origin) == Uri.parse(host.origin)) { handleJsonError(error.response); } else { rethrow; @@ -155,7 +153,8 @@ try { final officialPubServers = { 'https://pub.dartlang.org', - 'https://pub.dev', + // [validateAndNormalizeHostedUrl] normalizes https://pub.dev to + // https://pub.dartlang.org, so we don't need to allow that here. // Pub uses oauth2 credentials only for authenticating official pub // servers for security purposes (to not expose pub.dev access token to @@ -165,25 +164,30 @@ // explicitly have to define mock servers as official server to test // publish command with oauth2 credentials. if (runningFromTest && - Platform.environment.containsKey('PUB_HOSTED_URL') && - Platform.environment['_PUB_TEST_AUTH_METHOD'] == 'oauth2') - Platform.environment['PUB_HOSTED_URL'], + Platform.environment.containsKey('_PUB_TEST_DEFAULT_HOSTED_URL')) + Platform.environment['_PUB_TEST_DEFAULT_HOSTED_URL'], }; - if (officialPubServers.contains(server.toString())) { - // Using OAuth2 authentication client for the official pub servers + // Using OAuth2 authentication client for the official pub servers + final isOfficalServer = officialPubServers.contains(host.toString()); + if (isOfficalServer && !cache.tokenStore.hasCredential(host)) { + // Using OAuth2 authentication client for the official pub servers, when + // we don't have an explicit token from [TokenStore] to use instead. + // + // This allows us to use `dart pub token add` to inject a token for use + // with the official servers. 
await oauth2.withClient(cache, (client) { return _publishUsingClient(packageBytes, client); }); } else { // For third party servers using bearer authentication client - await withAuthenticatedClient(cache, server, (client) { + await withAuthenticatedClient(cache, host, (client) { return _publishUsingClient(packageBytes, client); }); } } on PubHttpException catch (error) { var url = error.response.request!.url; - if (Uri.parse(url.origin) == Uri.parse(server.origin)) { + if (Uri.parse(url.origin) == Uri.parse(host.origin)) { handleJsonError(error.response); } else { rethrow; @@ -214,19 +218,19 @@ } var files = entrypoint.root.listFiles(); - log.fine('Archiving and publishing ${entrypoint.root}.'); + log.fine('Archiving and publishing ${entrypoint.root.name}.'); // Show the package contents so the user can verify they look OK. var package = entrypoint.root; - log.message('Publishing ${package.name} ${package.version} to $server:\n' + log.message('Publishing ${package.name} ${package.version} to $host:\n' '${tree.fromFiles(files, baseDir: entrypoint.root.dir)}'); var packageBytesFuture = createTarGz(files, baseDir: entrypoint.root.dir).toBytes(); // Validate the package. - var isValid = - await _validate(packageBytesFuture.then((bytes) => bytes.length)); + var isValid = await _validate( + packageBytesFuture.then((bytes) => bytes.length), files); if (!isValid) { overrideExitCode(exit_codes.DATA); return; @@ -247,7 +251,7 @@ /// Validates the package. Completes to false if the upload should not /// proceed. - Future<bool> _validate(Future<int> packageSize) async { + Future<bool> _validate(Future<int> packageSize, List<String> files) async { final hints = <String>[]; final warnings = <String>[]; final errors = <String>[]; @@ -255,7 +259,8 @@ await Validator.runAll( entrypoint, packageSize, - server, + host, + files, hints: hints, warnings: warnings, errors: errors,
diff --git a/lib/src/command/list_package_dirs.dart b/lib/src/command/list_package_dirs.dart index 09d248e..c18eae0 100644 --- a/lib/src/command/list_package_dirs.dart +++ b/lib/src/command/list_package_dirs.dart
@@ -45,8 +45,7 @@ // Include the local paths to all locked packages. var packages = mapMap(entrypoint.lockFile.packages, value: (String name, PackageId package) { - var source = entrypoint.cache.source(package.source); - var packageDir = source.getDirectory(package); + var packageDir = cache.getDirectory(package); // Normalize paths and make them absolute for backwards compatibility // with the protocol used by the analyzer. return p.normalize(p.absolute(p.join(packageDir, 'lib')));
diff --git a/lib/src/command/outdated.dart b/lib/src/command/outdated.dart index 609b7ae..a291198 100644 --- a/lib/src/command/outdated.dart +++ b/lib/src/command/outdated.dart
@@ -10,7 +10,6 @@ import 'package:collection/collection.dart' show IterableExtension, IterableNullableExtension; import 'package:path/path.dart' as path; -import 'package:pub_semver/pub_semver.dart'; import '../command.dart'; import '../command_runner.dart'; @@ -181,18 +180,24 @@ PackageId? latest; // If not overridden in current resolution we can use this if (!entrypoint.root.pubspec.dependencyOverrides.containsKey(name)) { - latest ??= await _getLatest(current); + latest ??= await cache.getLatest(current?.toRef(), + version: current?.version, allowPrereleases: prereleases); } // If present as a dependency or dev_dependency we use this - latest ??= await _getLatest(rootPubspec.dependencies[name]); - latest ??= await _getLatest(rootPubspec.devDependencies[name]); + latest ??= await cache.getLatest(rootPubspec.dependencies[name]?.toRef(), + allowPrereleases: prereleases); + latest ??= await cache.getLatest( + rootPubspec.devDependencies[name]?.toRef(), + allowPrereleases: prereleases); // If not overridden and present in either upgradable or resolvable we // use this reference to find the latest if (!upgradablePubspec.dependencyOverrides.containsKey(name)) { - latest ??= await _getLatest(upgradable); + latest ??= await cache.getLatest(upgradable?.toRef(), + version: upgradable?.version, allowPrereleases: prereleases); } if (!resolvablePubspec.dependencyOverrides.containsKey(name)) { - latest ??= await _getLatest(resolvable); + latest ??= await cache.getLatest(resolvable?.toRef(), + version: resolvable?.version, allowPrereleases: prereleases); } // Otherwise, we might simply not have a latest, when a transitive // dependency is overridden the source can depend on which versions we @@ -200,7 +205,9 @@ // allow 3rd party pub servers, but other servers might. Hence, we choose // to fallback to using the overridden source for latest. if (latest == null) { - latest ??= await _getLatest(current ?? upgradable ?? resolvable); + final id = current ?? upgradable ?? 
resolvable; + latest ??= await cache.getLatest(id?.toRef(), + version: id?.version, allowPrereleases: prereleases); latestIsOverridden = true; } @@ -304,37 +311,6 @@ return argResults['mode'] == 'null-safety'; }(); - /// Get the latest version of [package]. - /// - /// Will include prereleases in the comparison if '--prereleases' was enabled - /// by the arguments. - /// - /// If [package] is a [PackageId] with a prerelease version and there are no - /// later stable version we return a prerelease version if it exists. - /// - /// Returns `null`, if unable to find the package. - Future<PackageId?> _getLatest(PackageName? package) async { - if (package == null) { - return null; - } - final ref = package.toRef(); - final available = await cache.source(ref.source).getVersions(ref); - if (available.isEmpty) { - return null; - } - - // TODO(sigurdm): Refactor this to share logic with report.dart. - available.sort(prereleases - ? (x, y) => x.version.compareTo(y.version) - : (x, y) => Version.prioritize(x.version, y.version)); - if (package is PackageId && - package.version.isPreRelease && - package.version > available.last.version) { - available.sort((x, y) => x.version.compareTo(y.version)); - } - return available.last; - } - /// Retrieves the pubspec of package [name] in [version] from [source]. /// /// Returns `null`, if given `null` as a convinience. 
@@ -346,7 +322,7 @@ return null; } return _VersionDetails( - await cache.source(id.source).describe(id), + await cache.describe(id), id, isOverridden, ); @@ -380,7 +356,7 @@ if (id == null) { continue; // allow partial resolutions } - final pubspec = await cache.source(id.source).describe(id); + final pubspec = await cache.describe(id); queue.addAll(pubspec.dependencies.keys); } @@ -743,11 +719,8 @@ return Map.fromEntries( await Future.wait( ids.map( - (id) async => MapEntry( - id, - (await id.source!.bind(cache).describe(id)) - .languageVersion - .supportsNullSafety), + (id) async => MapEntry(id, + (await cache.describe(id)).languageVersion.supportsNullSafety), ), ), );
diff --git a/lib/src/command/remove.dart b/lib/src/command/remove.dart index af91d8c..f3b27bd 100644 --- a/lib/src/command/remove.dart +++ b/lib/src/command/remove.dart
@@ -65,7 +65,7 @@ final newPubspec = _removePackagesFromPubspec(rootPubspec, packages); final newRoot = Package.inMemory(newPubspec); - await Entrypoint.global(newRoot, entrypoint.lockFile, cache) + await Entrypoint.inMemory(newRoot, cache, lockFile: entrypoint.lockFile) .acquireDependencies(SolveType.get, precompile: argResults['precompile'], dryRun: true,
diff --git a/lib/src/command/upgrade.dart b/lib/src/command/upgrade.dart index 34e061d..927d849 100644 --- a/lib/src/command/upgrade.dart +++ b/lib/src/command/upgrade.dart
@@ -214,34 +214,39 @@ continue; } - changes[dep] = dep.withConstraint(VersionConstraint.compatibleWith( - resolvedPackage.version, - )); + changes[dep] = dep.toRef().withConstraint( + VersionConstraint.compatibleWith( + resolvedPackage.version, + ), + ); } + final newPubspecText = _updatePubspec(changes); if (_dryRun) { // Even if it is a dry run, run `acquireDependencies` so that the user // gets a report on changes. - // TODO(jonasfj): Stop abusing Entrypoint.global for dry-run output - await Entrypoint.global( - Package.inMemory(resolvablePubspec), - entrypoint.lockFile, + await Entrypoint.inMemory( + Package.inMemory( + Pubspec.parse(newPubspecText, cache.sources), + ), cache, + lockFile: entrypoint.lockFile, solveResult: solveResult, ).acquireDependencies( - SolveType.upgrade, + SolveType.get, dryRun: true, precompile: _precompile, analytics: null, // No analytics for dry-run ); } else { - await _updatePubspec(changes); - + if (changes.isNotEmpty) { + writeTextFile(entrypoint.pubspecPath, newPubspecText); + } // TODO: Allow Entrypoint to be created with in-memory pubspec, so that // we can show the changes when not in --dry-run mode. For now we only show // the changes made to pubspec.yaml in dry-run mode. await Entrypoint(directory, cache).acquireDependencies( - SolveType.upgrade, + SolveType.get, precompile: _precompile, analytics: analytics, ); @@ -310,17 +315,18 @@ continue; } - changes[dep] = dep.withConstraint(constraint); + changes[dep] = dep.toRef().withConstraint(constraint); } + final newPubspecText = _updatePubspec(changes); if (_dryRun) { // Even if it is a dry run, run `acquireDependencies` so that the user // gets a report on changes. 
// TODO(jonasfj): Stop abusing Entrypoint.global for dry-run output - await Entrypoint.global( - Package.inMemory(nullsafetyPubspec), - entrypoint.lockFile, + await Entrypoint.inMemory( + Package.inMemory(Pubspec.parse(newPubspecText, cache.sources)), cache, + lockFile: entrypoint.lockFile, solveResult: solveResult, ).acquireDependencies( SolveType.upgrade, @@ -329,8 +335,9 @@ analytics: null, ); } else { - await _updatePubspec(changes); - + if (changes.isNotEmpty) { + writeTextFile(entrypoint.pubspecPath, newPubspecText); + } // TODO: Allow Entrypoint to be created with in-memory pubspec, so that // we can show the changes in --dry-run mode. For now we only show // the changes made to pubspec.yaml in dry-run mode. @@ -357,8 +364,7 @@ await Future.wait(directDeps.map((name) async { final resolvedPackage = resolvedPackages[name]!; - final boundSource = resolvedPackage.source!.bind(cache); - final pubspec = await boundSource.describe(resolvedPackage); + final pubspec = await cache.describe(resolvedPackage); if (!pubspec.languageVersion.supportsNullSafety) { nonMigratedDirectDeps.add(name); } @@ -381,13 +387,11 @@ } /// Updates `pubspec.yaml` with given [changes]. - Future<void> _updatePubspec( + String _updatePubspec( Map<PackageRange, PackageRange> changes, - ) async { + ) { ArgumentError.checkNotNull(changes, 'changes'); - if (changes.isEmpty) return; - final yamlEditor = YamlEditor(readTextFile(entrypoint.pubspecPath)); final deps = entrypoint.root.pubspec.dependencies.keys; final devDeps = entrypoint.root.pubspec.devDependencies.keys; @@ -407,9 +411,7 @@ ); } } - - /// Windows line endings are already handled by [yamlEditor] - writeTextFile(entrypoint.pubspecPath, yamlEditor.toString()); + return yamlEditor.toString(); } /// Outputs a summary of changes made to `pubspec.yaml`. 
@@ -469,24 +471,23 @@ return dep; } - final boundSource = dep.source!.bind(cache); - final packages = await boundSource.getVersions(dep.toRef()); + final packages = await cache.getVersions(dep.toRef()); packages.sort((a, b) => a.version.compareTo(b.version)); for (final package in packages) { - final pubspec = await boundSource.describe(package); + final pubspec = await cache.describe(package); if (pubspec.languageVersion.supportsNullSafety) { hasNullSafetyVersions.add(dep.name); - return dep.withConstraint( - VersionRange(min: package.version, includeMin: true), - ); + return dep.toRef().withConstraint( + VersionRange(min: package.version, includeMin: true), + ); } } hasNoNullSafetyVersions.add(dep.name); // This value is never used. We will throw an exception because //`hasNonNullSafetyVersions` is not empty. - return dep.withConstraint(VersionConstraint.empty); + return dep.toRef().withConstraint(VersionConstraint.empty); })); final deps = _removeUpperConstraints(original.dependencies.values);
diff --git a/lib/src/command/uploader.dart b/lib/src/command/uploader.dart index 99a6a57..cae4963 100644 --- a/lib/src/command/uploader.dart +++ b/lib/src/command/uploader.dart
@@ -6,10 +6,7 @@ import 'dart:io'; import '../command.dart'; -import '../exit_codes.dart' as exit_codes; -import '../http.dart'; -import '../log.dart' as log; -import '../oauth2.dart' as oauth2; +import '../utils.dart'; /// Handles the `uploader` pub command. class UploaderCommand extends PubCommand { @@ -23,6 +20,9 @@ @override String get docUrl => 'https://dart.dev/tools/pub/cmd/pub-uploader'; + @override + bool get hidden => true; + /// The URL of the package hosting server. Uri get server => Uri.parse(argResults['server']); @@ -41,58 +41,18 @@ @override Future<void> runProtected() async { - if (argResults.wasParsed('server')) { - await log.warningsOnlyUnlessTerminal(() { - log.message( - ''' -The --server option is deprecated. Use `publish_to` in your pubspec.yaml or set -the \$PUB_HOSTED_URL environment variable.''', - ); - }); - } - if (argResults.rest.isEmpty) { - log.error('No uploader command given.'); - printUsage(); - overrideExitCode(exit_codes.USAGE); - return; - } - - var rest = argResults.rest.toList(); - - // TODO(rnystrom): Use subcommands for these. - var command = rest.removeAt(0); - if (!['add', 'remove'].contains(command)) { - log.error('Unknown uploader command "$command".'); - printUsage(); - overrideExitCode(exit_codes.USAGE); - return; - } else if (rest.isEmpty) { - log.error('No uploader given for "pub uploader $command".'); - printUsage(); - overrideExitCode(exit_codes.USAGE); - return; - } - - final package = argResults['package'] ?? 
entrypoint.root.name; - final uploader = rest[0]; + String packageName = '<packageName>'; try { - final response = await oauth2.withClient(cache, (client) { - if (command == 'add') { - var url = server.resolve('/api/packages/' - '${Uri.encodeComponent(package)}/uploaders'); - return client - .post(url, headers: pubApiHeaders, body: {'email': uploader}); - } else { - // command == 'remove' - var url = server.resolve('/api/packages/' - '${Uri.encodeComponent(package)}/uploaders/' - '${Uri.encodeComponent(uploader)}'); - return client.delete(url, headers: pubApiHeaders); - } - }); - handleJsonSuccess(response); - } on PubHttpException catch (error) { - handleJsonError(error.response); + packageName = entrypoint.root.name; + } on Exception catch (_) { + // Probably run without a pubspec. + // Just print error below without a specific package name. } + fail(''' +Package uploaders are no longer managed from the command line. +Manage uploaders from: + +https://pub.dev/packages/$packageName/admin +'''); } }
diff --git a/lib/src/dart.dart b/lib/src/dart.dart index 97219dc..f4f8686 100644 --- a/lib/src/dart.dart +++ b/lib/src/dart.dart
@@ -146,11 +146,14 @@ String toString() => errors.join('\n'); } -/// Precompiles the Dart executable at [executablePath] to a kernel file at -/// [outputPath]. +/// Precompiles the Dart executable at [executablePath]. /// -/// This file is also cached at [incrementalDillOutputPath] which is used to -/// initialize the compiler on future runs. +/// If the compilation succeeds it is saved to a kernel file at [outputPath]. +/// +/// If compilation fails, the output is cached at [incrementalDillPath]. +/// +/// Whichever of [incrementalDillPath] and [outputPath] already exists is +/// used to initialize the compiler run. /// /// The [packageConfigPath] should point at the package config file to be used /// for `package:` uri resolution. @@ -158,39 +161,65 @@ /// The [name] is used to describe the executable in logs and error messages. Future<void> precompile({ required String executablePath, - required String incrementalDillOutputPath, + required String incrementalDillPath, required String name, required String outputPath, required String packageConfigPath, }) async { ensureDir(p.dirname(outputPath)); - ensureDir(p.dirname(incrementalDillOutputPath)); + ensureDir(p.dirname(incrementalDillPath)); + const platformDill = 'lib/_internal/vm_platform_strong.dill'; final sdkRoot = p.relative(p.dirname(p.dirname(Platform.resolvedExecutable))); - var client = await FrontendServerClient.start( - executablePath, - incrementalDillOutputPath, - platformDill, - sdkRoot: sdkRoot, - packagesJson: packageConfigPath, - printIncrementalDependencies: false, - ); + String? tempDir; + FrontendServerClient? client; try { - var result = await client.compile(); + tempDir = createTempDir(p.dirname(incrementalDillPath), 'tmp'); + // To avoid potential races we copy the incremental data to a temporary file + // for just this compilation. 
+ final temporaryIncrementalDill = + p.join(tempDir, '${p.basename(incrementalDillPath)}.incremental.dill'); + try { + if (fileExists(incrementalDillPath)) { + copyFile(incrementalDillPath, temporaryIncrementalDill); + } else if (fileExists(outputPath)) { + copyFile(outputPath, temporaryIncrementalDill); + } + } on FileSystemException { + // Not able to copy existing file, compilation will start from scratch. + } + + client = await FrontendServerClient.start( + executablePath, + temporaryIncrementalDill, + platformDill, + sdkRoot: sdkRoot, + packagesJson: packageConfigPath, + printIncrementalDependencies: false, + ); + final result = await client.compile(); final highlightedName = log.bold(name); if (result?.errorCount == 0) { log.message('Built $highlightedName.'); - await File(incrementalDillOutputPath).copy(outputPath); + // By using rename we ensure atomicity. An external observer will either + // see the old or the new snapshot. + renameFile(temporaryIncrementalDill, outputPath); } else { - // Don't leave partial results. - deleteEntry(outputPath); + // By using rename we ensure atomicity. An external observer will either + // see the old or the new snapshot. + renameFile(temporaryIncrementalDill, incrementalDillPath); + // If compilation failed we don't want to leave an incorrect snapshot. + tryDeleteEntry(outputPath); throw ApplicationException( log.yellow('Failed to build $highlightedName:\n') + (result?.compilerOutputLines.join('\n') ?? '')); } } finally { - client.kill(); + client?.kill(); + if (tempDir != null) { + tryDeleteEntry(tempDir); + } } }
diff --git a/lib/src/entrypoint.dart b/lib/src/entrypoint.dart index b54ec5e..42b3732 100644 --- a/lib/src/entrypoint.dart +++ b/lib/src/entrypoint.dart
@@ -16,7 +16,6 @@ import 'dart.dart' as dart; import 'exceptions.dart'; import 'executable.dart'; -import 'http.dart' as http; import 'io.dart'; import 'language_version.dart'; import 'lock_file.dart'; @@ -73,8 +72,14 @@ /// but may be the entrypoint when you're running its tests. class Entrypoint { /// The root package this entrypoint is associated with. + /// + /// For a global package, this is the activated package. final Package root; + /// For a global package, this is the directory that the package is installed + /// in. Non-global packages have null. + final String? globalDir; + /// The system-wide cache which caches packages that need to be fetched over /// the network. final SystemCache cache; @@ -83,7 +88,8 @@ bool get isCached => !root.isInMemory && p.isWithin(cache.rootDir, root.dir); /// Whether this is an entrypoint for a globally-activated package. - final bool isGlobal; + // final bool isGlobal; + bool get isGlobal => globalDir != null; /// The lockfile for the entrypoint. /// @@ -123,8 +129,7 @@ /// /// Global packages (except those from path source) /// store these in the global cache. - String? get _configRoot => - isCached ? p.join(cache.rootDir, 'global_packages', root.name) : root.dir; + String? get _configRoot => isCached ? globalDir : root.dir; /// The path to the entrypoint's ".packages" file. /// @@ -140,6 +145,14 @@ /// The path to the entrypoint package's pubspec. String get pubspecPath => p.normalize(root.path('pubspec.yaml')); + /// Whether the entrypoint package contains a `pubspec_overrides.yaml` file. + bool get hasPubspecOverrides => + !root.isInMemory && fileExists(pubspecOverridesPath); + + /// The path to the entrypoint package's pubspec overrides file. + String get pubspecOverridesPath => + p.normalize(root.path('pubspec_overrides.yaml')); + /// The path to the entrypoint package's lockfile. 
String get lockFilePath => p.normalize(p.join(_configRoot!, 'pubspec.lock')); @@ -153,11 +166,7 @@ /// but the configuration is stored at the package itself. String get cachePath { if (isGlobal) { - return p.join( - cache.rootDir, - 'global_packages', - root.name, - ); + return globalDir!; } else { var newPath = root.path('.dart_tool/pub'); var oldPath = root.path('.pub'); @@ -174,15 +183,26 @@ String get _incrementalDillsPath => p.join(cachePath, 'incremental'); /// Loads the entrypoint from a package at [rootDir]. - Entrypoint(String rootDir, this.cache) - : root = Package.load(null, rootDir, cache.sources), - isGlobal = false; + Entrypoint( + String rootDir, + this.cache, + ) : root = Package.load(null, rootDir, cache.sources, + withPubspecOverrides: true), + globalDir = null; + + Entrypoint.inMemory(this.root, this.cache, + {required LockFile? lockFile, SolveResult? solveResult}) + : _lockFile = lockFile, + globalDir = null { + if (solveResult != null) { + _packageGraph = PackageGraph.fromSolveResult(this, solveResult); + } + } /// Creates an entrypoint given package and lockfile objects. /// If a SolveResult is already created it can be passed as an optimization. - Entrypoint.global(this.root, this._lockFile, this.cache, - {SolveResult? solveResult}) - : isGlobal = true { + Entrypoint.global(this.globalDir, this.root, this._lockFile, this.cache, + {SolveResult? solveResult}) { if (solveResult != null) { _packageGraph = PackageGraph.fromSolveResult(this, solveResult); } @@ -203,18 +223,20 @@ /// Writes .packages and .dart_tool/package_config.json Future<void> writePackagesFiles() async { + final entrypointName = isGlobal ? null : root.name; writeTextFile( packagesFile, lockFile.packagesFile(cache, - entrypoint: root.name, relativeFrom: root.dir)); + entrypoint: entrypointName, + relativeFrom: isGlobal ? 
null : root.dir)); ensureDir(p.dirname(packageConfigFile)); writeTextFile( packageConfigFile, await lockFile.packageConfigFile(cache, - entrypoint: root.name, + entrypoint: entrypointName, entrypointSdkConstraint: root.pubspec.sdkConstraints[sdk.identifier], - relativeFrom: root.dir)); + relativeFrom: isGlobal ? null : root.dir)); } /// Gets all dependencies of the [root] package. @@ -247,6 +269,11 @@ required PubAnalytics? analytics, bool onlyReportSuccessOrFailure = false, }) async { + if (!onlyReportSuccessOrFailure && hasPubspecOverrides) { + log.warning( + 'Warning: pubspec.yaml has overrides from $pubspecOverridesPath'); + } + final suffix = root.isInMemory || root.dir == '.' ? '' : ' in ${root.dir}'; SolveResult result; try { @@ -294,8 +321,8 @@ await result.showReport(type, cache); } if (!dryRun) { - await Future.wait(result.packages.map(_get)); - _saveLockFile(result); + await result.downloadCachedPackages(cache); + saveLockFile(result); } if (onlyReportSuccessOrFailure) { log.message('Got dependencies$suffix.'); @@ -387,10 +414,11 @@ Future<void> _precompileExecutable(Executable executable) async { final package = executable.package; + await dart.precompile( executablePath: resolveExecutable(executable), outputPath: pathOfExecutable(executable), - incrementalDillOutputPath: incrementalDillPathOfExecutable(executable), + incrementalDillPath: incrementalDillPathOfExecutable(executable), packageConfigPath: packageConfigFile, name: '$package:${p.basenameWithoutExtension(executable.relativePath)}'); @@ -470,21 +498,6 @@ } } - /// Makes sure the package at [id] is locally available. - /// - /// This automatically downloads the package to the system-wide cache as well - /// if it requires network access to retrieve (specifically, if the package's - /// source is a [CachedSource]). 
- Future<void> _get(PackageId id) async { - return await http.withDependencyType(root.dependencyType(id.name), - () async { - if (id.isRoot) return; - - var source = cache.source(id.source); - if (source is CachedSource) await source.downloadToSystemCache(id); - }); - } - /// Throws a [DataError] if the `.dart_tool/package_config.json` file doesn't /// exist or if it's out-of-date relative to the lockfile or the pubspec. /// @@ -514,11 +527,22 @@ var pubspecModified = File(pubspecPath).lastModifiedSync(); var lockFileModified = File(lockFilePath).lastModifiedSync(); + var pubspecChanged = lockFileModified.isBefore(pubspecModified); + var pubspecOverridesChanged = false; + + if (hasPubspecOverrides) { + var pubspecOverridesModified = + File(pubspecOverridesPath).lastModifiedSync(); + pubspecOverridesChanged = + lockFileModified.isBefore(pubspecOverridesModified); + } + var touchedLockFile = false; - if (lockFileModified.isBefore(pubspecModified) || hasPathDependencies) { - // If `pubspec.lock` is newer than `pubspec.yaml` or we have path - // dependencies, then we check that `pubspec.lock` is a correct solution - // for the requirements in `pubspec.yaml`. This aims to: + if (pubspecChanged || pubspecOverridesChanged || hasPathDependencies) { + // If `pubspec.lock` is older than `pubspec.yaml` or + // `pubspec_overrides.yaml`, or we have path dependencies, then we check + // that `pubspec.lock` is a correct solution for the requirements in + // `pubspec.yaml` and `pubspec_overrides.yaml`. This aims to: // * Prevent missing packages when `pubspec.lock` is checked into git. // * Mitigate missing transitive dependencies when the `pubspec.yaml` in // a path dependency is changed. @@ -527,7 +551,8 @@ touchedLockFile = true; touch(lockFilePath); } else { - dataError('The $pubspecPath file has changed since the $lockFilePath ' + var filePath = pubspecChanged ? 
pubspecPath : pubspecOverridesPath; + dataError('The $filePath file has changed since the $lockFilePath ' 'file was generated, please run "$topLevelProgram pub get" again.'); } } @@ -545,11 +570,11 @@ var packageConfigModified = File(packageConfigFile).lastModifiedSync(); if (packageConfigModified.isBefore(lockFileModified) || hasPathDependencies) { - // If `package_config.json` is newer than `pubspec.lock` or we have + // If `package_config.json` is older than `pubspec.lock` or we have // path dependencies, then we check that `package_config.json` is a // correct configuration on the local machine. This aims to: // * Mitigate issues when copying a folder from one machine to another. - // * Force `pub get` if a path dependency has changed language verison. + // * Force `pub get` if a path dependency has changed language version. _checkPackageConfigUpToDate(); touch(packageConfigFile); } else if (touchedLockFile) { @@ -592,7 +617,7 @@ // Check that uncached dependencies' pubspecs are also still satisfied, // since they're mutable and may have changed since the last get. for (var id in lockFile.packages.values) { - var source = cache.source(id.source); + final source = id.source; if (source is CachedSource) continue; try { @@ -605,7 +630,7 @@ } final relativePubspecPath = - p.join(source.getDirectory(id, relativeFrom: '.'), 'pubspec.yaml'); + p.join(cache.getDirectory(id, relativeFrom: '.'), 'pubspec.yaml'); dataError('$relativePubspecPath has ' 'changed since the $lockFilePath file was generated, please run ' '"$topLevelProgram pub get" again.'); @@ -629,11 +654,11 @@ // We only care about cached sources. Uncached sources aren't "installed". // If one of those is missing, we want to show the user the file not // found error later since installing won't accomplish anything. - var source = cache.source(package.source); + var source = package.source; if (source is! CachedSource) return true; // Get the directory. 
- var dir = source.getDirectory(package, relativeFrom: '.'); + var dir = cache.getDirectory(package, relativeFrom: '.'); // See if the directory is there and looks like a package. return fileExists(p.join(dir, 'pubspec.yaml')); }); @@ -672,9 +697,9 @@ return false; } - final source = cache.source(lockFileId.source); + final source = lockFileId.source; final lockFilePackagePath = root.path( - source.getDirectory(lockFileId, relativeFrom: root.dir), + cache.getDirectory(lockFileId, relativeFrom: root.dir), ); // Make sure that the packagePath agrees with the lock file about the @@ -811,7 +836,7 @@ // If a package is cached, then it's universally immutable and we need // not check if the language version is correct. - final source = cache.source(id.source); + final source = id.source; if (source is CachedSource) { continue; } @@ -824,7 +849,9 @@ ); if (pkg.languageVersion != languageVersion) { final relativePubspecPath = p.join( - source.getDirectory(id, relativeFrom: '.'), 'pubspec.yaml'); + cache.getDirectory(id, relativeFrom: '.'), + 'pubspec.yaml', + ); dataError('$relativePubspecPath has ' 'changed since the $lockFilePath file was generated, please run ' '"$topLevelProgram pub get" again.'); @@ -840,7 +867,7 @@ /// /// Will use Windows line endings (`\r\n`) if a `pubspec.lock` exists, and /// uses that. - void _saveLockFile(SolveResult result) { + void saveLockFile(SolveResult result) { _lockFile = result.lockFile; final windowsLineEndings = fileExists(lockFilePath) &&
diff --git a/lib/src/feature.dart b/lib/src/feature.dart deleted file mode 100644 index 94134ab..0000000 --- a/lib/src/feature.dart +++ /dev/null
@@ -1,70 +0,0 @@ -// Copyright (c) 2017, the Dart project authors. Please see the AUTHORS file -// for details. All rights reserved. Use of this source code is governed by a -// BSD-style license that can be found in the LICENSE file. - -import 'package:collection/collection.dart'; -import 'package:pub_semver/pub_semver.dart'; - -import 'package_name.dart'; - -/// A feature declared by a package. -/// -/// Features are collections of optional dependencies. Dependers can choose -/// which features to require from packages they depend on. -class Feature { - /// The name of this feature. - final String name; - - /// Whether this feature is enabled by default. - final bool onByDefault; - - /// The additional dependencies added by this feature. - final List<PackageRange> dependencies; - - /// Other features that this feature requires. - final List<String> requires; - - /// A map from SDK identifiers to this feature's constraints on those SDKs. - final Map<String, VersionConstraint> sdkConstraints; - - /// Returns the set of features in [features] that are enabled by - /// [dependencies]. - static Set<Feature> featuresEnabledBy(Map<String, Feature> features, - Map<String, FeatureDependency> dependencies) { - if (features.isEmpty) return const UnmodifiableSetView.empty(); - - // [enableFeature] adds a feature to [features], along with any other - // features it requires. - var enabledFeatures = <Feature>{}; - void enableFeature(Feature feature) { - if (!enabledFeatures.add(feature)) return; - for (var require in feature.requires) { - enableFeature(features[require]!); - } - } - - // Enable all features that are explicitly enabled by dependencies, or on by - // default and not disabled by dependencies. - for (var feature in features.values) { - if (dependencies[feature.name]?.isEnabled ?? feature.onByDefault) { - enableFeature(feature); - } - } - - return enabledFeatures; - } - - Feature(this.name, Iterable<PackageRange> dependencies, - {Iterable<String>? 
requires, - Map<String, VersionConstraint>? sdkConstraints, - this.onByDefault = true}) - : dependencies = UnmodifiableListView(dependencies.toList()), - requires = requires == null - ? const [] - : UnmodifiableListView(requires.toList()), - sdkConstraints = UnmodifiableMapView( - sdkConstraints ?? {'dart': VersionConstraint.any}); - - @override - String toString() => name; -}
diff --git a/lib/src/git.dart b/lib/src/git.dart index 5fa7536..ded45e9 100644 --- a/lib/src/git.dart +++ b/lib/src/git.dart
@@ -5,8 +5,11 @@ /// Helper functionality for invoking Git. import 'dart:async'; +import 'package:collection/collection.dart'; import 'package:path/path.dart' as p; +import 'package:pub_semver/pub_semver.dart'; +import 'command_runner.dart'; import 'exceptions.dart'; import 'io.dart'; import 'log.dart' as log; @@ -86,24 +89,9 @@ return result.stdout; } -/// Returns the name of the git command-line app, or `null` if Git could not be -/// found on the user's PATH. -String? get command { - if (_commandCache != null) return _commandCache; - - if (_tryGitCommand('git')) { - _commandCache = 'git'; - } else if (_tryGitCommand('git.cmd')) { - _commandCache = 'git.cmd'; - } else { - return null; - } - - log.fine('Determined git command $_commandCache.'); - return _commandCache; -} - -String? _commandCache; +/// The name of the git command-line app, or `null` if Git could not be found on +/// the user's PATH. +final String? command = ['git', 'git.cmd'].firstWhereOrNull(_tryGitCommand); /// Returns the root of the git repo [dir] belongs to. Returns `null` if not /// in a git repo or git is not installed. @@ -121,13 +109,35 @@ return null; } +/// '--recurse-submodules' was introduced in Git 2.14 +/// (https://git-scm.com/book/en/v2/Git-Tools-Submodules). +final _minSupportedGitVersion = Version(2, 14, 0); + /// Checks whether [command] is the Git command for this computer. bool _tryGitCommand(String command) { // If "git --version" prints something familiar, git is working. try { var result = runProcessSync(command, ['--version']); - var regexp = RegExp('^git version'); - return result.stdout.length == 1 && regexp.hasMatch(result.stdout.single); + + if (result.stdout.length != 1) return false; + final output = result.stdout.single; + final match = RegExp(r'^git version (\d+)\.(\d+)\.').matchAsPrefix(output); + + if (match == null) return false; + // Git seems to use many parts in the version number. We just check the + // first two.
+ final major = int.parse(match[1]!); + final minor = int.parse(match[2]!); + if (Version(major, minor, 0) < _minSupportedGitVersion) { + // We just warn here, as some features might work with older versions of + // git. + log.warning(''' +You have a very old version of git (version ${output.substring('git version '.length)}), +for $topLevelProgram it is recommended to use git version 2.14 or newer. +'''); + } + log.fine('Determined git command $command.'); + return true; } on RunProcessException catch (err) { // If the process failed, they probably don't have it. log.error('Git command is not "$command": $err');
diff --git a/lib/src/global_packages.dart b/lib/src/global_packages.dart index a868b4b..cc0bfdf 100644 --- a/lib/src/global_packages.dart +++ b/lib/src/global_packages.dart
@@ -12,7 +12,6 @@ import 'entrypoint.dart'; import 'exceptions.dart'; import 'executable.dart' as exec; -import 'http.dart' as http; import 'io.dart'; import 'lock_file.dart'; import 'log.dart' as log; @@ -62,6 +61,8 @@ /// The directory where the lockfiles for activated packages are stored. String get _directory => p.join(cache.rootDir, 'global_packages'); + String _packageDir(String packageName) => p.join(_directory, packageName); + /// The directory where binstubs for global package executables are stored. String get _binStubDir => p.join(cache.rootDir, 'bin'); @@ -84,27 +85,37 @@ /// If [overwriteBinStubs] is `true`, any binstubs that collide with /// existing binstubs in other packages will be overwritten by this one's. /// Otherwise, the previous ones will be preserved. - Future<void> activateGit(String repo, List<String>? executables, - {Map<String, FeatureDependency>? features, - required bool overwriteBinStubs}) async { - var name = await cache.git.getPackageNameFromRepo(repo); + Future<void> activateGit( + String repo, + List<String>? executables, { + required bool overwriteBinStubs, + String? path, + String? ref, + }) async { + var name = await cache.git.getPackageNameFromRepo(repo, cache); // TODO(nweiz): Add some special handling for git repos that contain path // dependencies. Their executables shouldn't be cached, and there should // be a mechanism for redoing dependency resolution if a path pubspec has // changed (see also issue 20499). - PackageRef ref; + PackageRef packageRef; try { - ref = cache.git.source.parseRef(name, {'url': repo}, containingPath: '.'); + packageRef = cache.git.parseRef( + name, + { + 'url': repo, + if (path != null) 'path': path, + if (ref != null) 'ref': ref, + }, + containingDir: '.'); } on FormatException catch (e) { throw ApplicationException(e.message); } await _installInCache( - ref - .withConstraint(VersionConstraint.any) - .withFeatures(features ?? 
const {}), - executables, - overwriteBinStubs: overwriteBinStubs); + packageRef.withConstraint(VersionConstraint.any), + executables, + overwriteBinStubs: overwriteBinStubs, + ); } /// Finds the latest version of the hosted package with [name] that matches @@ -121,15 +132,14 @@ /// [url] is an optional custom pub server URL. If not null, the package to be /// activated will be fetched from this URL instead of the default pub URL. Future<void> activateHosted( - String name, VersionConstraint constraint, List<String>? executables, - {Map<String, FeatureDependency>? features, - required bool overwriteBinStubs, - Uri? url}) async { + String name, + VersionConstraint constraint, + List<String>? executables, { + required bool overwriteBinStubs, + String? url, + }) async { await _installInCache( - cache.hosted.source - .refFor(name, url: url) - .withConstraint(constraint) - .withFeatures(features ?? const {}), + cache.hosted.refFor(name, url: url).withConstraint(constraint), executables, overwriteBinStubs: overwriteBinStubs); } @@ -151,26 +161,24 @@ // Get the package's dependencies. await entrypoint.acquireDependencies(SolveType.get, analytics: analytics); var name = entrypoint.root.name; - - try { - var originalLockFile = - LockFile.load(_getLockFilePath(name), cache.sources); - // Call this just to log what the current active package is, if any. - _describeActive(originalLockFile, name); - } on IOException { - // Couldn't read the lock file. It probably doesn't exist. - } + _describeActive(name, cache); // Write a lockfile that points to the local package. var fullPath = canonicalize(entrypoint.root.dir); - var id = cache.path.source.idFor(name, entrypoint.root.version, fullPath); + var id = cache.path.idFor( + name, + entrypoint.root.version, + fullPath, + p.current, + ); + final tempDir = cache.createTempDir(); // TODO(rnystrom): Look in "bin" and display list of binaries that // user can run. 
- _writeLockFile(name, LockFile([id])); + _writeLockFile(tempDir, LockFile([id])); - var binDir = p.join(_directory, name, 'bin'); - if (dirExists(binDir)) deleteEntry(binDir); + tryDeleteEntry(_packageDir(name)); + tryRenameDir(tempDir, _packageDir(name)); _updateBinStubs(entrypoint, entrypoint.root, executables, overwriteBinStubs: overwriteBinStubs); @@ -178,17 +186,12 @@ } /// Installs the package [dep] and its dependencies into the system cache. + /// + /// If [silent] less logging will be printed. Future<void> _installInCache(PackageRange dep, List<String>? executables, - {required bool overwriteBinStubs}) async { - LockFile? originalLockFile; - try { - originalLockFile = - LockFile.load(_getLockFilePath(dep.name), cache.sources); - // Call this just to log what the current active package is, if any. - _describeActive(originalLockFile, dep.name); - } on IOException { - // Couldn't read the lock file. It probably doesn't exist. - } + {required bool overwriteBinStubs, bool silent = false}) async { + final name = dep.name; + LockFile? originalLockFile = _describeActive(name, cache); // Create a dummy package with just [dep] so we can do resolution on it. var root = Package.inMemory(Pubspec('pub global activate', @@ -200,118 +203,104 @@ // being available, report that as a [dataError]. 
SolveResult result; try { - result = await log.progress('Resolving dependencies', - () => resolveVersions(SolveType.get, cache, root)); + result = await log.spinner( + 'Resolving dependencies', + () => resolveVersions(SolveType.get, cache, root), + condition: !silent, + ); } on SolveFailure catch (error) { for (var incompatibility in error.incompatibility.externalIncompatibilities) { if (incompatibility.cause != IncompatibilityCause.noVersions) continue; - if (incompatibility.terms.single.package.name != dep.name) continue; + if (incompatibility.terms.single.package.name != name) continue; dataError(error.toString()); } rethrow; } + // We want the entrypoint to be rooted at 'dep' not the dummy-package. + result.packages.removeWhere((id) => id.name == 'pub global activate'); final sameVersions = originalLockFile != null && originalLockFile.samePackageIds(result.lockFile); + final PackageId id = result.lockFile.packages[name]!; if (sameVersions) { log.message(''' -The package ${dep.name} is already activated at newest available version. -To recompile executables, first run `$topLevelProgram pub global deactivate ${dep.name}`. +The package $name is already activated at newest available version. +To recompile executables, first run `$topLevelProgram pub global deactivate $name`. '''); } else { - await result.showReport(SolveType.get, cache); + // Only precompile binaries if we have a new resolution. + if (!silent) await result.showReport(SolveType.get, cache); + + await result.downloadCachedPackages(cache); + + final lockFile = result.lockFile; + final tempDir = cache.createTempDir(); + _writeLockFile(tempDir, lockFile); + + // Load the package graph from [result] so we don't need to re-parse all + // the pubspecs. 
+ final entrypoint = Entrypoint.global( + tempDir, + cache.loadCached(id), + lockFile, + cache, + solveResult: result, + ); + + await entrypoint.writePackagesFiles(); + + await entrypoint.precompileExecutables(); + + tryDeleteEntry(_packageDir(name)); + tryRenameDir(tempDir, _packageDir(name)); } - // Make sure all of the dependencies are locally installed. - await Future.wait(result.packages.map((id) { - return http.withDependencyType(root.dependencyType(id.name), () async { - if (id.isRoot) return; - - var source = cache.source(id.source); - if (source is CachedSource) await source.downloadToSystemCache(id); - }); - })); - - var lockFile = result.lockFile; - _writeLockFile(dep.name, lockFile); - await _writePackageConfigFiles(dep.name, lockFile); - - // We want the entrypoint to be rooted at 'dep' not the dummy-package. - result.packages.removeWhere((id) => id.name == 'pub global activate'); - - var id = lockFile.packages[dep.name]!; - // Load the package graph from [result] so we don't need to re-parse all - // the pubspecs. final entrypoint = Entrypoint.global( - Package( - result.pubspecs[dep.name]!, - (cache.source(dep.source) as CachedSource).getDirectoryInCache(id), - ), - lockFile, + _packageDir(id.name), + cache.loadCached(id), + result.lockFile, cache, solveResult: result, ); - if (!sameVersions) { - // Only precompile binaries if we have a new resolution. - await entrypoint.precompileExecutables(); - } - _updateBinStubs( entrypoint, cache.load(entrypoint.lockFile.packages[dep.name]!), executables, overwriteBinStubs: overwriteBinStubs, ); - - log.message('Activated ${_formatPackage(id)}.'); - } - - Future<void> _writePackageConfigFiles( - String package, LockFile lockFile) async { - // TODO(sigurdm): Use [Entrypoint.writePackagesFiles] instead. 
- final packagesFilePath = _getPackagesFilePath(package); - final packageConfigFilePath = _getPackageConfigFilePath(package); - final dir = p.dirname(packagesFilePath); - writeTextFile( - packagesFilePath, lockFile.packagesFile(cache, relativeFrom: dir)); - ensureDir(p.dirname(packageConfigFilePath)); - writeTextFile(packageConfigFilePath, - await lockFile.packageConfigFile(cache, relativeFrom: dir)); + if (!silent) log.message('Activated ${_formatPackage(id)}.'); } /// Finishes activating package [package] by saving [lockFile] in the cache. - void _writeLockFile(String package, LockFile lockFile) { - ensureDir(p.join(_directory, package)); - - // TODO(nweiz): This cleans up Dart 1.6's old lockfile location. Remove it - // when Dart 1.6 is old enough that we don't think anyone will have these - // lockfiles anymore (issue 20703). - var oldPath = p.join(_directory, '$package.lock'); - if (fileExists(oldPath)) deleteEntry(oldPath); - - writeTextFile(_getLockFilePath(package), - lockFile.serialize(p.join(_directory, package))); + void _writeLockFile(String dir, LockFile lockFile) { + writeTextFile(p.join(dir, 'pubspec.lock'), lockFile.serialize(null)); } /// Shows the user the currently active package with [name], if any. - void _describeActive(LockFile lockFile, String? name) { + LockFile? _describeActive(String name, SystemCache cache) { + late final LockFile lockFile; + try { + lockFile = LockFile.load(_getLockFilePath(name), cache.sources); + } on IOException { + // Couldn't read the lock file. It probably doesn't exist. 
+ return null; + } var id = lockFile.packages[name]!; + final description = id.description.description; - var source = id.source; - if (source is GitSource) { - var url = source.urlFromDescription(id.description); + if (description is GitDescription) { log.message('Package ${log.bold(name)} is currently active from Git ' - 'repository "$url".'); - } else if (source is PathSource) { - var path = source.pathFromDescription(id.description); + 'repository "${p.prettyUri(description.url)}".'); + } else if (description is PathDescription) { log.message('Package ${log.bold(name)} is currently active at path ' - '"$path".'); + '"${description.path}".'); } else { log.message('Package ${log.bold(name)} is currently active at version ' '${log.bold(id.version)}.'); } + return lockFile; } /// Deactivates a previously-activated package named [name]. @@ -341,22 +330,8 @@ try { lockFile = LockFile.load(lockFilePath, cache.sources); } on IOException { - var oldLockFilePath = p.join(_directory, '$name.lock'); - try { - // TODO(nweiz): This looks for Dart 1.6's old lockfile location. - // Remove it when Dart 1.6 is old enough that we don't think anyone - // will have these lockfiles anymore (issue 20703). - lockFile = LockFile.load(oldLockFilePath, cache.sources); - } on IOException { - // If we couldn't read the lock file, it's not activated. - dataError('No active package ${log.bold(name)}.'); - } - - // Move the old lockfile to its new location. - ensureDir(p.dirname(lockFilePath)); - File(oldLockFilePath).renameSync(lockFilePath); - // Just make sure these files are created as well. - await _writePackageConfigFiles(name, lockFile); + // If we couldn't read the lock file, it's not activated. + dataError('No active package ${log.bold(name)}.'); } // Remove the package itself from the lockfile. 
We put it in there so we @@ -365,17 +340,17 @@ var id = lockFile.packages[name]!; lockFile = lockFile.removePackage(name); - var source = cache.source(id.source); Entrypoint entrypoint; - if (source is CachedSource) { + if (id.source is CachedSource) { // For cached sources, the package itself is in the cache and the // lockfile is the one we just loaded. - entrypoint = Entrypoint.global(cache.loadCached(id), lockFile, cache); + entrypoint = Entrypoint.global( + _packageDir(id.name), cache.loadCached(id), lockFile, cache); } else { // For uncached sources (i.e. path), the ID just points to the real // directory for the package. entrypoint = Entrypoint( - (id.source as PathSource).pathFromDescription(id.description), cache); + (id.description.description as PathDescription).path, cache); } entrypoint.root.pubspec.sdkConstraints.forEach((sdkName, constraint) { @@ -446,16 +421,6 @@ String _getLockFilePath(String name) => p.join(_directory, name, 'pubspec.lock'); - /// Gets the path to the .packages file for an activated cached package with - /// [name]. - String _getPackagesFilePath(String name) => - p.join(_directory, name, '.packages'); - - /// Gets the path to the `package_config.json` file for an - /// activated cached package with [name]. - String _getPackageConfigFilePath(String name) => - p.join(_directory, name, '.dart_tool', 'package_config.json'); - /// Shows the user a formatted list of globally activated packages. void listActivePackages() { if (!dirExists(_directory)) return; @@ -487,12 +452,12 @@ /// Returns formatted string representing the package [id]. 
String _formatPackage(PackageId id) { - var source = id.source; - if (source is GitSource) { - var url = source.urlFromDescription(id.description); + final description = id.description.description; + if (description is GitDescription) { + var url = p.prettyUri(description.url); return '${log.bold(id.name)} ${id.version} from Git repository "$url"'; - } else if (source is PathSource) { - var path = source.pathFromDescription(id.description); + } else if (description is PathDescription) { + var path = description.path; return '${log.bold(id.name)} ${id.version} at path "$path"'; } else { return '${log.bold(id.name)} ${id.version}'; @@ -542,17 +507,24 @@ log.message('Reactivating ${log.bold(id.name)} ${id.version}...'); var entrypoint = await find(id.name); + final packageExecutables = executables.remove(id.name) ?? []; - await _writePackageConfigFiles(id.name, entrypoint.lockFile); - await entrypoint.precompileExecutables(); - var packageExecutables = executables.remove(id.name) ?? []; - _updateBinStubs( - entrypoint, - cache.load(id), - packageExecutables, - overwriteBinStubs: true, - suggestIfNotOnPath: false, - ); + if (entrypoint.isCached) { + deleteEntry(entrypoint.globalDir!); + await _installInCache( + id.toRange(), + packageExecutables, + overwriteBinStubs: true, + silent: true, + ); + } else { + await activatePath( + entrypoint.root.dir, + packageExecutables, + overwriteBinStubs: true, + analytics: null, + ); + } successes.add(id.name); } catch (error, stackTrace) { var message = 'Failed to reactivate ' @@ -706,10 +678,7 @@ // Show errors for any missing scripts. // TODO(rnystrom): This can print false positives since a script may be // produced by a transformer. Do something better. 
- var binFiles = package - .listFiles(beneath: 'bin', recursive: false) - .map(package.relative) - .toList(); + var binFiles = package.executablePaths; for (var executable in installed) { var script = package.pubspec.executables[executable]; var scriptPath = p.join('bin', '$script.dart'); @@ -761,6 +730,7 @@ // If the script was built to a snapshot, just try to invoke that // directly and skip pub global run entirely. String invocation; + late String binstub; if (Platform.isWindows) { if (fileExists(snapshot)) { // We expect absolute paths from the precompiler since relative ones @@ -786,7 +756,7 @@ } else { invocation = 'dart pub global run ${package.name}:$script %*'; } - var batch = ''' + binstub = ''' @echo off rem This file was created by pub v${sdk.version}. rem Package: ${package.name} @@ -795,7 +765,6 @@ rem Script: $script $invocation '''; - writeTextFile(binStubPath, batch); } else { if (fileExists(snapshot)) { // We expect absolute paths from the precompiler since relative ones @@ -818,7 +787,7 @@ } else { invocation = 'dart pub global run ${package.name}:$script "\$@"'; } - var bash = ''' + binstub = ''' #!/usr/bin/env sh # This file was created by pub v${sdk.version}. # Package: ${package.name} @@ -827,25 +796,31 @@ # Script: $script $invocation '''; + } - // Write this as the system encoding since the system is going to execute - // it and it might contain non-ASCII characters in the pathnames. - writeTextFile(binStubPath, bash, encoding: const SystemEncoding()); + // Write the binstub to a temporary location, make it executable and move + // it into place afterwards to avoid races. + final tempDir = cache.createTempDir(); + try { + final tmpPath = p.join(tempDir, binStubPath); - // Make it executable. - var result = Process.runSync('chmod', ['+x', binStubPath]); - if (result.exitCode != 0) { - // Couldn't make it executable so don't leave it laying around. - try { - deleteEntry(binStubPath); - } on IOException catch (err) { - // Do nothing. 
We're going to fail below anyway. - log.fine('Could not delete binstub:\n$err'); + // Write this as the system encoding since the system is going to + // execute it and it might contain non-ASCII characters in the + // pathnames. + writeTextFile(tmpPath, binstub, encoding: const SystemEncoding()); + + if (Platform.isLinux || Platform.isMacOS) { + // Make it executable. + var result = Process.runSync('chmod', ['+x', tmpPath]); + if (result.exitCode != 0) { + // Couldn't make it executable so don't leave it laying around. + fail('Could not make "$tmpPath" executable (exit code ' + '${result.exitCode}):\n${result.stderr}'); } - - fail('Could not make "$binStubPath" executable (exit code ' - '${result.exitCode}):\n${result.stderr}'); } + File(tmpPath).renameSync(binStubPath); + } finally { + deleteEntry(tempDir); } return previousPackage;
diff --git a/lib/src/ignore.dart b/lib/src/ignore.dart index ff22dff..34e51fb 100644 --- a/lib/src/ignore.dart +++ b/lib/src/ignore.dart
@@ -25,7 +25,6 @@ /// [Ignore.listFiles]. /// /// [1]: https://git-scm.com/docs/gitignore - import 'package:meta/meta.dart'; /// A set of ignore rules representing a single ignore file. @@ -148,7 +147,8 @@ path.endsWith('/') ? path.substring(0, path.length - 1) : path; return listFiles( beneath: pathWithoutSlash, - includeDirs: true, // because we are listing below pathWithoutSlash + includeDirs: true, + // because we are listing below pathWithoutSlash listDir: (dir) { // List the next part of path: if (dir == pathWithoutSlash) return []; @@ -285,8 +285,10 @@ } if (currentIsDir) { final ignore = ignoreForDir(normalizedCurrent); - ignoreStack - .add(ignore == null ? null : _IgnorePrefixPair(ignore, current)); + ignoreStack.add(ignore == null + ? null + : _IgnorePrefixPair( + ignore, current == '/' ? current : '$current/')); // Put all entities in current on the stack to be processed. toVisit.add(listDir(normalizedCurrent).map((x) => '/$x').toList()); if (includeDirs) { @@ -309,13 +311,16 @@ // An invalid pattern is also considered empty. bool get empty => rule == null; + bool get valid => exception == null; // For invalid patterns this contains a description of the problem. final FormatException? exception; _IgnoreParseResult(this.pattern, this.rule) : exception = null; + _IgnoreParseResult.invalid(this.pattern, this.exception) : rule = null; + _IgnoreParseResult.empty(this.pattern) : rule = null, exception = null; @@ -540,7 +545,9 @@ class _IgnorePrefixPair { final Ignore ignore; final String prefix; + _IgnorePrefixPair(this.ignore, this.prefix); + @override String toString() { return '{${ignore._rules.map((r) => r.original)} $prefix}';
diff --git a/lib/src/io.dart b/lib/src/io.dart index 6d0bf83..2faf85f 100644 --- a/lib/src/io.dart +++ b/lib/src/io.dart
@@ -383,7 +383,7 @@ } // ERROR_DIR_NOT_EMPTY - if (!ignoreEmptyDir && isDirectoryNotEmptyException(error)) { + if (!ignoreEmptyDir && _isDirectoryNotEmptyException(error)) { return 'of dart-lang/sdk#25353'; } @@ -457,7 +457,35 @@ }, ignoreEmptyDir: true); } -bool isDirectoryNotEmptyException(FileSystemException e) { +/// Renames directory [from] to [to]. +/// If it fails with "destination not empty" we log and continue, assuming +/// another process got there before us. +void tryRenameDir(String from, String to) { + ensureDir(path.dirname(to)); + try { + renameDir(from, to); + } on FileSystemException catch (e) { + tryDeleteEntry(from); + if (!_isDirectoryNotEmptyException(e)) { + rethrow; + } + log.fine(''' +Destination directory $to already existed. +Assuming a concurrent pub invocation installed it.'''); + } +} + +void copyFile(String from, String to) { + log.io('Copying "$from" to "$to".'); + File(from).copySync(to); +} + +void renameFile(String from, String to) { + log.io('Renaming "$from" to "$to".'); + File(from).renameSync(to); +} + +bool _isDirectoryNotEmptyException(FileSystemException e) { final errorCode = e.osError?.errorCode; return // On Linux rename will fail with ENOTEMPTY if directory exists: @@ -1052,3 +1080,13 @@ return null; } }(); + +/// Escape [x] for users to copy-paste in bash. +/// +/// If x is alphanumeric we leave it as is. +/// +/// Otherwise, wrap with single quotation, and use '\'' to insert single quote. +String escapeShellArgument(String x) => + RegExp(r'^[a-zA-Z0-9-_=@.]+$').stringMatch(x) == null + ? "'${x.replaceAll(r'\', '\\').replaceAll("'", r"'\''")}'" + : x;
diff --git a/lib/src/language_version.dart b/lib/src/language_version.dart index ad331b2..46254d0 100644 --- a/lib/src/language_version.dart +++ b/lib/src/language_version.dart
@@ -110,4 +110,6 @@ /// [LanguageVersion.parse]. @override String toString() => '$major.$minor'; + + Version firstStable() => Version(major, minor, 0); }
diff --git a/lib/src/lock_file.dart b/lib/src/lock_file.dart index e2723c7..0efc3af 100644 --- a/lib/src/lock_file.dart +++ b/lib/src/lock_file.dart
@@ -16,7 +16,6 @@ import 'package_name.dart'; import 'packages_file.dart' as packages_file; import 'sdk.dart' show sdk; -import 'source_registry.dart'; import 'system_cache.dart'; import 'utils.dart'; @@ -142,11 +141,11 @@ var description = spec['description']; // Let the source parse the description. - var source = sources[sourceName]!; + var source = sources(sourceName); PackageId id; try { id = source.parseId(name, version, description, - containingPath: filePath); + containingDir: filePath == null ? null : p.dirname(filePath)); } on FormatException catch (ex) { throw SourceSpanFormatException( ex.message, spec.nodes['description'].span); @@ -218,7 +217,7 @@ String packagesFile( SystemCache cache, { String? entrypoint, - required String relativeFrom, + String? relativeFrom, }) { var header = ''' This file is deprecated. Tools should instead consume @@ -231,9 +230,12 @@ var map = Map<String, Uri>.fromIterable(ordered<String>(packages.keys), value: (name) { var id = packages[name]!; - var source = cache.source(id.source); return p.toUri( - p.join(source.getDirectory(id, relativeFrom: relativeFrom), 'lib')); + p.join( + cache.getDirectory(id, relativeFrom: relativeFrom), + 'lib', + ), + ); }); if (entrypoint != null) map[entrypoint] = Uri.parse('lib/'); @@ -256,13 +258,12 @@ SystemCache cache, { String? entrypoint, VersionConstraint? entrypointSdkConstraint, - required String relativeFrom, + String? 
relativeFrom, }) async { final entries = <PackageConfigEntry>[]; for (final name in ordered(packages.keys)) { final id = packages[name]!; - final source = cache.source(id.source); - final rootPath = source.getDirectory(id, relativeFrom: relativeFrom); + final rootPath = cache.getDirectory(id, relativeFrom: relativeFrom); Uri rootUri; if (p.isRelative(rootPath)) { // Relative paths are relative to the root project, we want them @@ -271,7 +272,7 @@ } else { rootUri = p.toUri(rootPath); } - final pubspec = await source.describe(id); + final pubspec = await cache.describe(id); final sdkConstraint = pubspec.sdkConstraints[sdk.identifier]; entries.add(PackageConfigEntry( name: name, @@ -306,17 +307,18 @@ /// Returns the serialized YAML text of the lock file. /// /// [packageDir] is the containing directory of the root package, used to - /// properly serialize package descriptions. - String serialize(String packageDir) { + /// serialize relative path package descriptions. If it is null, they will be + /// serialized as absolute. + String serialize(String? packageDir) { // Convert the dependencies to a simple object. var packageMap = {}; packages.forEach((name, package) { var description = - package.source!.serializeDescription(packageDir, package.description); + package.description.serializeForLockfile(containingDir: packageDir); packageMap[name] = { 'version': package.version.toString(), - 'source': package.source!.name, + 'source': package.source.name, 'description': description, 'dependency': _dependencyType(package.name) };
diff --git a/lib/src/null_safety_analysis.dart b/lib/src/null_safety_analysis.dart index 5b882a8..fe89b55 100644 --- a/lib/src/null_safety_analysis.dart +++ b/lib/src/null_safety_analysis.dart
@@ -20,7 +20,6 @@ import 'solver.dart'; import 'source.dart'; import 'source/cached.dart'; -import 'source/path.dart'; import 'system_cache.dart'; enum NullSafetyCompliance { @@ -71,28 +70,29 @@ /// /// If [packageId] is a relative path dependency [containingPath] must be /// provided with an absolute path to resolve it against. - Future<NullSafetyAnalysisResult> nullSafetyCompliance(PackageId packageId, - {String? containingPath}) async { + Future<NullSafetyAnalysisResult> nullSafetyCompliance( + PackageId packageId, + ) async { + final description = packageId.description.description; + final rootPubspec = await _systemCache.describe(packageId); + // A space in the name prevents clashes with other package names. final fakeRootName = '${packageId.name} importer'; final fakeRoot = Package.inMemory(Pubspec(fakeRootName, fields: { 'dependencies': { packageId.name: { - packageId.source!.name: packageId.source is PathSource - ? (packageId.description['relative'] - ? path.join( - containingPath!, packageId.description['path']) - : packageId.description['path']) - : packageId.description, + packageId.source.name: description.serializeForPubspec( + containingDir: null, + languageVersion: + LanguageVersion.firstVersionWithShorterHostedSyntax), 'version': packageId.version.toString(), } } }, - sources: _systemCache.sources)); + sources: _systemCache.sources, + sdkConstraints: {'dart': rootPubspec.sdkConstraints['dart']!})); - final rootPubspec = - await packageId.source!.bind(_systemCache).describe(packageId); final rootLanguageVersion = rootPubspec.languageVersion; if (!rootLanguageVersion.supportsNullSafety) { final span = @@ -119,8 +119,10 @@ } return nullSafetyComplianceOfPackages( result.packages.where((id) => id.name != fakeRootName), - Package(rootPubspec, - packageId.source!.bind(_systemCache).getDirectory(packageId)), + Package( + rootPubspec, + _systemCache.getDirectory(packageId), + ), ); } @@ -144,15 +146,15 @@ final packageInternalAnalysis = await 
_packageInternallyGoodCache.putIfAbsent(dependencyId, () async { Pubspec pubspec; - BoundSource? boundSource; + Source? source; String packageDir; - if (dependencyId.source == null) { + if (dependencyId.isRoot) { pubspec = rootPackage.pubspec; packageDir = rootPackage.dir; } else { - boundSource = _systemCache.source(dependencyId.source); - pubspec = await boundSource.describe(dependencyId); - packageDir = boundSource.getDirectory(dependencyId); + source = dependencyId.source; + pubspec = await _systemCache.describe(dependencyId); + packageDir = _systemCache.getDirectory(dependencyId); } if (!pubspec.languageVersion.supportsNullSafety) { @@ -167,9 +169,9 @@ ); } - if (boundSource is CachedSource) { + if (source is CachedSource) { // TODO(sigurdm): Consider using withDependencyType here. - await boundSource.downloadToSystemCache(dependencyId); + await source.downloadToSystemCache(dependencyId, _systemCache); } final libDir =
diff --git a/lib/src/package.dart b/lib/src/package.dart index 0bc6f89..4d90311 100644 --- a/lib/src/package.dart +++ b/lib/src/package.dart
@@ -4,7 +4,6 @@ import 'dart:io'; -import 'package:collection/collection.dart' show IterableExtension; import 'package:path/path.dart' as p; import 'package:pub_semver/pub_semver.dart'; @@ -15,12 +14,9 @@ import 'log.dart' as log; import 'package_name.dart'; import 'pubspec.dart'; -import 'source_registry.dart'; +import 'system_cache.dart'; import 'utils.dart'; -final _readmeRegexp = RegExp(r'^README($|\.)', caseSensitive: false); -final _changelogRegexp = RegExp(r'^CHANGELOG($|\.)', caseSensitive: false); - /// A named, versioned, unit of code and resource reuse. class Package { /// Compares [a] and [b] orders them by name then version number. @@ -68,7 +64,8 @@ /// The immediate dev dependencies this package specifies in its pubspec. Map<String, PackageRange> get devDependencies => pubspec.devDependencies; - /// The dependency overrides this package specifies in its pubspec. + /// The dependency overrides this package specifies in its pubspec or pubspec + /// overrides. Map<String, PackageRange> get dependencyOverrides => pubspec.dependencyOverrides; @@ -83,10 +80,12 @@ ..addAll(dependencyOverrides); } - /// Returns a list of asset ids for all Dart executables in this package's bin + /// Returns a list of paths to all Dart executables in this package's bin /// directory. List<String> get executablePaths { - return ordered(listFiles(beneath: 'bin', recursive: false)) + final binDir = p.join(dir, 'bin'); + if (!dirExists(binDir)) return <String>[]; + return ordered(listDir(p.join(dir, 'bin'), includeDirs: false)) .where((executable) => p.extension(executable) == '.dart') .map((executable) => p.relative(executable, from: dir)) .toList(); @@ -95,34 +94,6 @@ List<String> get executableNames => executablePaths.map(p.basenameWithoutExtension).toList(); - /// Returns the path to the README file at the root of the entrypoint, or null - /// if no README file is found. 
- /// - /// If multiple READMEs are found, this uses the same conventions as - /// pub.dartlang.org for choosing the primary one: the README with the fewest - /// extensions that is lexically ordered first is chosen. - String? get readmePath { - var readmes = listFiles(recursive: false) - .map(p.basename) - .where((entry) => entry.contains(_readmeRegexp)); - if (readmes.isEmpty) return null; - - return p.join(dir, readmes.reduce((readme1, readme2) { - var extensions1 = '.'.allMatches(readme1).length; - var extensions2 = '.'.allMatches(readme2).length; - var comparison = extensions1.compareTo(extensions2); - if (comparison == 0) comparison = readme1.compareTo(readme2); - return (comparison <= 0) ? readme1 : readme2; - })); - } - - /// Returns the path to the CHANGELOG file at the root of the entrypoint, or - /// null if no CHANGELOG file is found. - String? get changelogPath { - return listFiles(recursive: false).firstWhereOrNull( - (entry) => p.basename(entry).contains(_changelogRegexp)); - } - /// Returns whether or not this package is in a Git repo. late final bool inGitRepo = computeInGitRepoCache(); @@ -145,8 +116,24 @@ /// [name] is the expected name of that package (e.g. the name given in the /// dependency), or `null` if the package being loaded is the entrypoint /// package. - Package.load(String? name, String this._dir, SourceRegistry sources) - : pubspec = Pubspec.load(_dir, sources, expectedName: name); + /// + /// `pubspec_overrides.yaml` is only loaded if [withPubspecOverrides] is + /// `true`. + factory Package.load( + String? name, + String dir, + SourceRegistry sources, { + bool withPubspecOverrides = false, + }) { + final pubspec = Pubspec.load(dir, sources, + expectedName: name, allowOverridesFile: withPubspecOverrides); + return Package._(dir, pubspec); + } + + Package._( + this._dir, + this.pubspec, + ); /// Constructs a package with the given pubspec. 
/// @@ -220,8 +207,11 @@ /// /// For each directory a .pubignore takes precedence over a .gitignore. /// - /// Note that the returned paths won't always be beneath [dir]. To safely - /// convert them to paths relative to the package root, use [relative]. + /// Note that the returned paths will always be below [dir], and will + /// always start with [dir] (thus always be relative to the current working + /// directory, or absolute if [dir] is absolute). + /// + /// To convert them to paths relative to the package root, use [p.relative]. List<String> listFiles({String? beneath, bool recursive = true}) { // An in-memory package has no files. if (isInMemory) return [];
diff --git a/lib/src/package_graph.dart b/lib/src/package_graph.dart index 45351d7..cbd91ec 100644 --- a/lib/src/package_graph.dart +++ b/lib/src/package_graph.dart
@@ -40,14 +40,15 @@ /// the packages' pubspecs are already fully-parsed. factory PackageGraph.fromSolveResult( Entrypoint entrypoint, SolveResult result) { - var packages = Map<String, Package>.fromIterable(result.packages, - key: (id) => id.name, - value: (id) { - if (id.name == entrypoint.root.name) return entrypoint.root; - - return Package(result.pubspecs[id.name]!, - entrypoint.cache.source(id.source).getDirectory(id)); - }); + final packages = { + for (final id in result.packages) + id.name: id.name == entrypoint.root.name + ? entrypoint.root + : Package( + result.pubspecs[id.name]!, + entrypoint.cache.getDirectory(id), + ) + }; return PackageGraph(entrypoint, result.lockFile, packages); } @@ -84,7 +85,7 @@ return entrypoint.isCached; } else { var id = lockFile.packages[package]!; - return entrypoint.cache.source(id.source) is CachedSource; + return id.source is CachedSource; } }
diff --git a/lib/src/package_name.dart b/lib/src/package_name.dart index d83f466..58f12dd 100644 --- a/lib/src/package_name.dart +++ b/lib/src/package_name.dart
@@ -2,93 +2,32 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -import 'package:collection/collection.dart'; import 'package:pub_semver/pub_semver.dart'; import 'package.dart'; import 'source.dart'; import 'source/hosted.dart'; -import 'utils.dart'; - -/// The equality to use when comparing the feature sets of two package names. -const _featureEquality = MapEquality<String, FeatureDependency>(); - -/// The base class of [PackageRef], [PackageId], and [PackageRange]. -abstract class PackageName { - /// The name of the package being identified. - final String name; - - /// The [Source] used to look up this package. - /// - /// If this is a root package, this will be `null`. - final Source? source; - - /// The metadata used by the package's [source] to identify and locate it. - /// - /// It contains whatever [Source]-specific data it needs to be able to get - /// the package. For example, the description of a git sourced package might - /// by the URL "git://github.com/dart/uilib.git". - final dynamic description; - - /// Whether this package is the root package. - bool get isRoot => source == null; - - PackageName._(this.name, this.source, this.description); - - /// Returns a [PackageRef] with this one's [name], [source], and - /// [description]. - PackageRef toRef() => PackageRef(name, source, description); - - /// Returns a [PackageRange] for this package with the given version constraint. - PackageRange withConstraint(VersionConstraint constraint) => - PackageRange(name, source, constraint, description); - - /// Returns whether this refers to the same package as [other]. - /// - /// This doesn't compare any constraint information; it's equivalent to - /// `this.toRef() == other.toRef()`. 
- bool samePackage(PackageName other) { - if (other.name != name) return false; - var thisSource = source; - if (thisSource == null) return other.source == null; - - return other.source == thisSource && - thisSource.descriptionsEqual(description, other.description); - } - - @override - bool operator ==(Object other) => - throw UnimplementedError('Subclass should implement =='); - - @override - int get hashCode { - var thisSource = source; - if (thisSource == null) return name.hashCode; - return name.hashCode ^ - thisSource.hashCode ^ - thisSource.hashDescription(description); - } - - /// Returns a string representation of this package name. - /// - /// If [detail] is passed, it controls exactly which details are included. - @override - String toString([PackageDetail? detail]); -} +import 'source/root.dart'; +import 'system_cache.dart'; /// A reference to a [Package], but not any particular version(s) of it. -class PackageRef extends PackageName { - /// Creates a reference to a package with the given [name], [source], and +/// +/// It knows the [name] of a package and a [Description] that is connected +/// with a certain [Source]. This is what you need for listing available +/// versions of a package. See [SystemCache.getVersions]. +class PackageRef { + final String name; + final Description description; + bool get isRoot => description is RootDescription; + Source get source => description.source; + + /// Creates a reference to a package with the given [name], and /// [description]. - /// - /// Since an ID's description is an implementation detail of its source, this - /// should generally not be called outside of [Source] subclasses. A reference - /// can be obtained from a user-supplied description using [Source.parseRef]. - PackageRef(String name, Source? source, description) - : super._(name, source, description); + PackageRef(this.name, this.description); /// Creates a reference to the given root package. 
- PackageRef.root(Package package) : super._(package.name, null, package.name); + static PackageRef root(Package package) => + PackageRef(package.name, RootDescription(package)); @override String toString([PackageDetail? detail]) { @@ -96,62 +35,73 @@ if (isRoot) return name; var buffer = StringBuffer(name); - if (detail.showSource ?? source is! HostedSource) { - buffer.write(' from $source'); + if (detail.showSource ?? description is! HostedDescription) { + buffer.write(' from ${description.source}'); if (detail.showDescription) { - buffer.write(' ${source!.formatDescription(description)}'); + buffer.write(' ${description.format()}'); } } return buffer.toString(); } - @override - bool operator ==(other) => other is PackageRef && samePackage(other); + PackageRange withConstraint(VersionConstraint constraint) => + PackageRange(this, constraint); @override - int get hashCode => super.hashCode ^ 'PackageRef'.hashCode; + bool operator ==(other) => + other is PackageRef && + name == other.name && + description == other.description; + + @override + int get hashCode => Object.hash(name, description); } /// A reference to a specific version of a package. /// -/// A package ID contains enough information to correctly get the package. +/// A package ID contains enough information to correctly retrieve the package. /// /// It's possible for multiple distinct package IDs to point to different /// packages that have identical contents. For example, the same package may be /// available from multiple sources. As far as Pub is concerned, those packages /// are different. /// -/// Note that a package ID's [description] field has a different structure than -/// the [PackageRef.description] or [PackageRange.description] fields for some -/// sources. For example, the `git` source adds revision information to the -/// description to ensure that the same ID always points to the same source. -class PackageId extends PackageName { - /// The package's version. 
+/// Note that a package ID's [description] field is a [ResolvedDescription] +/// while [PackageRef.description] and [PackageRange.description] are +/// [Description]s. +class PackageId { + final String name; final Version version; + final ResolvedDescription description; + bool get isRoot => description is ResolvedRootDescription; + Source get source => description.description.source; /// Creates an ID for a package with the given [name], [source], [version], /// and [description]. /// /// Since an ID's description is an implementation detail of its source, this /// should generally not be called outside of [Source] subclasses. - PackageId(String name, Source? source, this.version, description) - : super._(name, source, description); + PackageId(this.name, this.version, this.description); /// Creates an ID for the given root package. - PackageId.root(Package package) - : version = package.version, - super._(package.name, null, package.name); + static PackageId root(Package package) => PackageId(package.name, + package.version, ResolvedRootDescription(RootDescription(package))); @override - int get hashCode => super.hashCode ^ version.hashCode; + int get hashCode => Object.hash(name, version, description); @override bool operator ==(other) => - other is PackageId && samePackage(other) && other.version == version; + other is PackageId && + name == other.name && + version == other.version && + description == other.description; /// Returns a [PackageRange] that allows only [version] of this package. - PackageRange toRange() => withConstraint(version); + PackageRange toRange() => PackageRange(toRef(), version); + + PackageRef toRef() => PackageRef(name, description.description); @override String toString([PackageDetail? detail]) { @@ -160,10 +110,11 @@ var buffer = StringBuffer(name); if (detail.showVersion ?? !isRoot) buffer.write(' $version'); - if (!isRoot && (detail.showSource ?? source is! 
HostedSource)) { - buffer.write(' from $source'); + if (!isRoot && + (detail.showSource ?? description is! ResolvedHostedDescription)) { + buffer.write(' from ${description.description.source}'); if (detail.showDescription) { - buffer.write(' ${source!.formatDescription(description)}'); + buffer.write(' ${description.format()}'); } } @@ -172,57 +123,31 @@ } /// A reference to a constrained range of versions of one package. -class PackageRange extends PackageName { +/// +/// This is represented as a [PackageRef] and a [VersionConstraint]. +class PackageRange { + final PackageRef _ref; + /// The allowed package versions. final VersionConstraint constraint; - /// The dependencies declared on features of the target package. - final Map<String, FeatureDependency> features; + String get name => _ref.name; + Description get description => _ref.description; + bool get isRoot => _ref.isRoot; + Source get source => _ref.source; /// Creates a reference to package with the given [name], [source], /// [constraint], and [description]. /// /// Since an ID's description is an implementation detail of its source, this /// should generally not be called outside of [Source] subclasses. - PackageRange(String name, Source? source, this.constraint, description, - {Map<String, FeatureDependency>? features}) - : features = features == null - ? const {} - : UnmodifiableMapView(Map.from(features)), - super._(name, source, description); + PackageRange(this._ref, this.constraint); /// Creates a range that selects the root package. - PackageRange.root(Package package) - : constraint = package.version, - features = const {}, - super._(package.name, null, package.name); + static PackageRange root(Package package) => + PackageRange(PackageRef.root(package), package.version); - /// Returns a description of [features], or the empty string if [features] is - /// empty. 
- String get featureDescription { - if (features.isEmpty) return ''; - - var enabledFeatures = <String>[]; - var disabledFeatures = <String>[]; - features.forEach((name, type) { - if (type == FeatureDependency.unused) { - disabledFeatures.add(name); - } else { - enabledFeatures.add(name); - } - }); - - var description = ''; - if (enabledFeatures.isNotEmpty) { - description += 'with ${toSentence(enabledFeatures)}'; - if (disabledFeatures.isNotEmpty) description += ', '; - } - - if (disabledFeatures.isNotEmpty) { - description += 'without ${toSentence(disabledFeatures)}'; - } - return description; - } + PackageRef toRef() => _ref; @override String toString([PackageDetail? detail]) { @@ -233,17 +158,12 @@ buffer.write(' $constraint'); } - if (!isRoot && (detail.showSource ?? source is! HostedSource)) { - buffer.write(' from $source'); + if (!isRoot && (detail.showSource ?? description is! HostedDescription)) { + buffer.write(' from ${description.source.name}'); if (detail.showDescription) { - buffer.write(' ${source!.formatDescription(description)}'); + buffer.write(' ${description.format()}'); } } - - if (detail.showFeatures && features.isNotEmpty) { - buffer.write(' $featureDescription'); - } - return buffer.toString(); } @@ -251,14 +171,7 @@ bool get _showVersionConstraint { if (isRoot) return false; if (!constraint.isAny) return true; - return source!.hasMultipleVersions; - } - - /// Returns a new [PackageRange] with [features] merged with [this.features]. 
- PackageRange withFeatures(Map<String, FeatureDependency> features) { - if (features.isEmpty) return this; - return PackageRange(name, source, constraint, description, - features: Map.from(this.features)..addAll(features)); + return description.source.hasMultipleVersions; } /// Returns a copy of [this] with the same semantics, but with a `^`-style @@ -273,7 +186,7 @@ var min = range.min; if (min == null) return this; if (range.max == min.nextBreaking.firstPreRelease) { - return withConstraint(VersionConstraint.compatibleWith(min)); + return PackageRange(_ref, VersionConstraint.compatibleWith(min)); } else { return this; } @@ -283,42 +196,19 @@ /// /// Specifically, whether [id] refers to the same package as [this] *and* /// [constraint] allows `id.version`. - bool allows(PackageId id) => samePackage(id) && constraint.allows(id.version); + bool allows(PackageId id) => + name == id.name && + description == id.description.description && + constraint.allows(id.version); @override - int get hashCode => - super.hashCode ^ constraint.hashCode ^ _featureEquality.hash(features); + int get hashCode => Object.hash(_ref, constraint); @override bool operator ==(other) => other is PackageRange && - samePackage(other) && - other.constraint == constraint && - _featureEquality.equals(other.features, features); -} - -/// An enum of types of dependencies on a [Feature]. -class FeatureDependency { - /// The feature must exist and be enabled for this dependency to be satisfied. - static const required = FeatureDependency._('required'); - - /// The feature must be enabled if it exists, but is not required to exist for - /// this dependency to be satisfied. - static const ifAvailable = FeatureDependency._('if available'); - - /// The feature is neither required to exist nor to be enabled for this - /// feature to be satisfied. - static const unused = FeatureDependency._('unused'); - - final String _name; - - /// Whether this type of dependency enables the feature it depends on. 
- bool get isEnabled => this != unused; - - const FeatureDependency._(this._name); - - @override - String toString() => _name; + _ref == other._ref && + other.constraint == constraint; } /// An enum of different levels of detail that can be used when displaying a @@ -345,25 +235,15 @@ /// This defaults to `false`. final bool showDescription; - /// Whether to show the package features. - /// - /// This defaults to `true`. - final bool showFeatures; - const PackageDetail( - {this.showVersion, - bool? showSource, - bool? showDescription, - bool? showFeatures}) + {this.showVersion, bool? showSource, bool? showDescription}) : showSource = showDescription == true ? true : showSource, - showDescription = showDescription ?? false, - showFeatures = showFeatures ?? true; + showDescription = showDescription ?? false; /// Returns a [PackageDetail] with the maximum amount of detail between [this] /// and [other]. PackageDetail max(PackageDetail other) => PackageDetail( showVersion: showVersion! || other.showVersion!, showSource: showSource! || other.showSource!, - showDescription: showDescription || other.showDescription, - showFeatures: showFeatures || other.showFeatures); + showDescription: showDescription || other.showDescription); }
diff --git a/lib/src/pubspec.dart b/lib/src/pubspec.dart index cc75b2d..e18fbc6 100644 --- a/lib/src/pubspec.dart +++ b/lib/src/pubspec.dart
@@ -11,14 +11,13 @@ import 'package:yaml/yaml.dart'; import 'exceptions.dart'; -import 'feature.dart'; import 'io.dart'; import 'language_version.dart'; import 'log.dart'; import 'package_name.dart'; import 'pubspec_parse.dart'; import 'sdk.dart'; -import 'source_registry.dart'; +import 'system_cache.dart'; import 'utils.dart'; export 'pubspec_parse.dart' hide PubspecBase; @@ -69,12 +68,27 @@ // initialization can throw a [PubspecException], that error should also be // exposed through [allErrors]. + /// The fields of [pubspecOverridesFilename]. `null` if no such file exists or has + /// to be considered. + final YamlMap? _overridesFileFields; + + String? get _packageName => fields['name'] != null ? name : null; + + /// The name of the manifest file. + static const pubspecYamlFilename = 'pubspec.yaml'; + + /// The filename of the pubspec overrides file. + /// + /// This file can contain dependency_overrides that override those in + /// pubspec.yaml. + static const pubspecOverridesFilename = 'pubspec_overrides.yaml'; + /// The registry of sources to use when parsing [dependencies] and /// [devDependencies]. /// /// This will be null if this was created using [new Pubspec] or [new /// Pubspec.empty]. - final SourceRegistry? _sources; + final SourceRegistry _sources; /// The location from which the pubspec was loaded. /// @@ -83,14 +97,27 @@ Uri? get _location => fields.span.sourceUrl; /// The additional packages this package depends on. - Map<String, PackageRange> get dependencies => _dependencies ??= - _parseDependencies('dependencies', fields.nodes['dependencies']); + Map<String, PackageRange> get dependencies => + _dependencies ??= _parseDependencies( + 'dependencies', + fields.nodes['dependencies'], + _sources, + languageVersion, + _packageName, + _location); Map<String, PackageRange>? _dependencies; /// The packages this package depends on when it is the root package. 
- Map<String, PackageRange> get devDependencies => _devDependencies ??= - _parseDependencies('dev_dependencies', fields.nodes['dev_dependencies']); + Map<String, PackageRange> get devDependencies => + _devDependencies ??= _parseDependencies( + 'dev_dependencies', + fields.nodes['dev_dependencies'], + _sources, + languageVersion, + _packageName, + _location, + ); Map<String, PackageRange>? _devDependencies; @@ -99,58 +126,44 @@ /// /// Dependencies here will replace any dependency on a package with the same /// name anywhere in the dependency graph. - Map<String, PackageRange> get dependencyOverrides => - _dependencyOverrides ??= _parseDependencies( - 'dependency_overrides', fields.nodes['dependency_overrides']); + /// + /// These can occur both in the pubspec.yaml file and the [pubspecOverridesFilename]. + Map<String, PackageRange> get dependencyOverrides { + if (_dependencyOverrides != null) return _dependencyOverrides!; + final pubspecOverridesFields = _overridesFileFields; + if (pubspecOverridesFields != null) { + pubspecOverridesFields.nodes.forEach((key, _) { + if (!const {'dependency_overrides'}.contains(key.value)) { + throw PubspecException( + 'pubspec_overrides.yaml only supports the `dependency_overrides` field.', + key.span, + ); + } + }); + if (pubspecOverridesFields.containsKey('dependency_overrides')) { + _dependencyOverrides = _parseDependencies( + 'dependency_overrides', + pubspecOverridesFields.nodes['dependency_overrides'], + _sources, + languageVersion, + _packageName, + _location, + fileType: _FileType.pubspecOverrides, + ); + } + } + return _dependencyOverrides ??= _parseDependencies( + 'dependency_overrides', + fields.nodes['dependency_overrides'], + _sources, + languageVersion, + _packageName, + _location, + ); + } Map<String, PackageRange>? 
_dependencyOverrides; - late final Map<String, Feature> features = _computeFeatures(); - - Map<String, Feature> _computeFeatures() { - final features = fields['features']; - if (features == null) { - return const {}; - } - - if (features is! YamlMap) { - _error('"features" field must be a map.', fields.nodes['features']!.span); - } - - return mapMap(features.nodes, - key: (dynamic nameNode, dynamic _) => _validateFeatureName(nameNode), - value: (dynamic nameNode, dynamic specNode) { - if (specNode.value == null) { - return Feature(nameNode.value, const []); - } - - if (specNode is! YamlMap) { - _error('A feature specification must be a map.', specNode.span); - } - - var onByDefault = specNode['default'] ?? true; - if (onByDefault is! bool) { - _error('Default must be true or false.', - specNode.nodes['default']!.span); - } - - var requires = _parseStringList(specNode.nodes['requires'], - validate: (name, span) { - if (!features.containsKey(name)) _error('Undefined feature.', span); - }); - - var dependencies = _parseDependencies( - 'dependencies', specNode.nodes['dependencies']); - - var sdkConstraints = _parseEnvironment(specNode); - - return Feature(nameNode.value, dependencies.values, - requires: requires, - sdkConstraints: sdkConstraints, - onByDefault: onByDefault); - }); - } - /// A map from SDK identifiers to constraints on those SDK versions. Map<String, VersionConstraint> get sdkConstraints { _ensureEnvironment(); @@ -250,7 +263,8 @@ } var constraints = { - 'dart': _parseVersionConstraint(yaml.nodes['sdk'], + 'dart': _parseVersionConstraint( + yaml.nodes['sdk'], _packageName, _FileType.pubspec, defaultUpperBoundConstraint: _includeDefaultSdkConstraint ? _defaultUpperBoundSdkConstraint : null) @@ -263,10 +277,11 @@ } if (name.value == 'sdk') return; - constraints[name.value as String] = _parseVersionConstraint(constraint, - // Flutter constraints get special treatment, as Flutter won't be - // using semantic versioning to mark breaking releases. 
- ignoreUpperBound: name.value == 'flutter'); + constraints[name.value as String] = + _parseVersionConstraint(constraint, _packageName, _FileType.pubspec, + // Flutter constraints get special treatment, as Flutter won't be + // using semantic versioning to mark breaking releases. + ignoreUpperBound: name.value == 'flutter'); }); return constraints; @@ -280,10 +295,14 @@ /// /// If [expectedName] is passed and the pubspec doesn't have a matching name /// field, this will throw a [PubspecException]. + /// + /// If [allowOverridesFile] is `true` [pubspecOverridesFilename] is loaded and + /// is allowed to override dependency_overrides from `pubspec.yaml`. factory Pubspec.load(String packageDir, SourceRegistry sources, - {String? expectedName}) { - var pubspecPath = path.join(packageDir, 'pubspec.yaml'); - var pubspecUri = path.toUri(pubspecPath); + {String? expectedName, bool allowOverridesFile = false}) { + var pubspecPath = path.join(packageDir, pubspecYamlFilename); + var overridesPath = path.join(packageDir, pubspecOverridesFilename); + if (!fileExists(pubspecPath)) { throw FileException( // Make the package dir absolute because for the entrypoint it'll just @@ -292,20 +311,31 @@ '"${canonicalize(packageDir)}".', pubspecPath); } + String? overridesFileContents = + allowOverridesFile && fileExists(overridesPath) + ? readTextFile(overridesPath) + : null; - return Pubspec.parse(readTextFile(pubspecPath), sources, - expectedName: expectedName, location: pubspecUri); + return Pubspec.parse( + readTextFile(pubspecPath), + sources, + expectedName: expectedName, + location: path.toUri(pubspecPath), + overridesFileContents: overridesFileContents, + overridesLocation: path.toUri(overridesPath), + ); } - Pubspec(String name, - {Version? version, - Iterable<PackageRange>? dependencies, - Iterable<PackageRange>? devDependencies, - Iterable<PackageRange>? dependencyOverrides, - Map? fields, - SourceRegistry? sources, - Map<String, VersionConstraint>? 
sdkConstraints}) - : _dependencies = dependencies == null + Pubspec( + String name, { + Version? version, + Iterable<PackageRange>? dependencies, + Iterable<PackageRange>? devDependencies, + Iterable<PackageRange>? dependencyOverrides, + Map? fields, + SourceRegistry? sources, + Map<String, VersionConstraint>? sdkConstraints, + }) : _dependencies = dependencies == null ? null : Map.fromIterable(dependencies, key: (range) => range.name), _devDependencies = devDependencies == null @@ -317,22 +347,14 @@ _sdkConstraints = sdkConstraints ?? UnmodifiableMapView({'dart': VersionConstraint.any}), _includeDefaultSdkConstraint = false, - _sources = sources, + _sources = sources ?? + ((String? name) => throw StateError('No source registry given')), + _overridesFileFields = null, super( fields == null ? YamlMap() : YamlMap.wrap(fields), name: name, version: version, ); - Pubspec.empty() - : _sources = null, - _dependencies = {}, - _devDependencies = {}, - _sdkConstraints = {'dart': VersionConstraint.any}, - _includeDefaultSdkConstraint = false, - super( - YamlMap(), - version: Version.none, - ); /// Returns a Pubspec object for an already-parsed map representing its /// contents. @@ -342,8 +364,9 @@ /// /// [location] is the location from which this pubspec was loaded. Pubspec.fromMap(Map fields, this._sources, - {String? expectedName, Uri? location}) - : _includeDefaultSdkConstraint = true, + {YamlMap? overridesFields, String? expectedName, Uri? location}) + : _overridesFileFields = overridesFields, + _includeDefaultSdkConstraint = true, super(fields is YamlMap ? fields : YamlMap.wrap(fields, sourceUrl: location)) { @@ -358,31 +381,49 @@ this.fields.nodes['name']!.span); } - /// Parses the pubspec stored at [filePath] whose text is [contents]. + /// Parses the pubspec stored at [location] whose text is [contents]. /// /// If the pubspec doesn't define a version for itself, it defaults to /// [Version.none]. 
- factory Pubspec.parse(String contents, SourceRegistry sources, - {String? expectedName, Uri? location}) { - YamlNode pubspecNode; + factory Pubspec.parse( + String contents, + SourceRegistry sources, { + String? expectedName, + Uri? location, + String? overridesFileContents, + Uri? overridesLocation, + }) { + late final YamlMap pubspecMap; + YamlMap? overridesFileMap; try { - pubspecNode = loadYamlNode(contents, sourceUrl: location); + pubspecMap = _ensureMap(loadYamlNode(contents, sourceUrl: location)); + if (overridesFileContents != null) { + overridesFileMap = _ensureMap( + loadYamlNode(overridesFileContents, sourceUrl: overridesLocation)); + } } on YamlException catch (error) { throw PubspecException(error.message, error.span); } - Map pubspecMap; - if (pubspecNode is YamlScalar && pubspecNode.value == null) { - pubspecMap = YamlMap(sourceUrl: location); - } else if (pubspecNode is YamlMap) { - pubspecMap = pubspecNode; - } else { - throw PubspecException( - 'The pubspec must be a YAML mapping.', pubspecNode.span); - } - return Pubspec.fromMap(pubspecMap, sources, - expectedName: expectedName, location: location); + overridesFields: overridesFileMap, + expectedName: expectedName, + location: location); + } + + /// Ensures that [node] is a mapping. + /// + /// If [node] is already a map it is returned. + /// If [node] is yaml-null an empty map is returned. + /// Otherwise an exception is thrown. + static YamlMap _ensureMap(YamlNode node) { + if (node is YamlScalar && node.value == null) { + return YamlMap(sourceUrl: node.span.sourceUrl); + } else if (node is YamlMap) { + return node; + } else { + throw PubspecException('The pubspec must be a YAML mapping.', node.span); + } } /// Returns a list of most errors in this pubspec. 
@@ -403,35 +444,44 @@ _collectError(() => dependencies); _collectError(() => devDependencies); _collectError(() => publishTo); - _collectError(() => features); _collectError(() => executables); _collectError(() => falseSecrets); _collectError(_ensureEnvironment); return errors; } +} - /// Parses the dependency field named [field], and returns the corresponding - /// map of dependency names to dependencies. - Map<String, PackageRange> _parseDependencies(String field, YamlNode? node) { - var dependencies = <String, PackageRange>{}; +/// Parses the dependency field named [field], and returns the corresponding +/// map of dependency names to dependencies. +Map<String, PackageRange> _parseDependencies( + String field, + YamlNode? node, + SourceRegistry sources, + LanguageVersion languageVersion, + String? packageName, + Uri? location, { + _FileType fileType = _FileType.pubspec, +}) { + var dependencies = <String, PackageRange>{}; - // Allow an empty dependencies key. - if (node == null || node.value == null) return dependencies; + // Allow an empty dependencies key. + if (node == null || node.value == null) return dependencies; - if (node is! YamlMap) { - _error('"$field" field must be a map.', node.span); - } + if (node is! YamlMap) { + _error('"$field" field must be a map.', node.span); + } - var nonStringNode = node.nodes.keys - .firstWhere((e) => e.value is! String, orElse: () => null); - if (nonStringNode != null) { - _error('A dependency name must be a string.', nonStringNode.span); - } + var nonStringNode = + node.nodes.keys.firstWhere((e) => e.value is! 
String, orElse: () => null); + if (nonStringNode != null) { + _error('A dependency name must be a string.', nonStringNode.span); + } - node.nodes.forEach((nameNode, specNode) { + node.nodes.forEach( + (nameNode, specNode) { var name = nameNode.value; var spec = specNode.value; - if (fields['name'] != null && name == this.name) { + if (packageName != null && name == packageName) { _error('A package may not list itself as a dependency.', nameNode.span); } @@ -439,12 +489,12 @@ String? sourceName; VersionConstraint versionConstraint = VersionRange(); - var features = const <String, FeatureDependency>{}; if (spec == null) { - sourceName = _sources!.defaultSource.name; + sourceName = null; } else if (spec is String) { - sourceName = _sources!.defaultSource.name; - versionConstraint = _parseVersionConstraint(specNode); + sourceName = null; + versionConstraint = + _parseVersionConstraint(specNode, packageName, fileType); } else if (spec is Map) { // Don't write to the immutable YAML map. spec = Map.from(spec); @@ -452,12 +502,11 @@ if (spec.containsKey('version')) { spec.remove('version'); - versionConstraint = _parseVersionConstraint(specMap.nodes['version']); - } - - if (spec.containsKey('features')) { - spec.remove('features'); - features = _parseDependencyFeatures(specMap.nodes['features']); + versionConstraint = _parseVersionConstraint( + specMap.nodes['version'], + packageName, + fileType, + ); } var sourceNames = spec.keys.toList(); @@ -482,152 +531,24 @@ // Let the source validate the description. var ref = _wrapFormatException('description', descriptionNode?.span, () { - String? pubspecPath; - var location = _location; + String? 
pubspecDir; if (location != null && _isFileUri(location)) { - pubspecPath = path.fromUri(_location); + pubspecDir = path.dirname(path.fromUri(location)); } - return _sources![sourceName]!.parseRef( + return sources(sourceName).parseRef( name, descriptionNode?.value, - containingPath: pubspecPath, + containingDir: pubspecDir, languageVersion: languageVersion, ); - }, targetPackage: name); + }, packageName, fileType, targetPackage: name); - dependencies[name] = - ref.withConstraint(versionConstraint).withFeatures(features); - }); + dependencies[name] = ref.withConstraint(versionConstraint); + }, + ); - return dependencies; - } - - /// Parses [node] to a [VersionConstraint]. - /// - /// If or [defaultUpperBoundConstraint] is specified then it will be set as - /// the max constraint if the original constraint doesn't have an upper - /// bound and it is compatible with [defaultUpperBoundConstraint]. - /// - /// If [ignoreUpperBound] the max constraint is ignored. - VersionConstraint _parseVersionConstraint(YamlNode? node, - {VersionConstraint? defaultUpperBoundConstraint, - bool ignoreUpperBound = false}) { - if (node?.value == null) { - return defaultUpperBoundConstraint ?? VersionConstraint.any; - } - if (node!.value is! String) { - _error('A version constraint must be a string.', node.span); - } - - return _wrapFormatException('version constraint', node.span, () { - var constraint = VersionConstraint.parse(node.value); - if (defaultUpperBoundConstraint != null && - constraint is VersionRange && - constraint.max == null && - defaultUpperBoundConstraint.allowsAny(constraint)) { - constraint = VersionConstraint.intersection( - [constraint, defaultUpperBoundConstraint]); - } - if (ignoreUpperBound && constraint is VersionRange) { - return VersionRange( - min: constraint.min, includeMin: constraint.includeMin); - } - return constraint; - }); - } - - /// Parses [node] to a map from feature names to whether those features are - /// enabled. 
- Map<String, FeatureDependency> _parseDependencyFeatures(YamlNode? node) { - if (node?.value == null) return const {}; - if (node is! YamlMap) _error('Features must be a map.', node!.span); - - return mapMap(node.nodes, - key: (dynamic nameNode, dynamic _) => _validateFeatureName(nameNode), - value: (dynamic _, dynamic valueNode) { - var value = valueNode.value; - if (value is bool) { - return value - ? FeatureDependency.required - : FeatureDependency.unused; - } else if (value is String && value == 'if available') { - return FeatureDependency.ifAvailable; - } else { - _error('Features must be true, false, or "if available".', - valueNode.span); - } - }); - } - - /// Verifies that [node] is a string and a valid feature name, and returns it - /// if so. - String _validateFeatureName(YamlNode node) { - var name = node.value; - if (name is! String) { - _error('A feature name must be a string.', node.span); - } else if (!packageNameRegExp.hasMatch(name)) { - _error('A feature name must be a valid Dart identifier.', node.span); - } - - return name; - } - - /// Verifies that [node] is a list of strings and returns it. - /// - /// If [validate] is passed, it's called for each string in [node]. - List<String> _parseStringList(YamlNode? node, - {void Function(String value, SourceSpan)? validate}) { - var list = _parseList(node); - for (var element in list.nodes) { - var value = element.value; - if (value is String) { - if (validate != null) validate(value, element.span); - } else { - _error('Must be a string.', element.span); - } - } - return list.cast<String>(); - } - - /// Verifies that [node] is a list and returns it. - YamlList _parseList(YamlNode? node) { - if (node == null || node.value == null) return YamlList(); - if (node is YamlList) return node; - _error('Must be a list.', node.span); - } - - /// Runs [fn] and wraps any [FormatException] it throws in a - /// [PubspecException]. 
- /// - /// [description] should be a noun phrase that describes whatever's being - /// parsed or processed by [fn]. [span] should be the location of whatever's - /// being processed within the pubspec. - /// - /// If [targetPackage] is provided, the value is used to describe the - /// dependency that caused the problem. - T _wrapFormatException<T>( - String description, SourceSpan? span, T Function() fn, - {String? targetPackage}) { - try { - return fn(); - } on FormatException catch (e) { - // If we already have a pub exception with a span, re-use that - if (e is PubspecException) rethrow; - - var msg = 'Invalid $description'; - if (targetPackage != null) { - msg = '$msg in the "$name" pubspec on the "$targetPackage" dependency'; - } - msg = '$msg: ${e.message}'; - _error(msg, span); - } - } - - /// Throws a [PubspecException] with the given message. - Never _error(String message, SourceSpan? span) { - throw PubspecException(message, span); - } + return dependencies; } /// Returns whether [uri] is a file URI. @@ -635,3 +556,91 @@ /// This is slightly more complicated than just checking if the scheme is /// 'file', since relative URIs also refer to the filesystem on the VM. bool _isFileUri(Uri uri) => uri.scheme == 'file' || uri.scheme == ''; + +/// Parses [node] to a [VersionConstraint]. +/// +/// If or [defaultUpperBoundConstraint] is specified then it will be set as +/// the max constraint if the original constraint doesn't have an upper +/// bound and it is compatible with [defaultUpperBoundConstraint]. +/// +/// If [ignoreUpperBound] the max constraint is ignored. +VersionConstraint _parseVersionConstraint( + YamlNode? node, String? packageName, _FileType fileType, + {VersionConstraint? defaultUpperBoundConstraint, + bool ignoreUpperBound = false}) { + if (node?.value == null) { + return defaultUpperBoundConstraint ?? VersionConstraint.any; + } + if (node!.value is! 
String) { + _error('A version constraint must be a string.', node.span); + } + + return _wrapFormatException('version constraint', node.span, () { + var constraint = VersionConstraint.parse(node.value); + if (defaultUpperBoundConstraint != null && + constraint is VersionRange && + constraint.max == null && + defaultUpperBoundConstraint.allowsAny(constraint)) { + constraint = VersionConstraint.intersection( + [constraint, defaultUpperBoundConstraint]); + } + if (ignoreUpperBound && constraint is VersionRange) { + return VersionRange( + min: constraint.min, includeMin: constraint.includeMin); + } + return constraint; + }, packageName, fileType); +} + +/// Runs [fn] and wraps any [FormatException] it throws in a +/// [PubspecException]. +/// +/// [description] should be a noun phrase that describes whatever's being +/// parsed or processed by [fn]. [span] should be the location of whatever's +/// being processed within the pubspec. +/// +/// If [targetPackage] is provided, the value is used to describe the +/// dependency that caused the problem. +T _wrapFormatException<T>( + String description, + SourceSpan? span, + T Function() fn, + String? packageName, + _FileType fileType, { + String? targetPackage, +}) { + try { + return fn(); + } on FormatException catch (e) { + // If we already have a pub exception with a span, re-use that + if (e is PubspecException) rethrow; + + var msg = 'Invalid $description'; + final typeName = _fileTypeName(fileType); + if (targetPackage != null) { + msg = '$msg in the "$packageName" $typeName on the "$targetPackage" ' + 'dependency'; + } + msg = '$msg: ${e.message}'; + _error(msg, span); + } +} + +/// Throws a [PubspecException] with the given message. +Never _error(String message, SourceSpan? 
span) { + throw PubspecException(message, span); +} + +enum _FileType { + pubspec, + pubspecOverrides, +} + +String _fileTypeName(_FileType type) { + switch (type) { + case _FileType.pubspec: + return 'pubspec'; + case _FileType.pubspecOverrides: + return 'pubspec override'; + } +}
diff --git a/lib/src/pubspec_utils.dart b/lib/src/pubspec_utils.dart index 251c8c9..014c8a7 100644 --- a/lib/src/pubspec_utils.dart +++ b/lib/src/pubspec_utils.dart
@@ -4,7 +4,6 @@ import 'dart:async'; -import 'package:meta/meta.dart'; import 'package:pub_semver/pub_semver.dart'; import 'package_name.dart'; @@ -46,7 +45,7 @@ Future<VersionConstraint> constrainToFirstWithNullSafety( PackageRange packageRange) async { final ref = packageRange.toRef(); - final available = await cache.source(ref.source).getVersions(ref); + final available = await cache.getVersions(ref); if (available.isEmpty) { return stripUpperBound(packageRange.constraint); } @@ -54,7 +53,7 @@ available.sort((x, y) => x.version.compareTo(y.version)); for (final p in available) { - final pubspec = await cache.source(ref.source).describe(p); + final pubspec = await cache.describe(p); if (pubspec.languageVersion.supportsNullSafety) { return VersionRange(min: p.version, includeMin: true); } @@ -70,13 +69,11 @@ var unconstrainedRange = packageRange; /// We only need to remove the upper bound if it is a hosted package. - if (packageRange.source is HostedSource) { + if (packageRange.description is HostedDescription) { unconstrainedRange = PackageRange( - packageRange.name, - packageRange.source, - await constrainToFirstWithNullSafety(packageRange), - packageRange.description, - features: packageRange.features); + packageRange.toRef(), + await constrainToFirstWithNullSafety(packageRange), + ); } return unconstrainedRange; })); @@ -121,14 +118,12 @@ var unconstrainedRange = packageRange; /// We only need to remove the upper bound if it is a hosted package. 
- if (packageRange.source is HostedSource && + if (packageRange.description is HostedDescription && (stripOnly!.isEmpty || stripOnly.contains(packageRange.name))) { unconstrainedRange = PackageRange( - packageRange.name, - packageRange.source, - stripUpperBound(packageRange.constraint), - packageRange.description, - features: packageRange.features); + packageRange.toRef(), + stripUpperBound(packageRange.constraint), + ); } result.add(unconstrainedRange); } @@ -148,7 +143,6 @@ /// Removes the upper bound of [constraint]. If [constraint] is the /// empty version constraint, [VersionConstraint.empty] will be returned. -@visibleForTesting VersionConstraint stripUpperBound(VersionConstraint constraint) { ArgumentError.checkNotNull(constraint, 'constraint');
diff --git a/lib/src/solver/failure.dart b/lib/src/solver/failure.dart index 60f70c5..23b2e2f 100644 --- a/lib/src/solver/failure.dart +++ b/lib/src/solver/failure.dart
@@ -373,7 +373,7 @@ /// but each has a different source, those incompatibilities should explicitly /// print their sources, and similarly for differing descriptions. Map<String, PackageDetail> _detailsForCause(ConflictCause cause) { - var conflictPackages = <String, PackageName>{}; + var conflictPackages = <String, PackageRange>{}; for (var term in cause.conflict.terms) { if (term.package.isRoot) continue; conflictPackages[term.package.name] = term.package; @@ -384,10 +384,11 @@ var conflictPackage = conflictPackages[term.package.name]; if (term.package.isRoot) continue; if (conflictPackage == null) continue; - if (conflictPackage.source != term.package.source) { + if (conflictPackage.description.source != + term.package.description.source) { details[term.package.name] = const PackageDetail(showSource: true, showVersion: false); - } else if (!conflictPackage.samePackage(term.package)) { + } else if (conflictPackage.toRef() != term.package.toRef()) { details[term.package.name] = const PackageDetail(showDescription: true, showVersion: false); }
diff --git a/lib/src/solver/package_lister.dart b/lib/src/solver/package_lister.dart index 8e1abed..1198864 100644 --- a/lib/src/solver/package_lister.dart +++ b/lib/src/solver/package_lister.dart
@@ -15,7 +15,6 @@ import '../package_name.dart'; import '../pubspec.dart'; import '../sdk.dart'; -import '../source.dart'; import '../system_cache.dart'; import '../utils.dart'; import 'incompatibility.dart'; @@ -43,13 +42,12 @@ // This is `null` if there is no retracted version that can be allowed. final Version? _allowedRetractedVersion; - /// The source from which [_ref] comes. - final BoundSource _source; + final SystemCache _systemCache; /// The type of the dependency from the root package onto [_ref]. final DependencyType _dependencyType; - /// The set of package names that were overridden by the root package. + /// The set of packages that were overridden by the root package. final Set<String> _overriddenPackages; /// Whether this is a downgrade, in which case the package priority should be @@ -83,7 +81,7 @@ Future<List<PackageId>> get _versions => _versionsMemo.runOnce(() async { var cachedVersions = (await withDependencyType( _dependencyType, - () => _source.getVersions(_ref, + () => _systemCache.getVersions(_ref, allowedRetractedVersion: _allowedRetractedVersion))) ..sort((id1, id2) => id1.version.compareTo(id2.version)); _cachedVersions = cachedVersions; @@ -97,28 +95,27 @@ _latestMemo.runOnce(() => bestVersion(VersionConstraint.any)); final _latestMemo = AsyncMemoizer<PackageId?>(); - /// Creates a package lister for the dependency identified by [ref]. + /// Creates a package lister for the dependency identified by [_ref]. PackageLister( - SystemCache cache, + this._systemCache, this._ref, this._locked, this._dependencyType, this._overriddenPackages, this._allowedRetractedVersion, {bool downgrade = false}) - : _source = cache.source(_ref.source), - _isDowngrade = downgrade; + : _isDowngrade = downgrade; /// Creates a package lister for the root [package]. 
- PackageLister.root(Package package) + PackageLister.root(Package package, this._systemCache) : _ref = PackageRef.root(package), - _source = _RootSource(package), // Treat the package as locked so we avoid the logic for finding the // boundaries of various constraints, which is useless for the root // package. _locked = PackageId.root(package), _dependencyType = DependencyType.none, - _overriddenPackages = const UnmodifiableSetView.empty(), + _overriddenPackages = + Set.unmodifiable(package.dependencyOverrides.keys), _isDowngrade = false, _allowedRetractedVersion = null; @@ -144,7 +141,8 @@ /// Throws a [PackageNotFoundException] if this lister's package doesn't /// exist. Future<PackageId?> bestVersion(VersionConstraint? constraint) async { - if (_locked != null && constraint!.allows(_locked!.version)) return _locked; + final locked = _locked; + if (locked != null && constraint!.allows(locked.version)) return locked; var versions = await _versions; @@ -169,9 +167,12 @@ if (isPastLimit(id.version)) break; if (!constraint!.allows(id.version)) continue; - if (!id.version.isPreRelease) return id; + if (!id.version.isPreRelease) { + return id; + } bestPrerelease ??= id; } + return bestPrerelease; } @@ -187,8 +188,8 @@ Pubspec pubspec; try { - pubspec = - await withDependencyType(_dependencyType, () => _source.describe(id)); + pubspec = await withDependencyType( + _dependencyType, () => _systemCache.describe(id)); } on PubspecException catch (error) { // The lockfile for the pubspec couldn't be parsed, log.fine('Failed to parse pubspec for $id:\n$error'); @@ -227,12 +228,12 @@ var incompatibilities = <Incompatibility>[]; for (var range in pubspec.dependencies.values) { - if (pubspec.dependencyOverrides.containsKey(range.name)) continue; + if (_overriddenPackages.contains(range.name)) continue; incompatibilities.add(_dependency(depender, range)); } for (var range in pubspec.devDependencies.values) { - if (pubspec.dependencyOverrides.containsKey(range.name)) continue; + 
if (_overriddenPackages.contains(range.name)) continue; incompatibilities.add(_dependency(depender, range)); } @@ -413,7 +414,7 @@ Future<Pubspec> _describeSafe(PackageId id) async { try { return await withDependencyType( - _dependencyType, () => _source.describe(id)); + _dependencyType, () => _systemCache.describe(id)); } catch (_) { return Pubspec(id.name, version: id.version); } @@ -430,44 +431,3 @@ return sdk.isAvailable && constraint.allows(sdk.version!); } } - -/// A fake source that contains only the root package. -/// -/// This only implements the subset of the [BoundSource] API that -/// [PackageLister] uses to find information about packages. -class _RootSource extends BoundSource { - /// An error to throw for unused source methods. - UnsupportedError get _unsupported => - UnsupportedError('_RootSource is not a full source.'); - - /// The entrypoint package. - final Package _package; - - _RootSource(this._package); - - @override - Future<List<PackageId>> getVersions(PackageRef ref, - {Duration? maxAge, Version? allowedRetractedVersion}) { - assert(ref.isRoot); - return Future.value([PackageId.root(_package)]); - } - - @override - Future<Pubspec> describe(PackageId id) { - assert(id.isRoot); - return Future.value(_package.pubspec); - } - - @override - Source get source => throw _unsupported; - @override - SystemCache get systemCache => throw _unsupported; - @override - Future<List<PackageId>> doGetVersions(PackageRef ref, Duration? maxAge) => - throw _unsupported; - @override - Future<Pubspec> doDescribe(PackageId id) => throw _unsupported; - @override - String getDirectory(PackageId id, {String? relativeFrom}) => - throw _unsupported; -}
diff --git a/lib/src/solver/partial_solution.dart b/lib/src/solver/partial_solution.dart index 40d2611..db36417 100644 --- a/lib/src/solver/partial_solution.dart +++ b/lib/src/solver/partial_solution.dart
@@ -137,7 +137,7 @@ if (assignment.package.name != term.package.name) continue; if (!assignment.package.isRoot && - !assignment.package.samePackage(term.package)) { + assignment.package.toRef() != term.package.toRef()) { // not foo from hosted has no bearing on foo from git if (!assignment.isPositive) continue;
diff --git a/lib/src/solver/reformat_ranges.dart b/lib/src/solver/reformat_ranges.dart index 5c763a6..2229536 100644 --- a/lib/src/solver/reformat_ranges.dart +++ b/lib/src/solver/reformat_ranges.dart
@@ -53,15 +53,20 @@ if (min == null && max == null) return term; return Term( - term.package - .withConstraint(VersionRange( - min: min ?? range.min, - max: max ?? range.max, - includeMin: range.includeMin, - includeMax: includeMax ?? range.includeMax, - alwaysIncludeMaxPreRelease: true)) - .withTerseConstraint(), - term.isPositive); + term.package + .toRef() + .withConstraint( + VersionRange( + min: min ?? range.min, + max: max ?? range.max, + includeMin: range.includeMin, + includeMax: includeMax ?? range.includeMax, + alwaysIncludeMaxPreRelease: true, + ), + ) + .withTerseConstraint(), + term.isPositive, + ); } /// Returns the new minimum version to use for [range], or `null` if it doesn't
diff --git a/lib/src/solver/report.dart b/lib/src/solver/report.dart index 3b72cd8..dab29c6 100644 --- a/lib/src/solver/report.dart +++ b/lib/src/solver/report.dart
@@ -10,7 +10,7 @@ import '../log.dart' as log; import '../package.dart'; import '../package_name.dart'; -import '../source_registry.dart'; +import '../source/root.dart'; import '../system_cache.dart'; import '../utils.dart'; import 'result.dart'; @@ -23,7 +23,6 @@ /// It's a report builder. class SolveReport { final SolveType _type; - final SourceRegistry _sources; final Package _root; final LockFile _previousLockFile; final SolveResult _result; @@ -34,8 +33,8 @@ final _output = StringBuffer(); - SolveReport(this._type, this._sources, this._root, this._previousLockFile, - this._result, this._cache) { + SolveReport(this._type, this._root, this._previousLockFile, this._result, + this._cache) { // Fill the map so we can use it later. for (var id in _result.packages) { _dependencies[id.name] = id; @@ -148,9 +147,9 @@ Future<void> reportDiscontinued() async { var numDiscontinued = 0; for (var id in _result.packages) { - if (id.source == null) continue; + if (id.description is RootDescription) continue; final status = - await _cache.source(id.source).status(id, maxAge: Duration(days: 3)); + await id.source.status(id, _cache, maxAge: Duration(days: 3)); if (status.isDiscontinued && (_root.dependencyType(id.name) == DependencyType.direct || _root.dependencyType(id.name) == DependencyType.dev)) { @@ -229,7 +228,7 @@ } else if (oldId == null) { icon = log.green('+ '); addedOrRemoved = true; - } else if (!oldId.samePackage(newId)) { + } else if (oldId.description != newId.description) { icon = log.cyan('* '); changed = true; } else if (oldId.version < newId.version) { @@ -261,7 +260,7 @@ } } final status = - await _cache.source(id.source).status(id, maxAge: Duration(days: 3)); + await id.source.status(id, _cache, maxAge: Duration(days: 3)); if (status.isRetracted) { if (newerStable) { @@ -323,8 +322,8 @@ void _writeId(PackageId id) { _output.write(id.version); - if (id.source != _sources.defaultSource) { - var description = id.source!.formatDescription(id.description); + if 
(id.source != _cache.defaultSource) { + var description = id.description.format(); _output.write(' from ${id.source} $description'); } }
diff --git a/lib/src/solver/result.dart b/lib/src/solver/result.dart index b8ff0e5..7d57c36 100644 --- a/lib/src/solver/result.dart +++ b/lib/src/solver/result.dart
@@ -5,6 +5,7 @@ import 'package:collection/collection.dart'; import 'package:pub_semver/pub_semver.dart'; +import '../http.dart'; import '../io.dart'; import '../lock_file.dart'; import '../log.dart' as log; @@ -12,8 +13,8 @@ import '../package_name.dart'; import '../pub_embeddable_command.dart'; import '../pubspec.dart'; +import '../source/cached.dart'; import '../source/hosted.dart'; -import '../source_registry.dart'; import '../system_cache.dart'; import 'report.dart'; import 'type.dart'; @@ -74,10 +75,19 @@ overriddenDependencies: MapKeySet(_root.dependencyOverrides)); } - final SourceRegistry _sources; - final LockFile _previousLockFile; + /// Downloads all cached packages in [packages]. + Future<void> downloadCachedPackages(SystemCache cache) async { + await Future.wait(packages.map((id) async { + final source = id.source; + if (source is! CachedSource) return; + return await withDependencyType(_root.dependencyType(id.name), () async { + await source.downloadToSystemCache(id, cache); + }); + })); + } + /// Returns the names of all packages that were changed. /// /// This includes packages that were added or removed. @@ -92,22 +102,14 @@ .toSet()); } - SolveResult( - this._sources, - this._root, - this._previousLockFile, - this.packages, - this.pubspecs, - this.availableVersions, - this.attemptedSolutions, - this.resolutionTime); + SolveResult(this._root, this._previousLockFile, this.packages, this.pubspecs, + this.availableVersions, this.attemptedSolutions, this.resolutionTime); /// Displays a report of what changes were made to the lockfile. /// /// [type] is the type of version resolution that was run. 
Future<void> showReport(SolveType type, SystemCache cache) async { - await SolveReport(type, _sources, _root, _previousLockFile, this, cache) - .show(); + await SolveReport(type, _root, _previousLockFile, this, cache).show(); } /// Displays a one-line message summarizing what changes were made (or would @@ -119,8 +121,7 @@ /// [type] is the type of version resolution that was run. Future<void> summarizeChanges(SolveType type, SystemCache cache, {bool dryRun = false}) async { - final report = - SolveReport(type, _sources, _root, _previousLockFile, this, cache); + final report = SolveReport(type, _root, _previousLockFile, this, cache); report.summarize(dryRun: dryRun); if (type == SolveType.upgrade) { await report.reportDiscontinued();
diff --git a/lib/src/solver/term.dart b/lib/src/solver/term.dart index 266fabc..48133f5 100644 --- a/lib/src/solver/term.dart +++ b/lib/src/solver/term.dart
@@ -153,8 +153,9 @@ Term? difference(Term other) => intersect(other.inverse); // A ∖ B → A ∩ not B /// Returns whether [other] is compatible with [package]. - bool _compatiblePackage(PackageRange other) => - package.isRoot || other.isRoot || other.samePackage(package); + bool _compatiblePackage(PackageRange other) { + return package.isRoot || other.isRoot || other.toRef() == package.toRef(); + } /// Returns a new [Term] with the same package as [this] and with /// [constraint], unless that would produce a term that allows no packages, @@ -162,7 +163,7 @@ Term? _nonEmptyTerm(VersionConstraint constraint, bool isPositive) => constraint.isEmpty ? null - : Term(package.withConstraint(constraint), isPositive); + : Term(package.toRef().withConstraint(constraint), isPositive); @override String toString() => "${isPositive ? '' : 'not '}$package";
diff --git a/lib/src/solver/version_solver.dart b/lib/src/solver/version_solver.dart index d4e3a75..68e0dbf 100644 --- a/lib/src/solver/version_solver.dart +++ b/lib/src/solver/version_solver.dart
@@ -5,7 +5,6 @@ import 'dart:async'; import 'dart:math' as math; -import 'package:collection/collection.dart'; import 'package:pub_semver/pub_semver.dart'; import '../exceptions.dart'; @@ -80,7 +79,7 @@ VersionSolver(this._type, this._systemCache, this._root, this._lockFile, Iterable<String> unlock) - : _dependencyOverrides = _root.pubspec.dependencyOverrides, + : _dependencyOverrides = _root.dependencyOverrides, _unlock = {...unlock}; /// Finds a set of dependencies that match the root package's constraints, or @@ -326,7 +325,7 @@ for (var candidate in unsatisfied) { if (candidate.source is! UnknownSource) continue; _addIncompatibility(Incompatibility( - [Term(candidate.withConstraint(VersionConstraint.any), true)], + [Term(candidate.toRef().withConstraint(VersionConstraint.any), true)], IncompatibilityCause.unknownSource)); return candidate.name; } @@ -345,7 +344,7 @@ version = await _packageLister(package).bestVersion(package.constraint); } on PackageNotFoundException catch (error) { _addIncompatibility(Incompatibility( - [Term(package.withConstraint(VersionConstraint.any), true)], + [Term(package.toRef().withConstraint(VersionConstraint.any), true)], PackageNotFoundCause(error))); return package.name; } @@ -412,12 +411,11 @@ if (id.isRoot) { pubspecs[id.name] = _root.pubspec; } else { - pubspecs[id.name] = await _systemCache.source(id.source).describe(id); + pubspecs[id.name] = await _systemCache.describe(id); } } return SolveResult( - _systemCache.sources, _root, _lockFile, decisions, @@ -448,9 +446,8 @@ List<PackageId> ids; try { ids = package.source is HostedSource - ? (await _systemCache - .source(package.source) - .getVersions(package.toRef(), maxAge: Duration(days: 3))) + ? await _systemCache.getVersions(package.toRef(), + maxAge: Duration(days: 3)) : [package]; } on Exception { ids = <PackageId>[package]; @@ -463,20 +460,20 @@ } /// Returns the package lister for [package], creating it if necessary. 
- PackageLister _packageLister(PackageName package) { + PackageLister _packageLister(PackageRange package) { var ref = package.toRef(); return _packageListers.putIfAbsent(ref, () { - if (ref.isRoot) return PackageLister.root(_root); + if (ref.isRoot) return PackageLister.root(_root, _systemCache); var locked = _getLocked(ref.name); - if (locked != null && !locked.samePackage(ref)) locked = null; + if (locked != null && locked.toRef() != ref) locked = null; - Set<String> overridden = MapKeySet(_dependencyOverrides); - if (overridden.contains(package.name)) { + final overridden = <String>{ + ..._dependencyOverrides.keys, // If the package is overridden, ignore its dependencies back onto the // root package. - overridden = Set.from(overridden)..add(_root.name); - } + if (_dependencyOverrides.containsKey(package.name)) _root.name + }; return PackageLister( _systemCache, @@ -505,7 +502,7 @@ // can't be downgraded. if (_type == SolveType.downgrade) { var locked = _lockFile.packages[package]; - if (locked != null && !locked.source!.hasMultipleVersions) return locked; + if (locked != null && !locked.source.hasMultipleVersions) return locked; } if (_unlock.isEmpty || _unlock.contains(package)) return null;
diff --git a/lib/src/source.dart b/lib/src/source.dart index add55fa..b6faba6 100644 --- a/lib/src/source.dart +++ b/lib/src/source.dart
@@ -4,53 +4,42 @@ import 'dart:async'; -import 'package:collection/collection.dart' show IterableNullableExtension; import 'package:pub_semver/pub_semver.dart'; import 'exceptions.dart'; import 'language_version.dart'; import 'package_name.dart'; import 'pubspec.dart'; +import 'source/git.dart'; import 'system_cache.dart'; /// A source from which to get packages. /// -/// Each source has many packages that it looks up using [PackageId]s. Sources -/// that inherit this directly (currently just [PathSource]) are *uncached* -/// sources. They deliver a package directly to the package that depends on it. +/// Each source has many packages that it looks up using [PackageRef]s. /// /// Other sources are *cached* sources. These extend [CachedSource]. When a /// package needs a dependency from a cached source, it is first installed in /// the [SystemCache] and then acquired from there. /// -/// Each user-visible source has two classes: a [Source] that knows how to do -/// filesystem-independent operations like parsing and comparing descriptions, -/// and a [BoundSource] that knows how to actually install (and potentially -/// download) those packages. Only the [BoundSource] has access to the -/// [SystemCache]. +/// Methods on [Source] that depends on the cache will take it as an argument. /// -/// ## Subclassing +/// ## Types of description /// -/// All [Source]s should extend this class and all [BoundSource]s should extend -/// [BoundSource]. In addition to defining the behavior of various methods, -/// sources define the structure of package descriptions used in [PackageRef]s, -/// [PackageRange]s, and [PackageId]s. There are three distinct types of -/// description, although in practice most sources use the same format for one -/// or more of these: +/// * Pubspec.yaml descriptions. These are included in pubspecs and usually +/// written by hand. They're typically more flexible in the formats they allow +/// to optimize for ease of authoring. 
/// -/// * User descriptions. These are included in pubspecs and usually written by -/// hand. They're typically more flexible in the formats they allow to -/// optimize for ease of authoring. -/// -/// * Reference descriptions. These are the descriptions in [PackageRef]s and +/// * [Description]s. These are the descriptions in [PackageRef]s and /// [PackageRange]. They're parsed directly from user descriptions using -/// [parseRef], and so add no additional information. +/// [Source.parseRef]. Internally relative paths are stored absolute, such +/// that they can be serialized elsewhere. /// -/// * ID descriptions. These are the descriptions in [PackageId]s, which +/// * [ResolvedDescription]s. These are the descriptions in [PackageId]s, which /// uniquely identify and provide the means to locate the concrete code of a /// package. They may contain additional expensive-to-compute information /// relative to the corresponding reference descriptions. These are the -/// descriptions stored in lock files. +/// descriptions stored in lock files. (This is mainly relevant for the +/// resolved-ref of GitDescriptions.) abstract class Source { /// The name of the source. /// @@ -64,25 +53,20 @@ /// Defaults to `false`. bool get hasMultipleVersions => false; - /// Records the system cache to which this source belongs. - /// - /// This should only be called once for each source, by - /// [SystemCache.register]. It should not be overridden by base classes. - BoundSource bind(SystemCache systemCache); - /// Parses a [PackageRef] from a name and a user-provided [description]. /// /// When a [Pubspec] is parsed, it reads in the description for each /// dependency. It is up to the dependency's [Source] to determine how that /// should be interpreted. This will be called during parsing to validate that /// the given [description] is well-formed according to this source, and to - /// give the source a chance to canonicalize the description. 
- /// For simple hosted dependencies like `foo:` or `foo: ^1.2.3`, the - /// [description] may also be `null`. + /// give the source a chance to canonicalize the description. For simple + /// hosted dependencies like `foo:` or `foo: ^1.2.3`, the [description] may + /// also be `null`. /// - /// [containingPath] is the path to the pubspec where this description - /// appears. It may be `null` if the description is coming from some in-memory - /// source (such as pulling down a pubspec from pub.dartlang.org). + /// [containingDir] is the path to the directory of the pubspec where this + /// description appears. It may be `null` if the description is coming from + /// some in-memory source (such as pulling down a pubspec from + /// pub.dartlang.org). /// /// [languageVersion] is the minimum Dart version parsed from the pubspec's /// `environment` field. Source implementations may use this parameter to only @@ -95,7 +79,7 @@ PackageRef parseRef( String name, description, { - String? containingPath, + String? containingDir, required LanguageVersion languageVersion, }); @@ -104,60 +88,17 @@ /// This only accepts descriptions serialized using [serializeDescription]. It /// should not be used with user-authored descriptions. /// - /// [containingPath] is the path to the lockfile where this description - /// appears. It may be `null` if the description is coming from some in-memory - /// source. + /// [containingDir] is the path to the directory of the lockfile where this + /// description appears. It may be `null` if the description is coming from + /// some in-memory source. /// /// Throws a [FormatException] if the description is not valid. PackageId parseId(String name, Version version, description, - {String? containingPath}); - - /// When a [LockFile] is serialized, it uses this method to get the - /// [description] in the right format. - /// - /// [containingPath] is the containing directory of the root package. 
- dynamic serializeDescription(String containingPath, description) { - return description; - } - - /// When a package [description] is shown to the user, this is called to - /// convert it into a human-friendly form. - /// - /// By default, it just converts the description to a string, but sources - /// may customize this. - String formatDescription(description) { - return description.toString(); - } - - /// Returns whether or not [description1] describes the same package as - /// [description2] for this source. - /// - /// This method should be light-weight. It doesn't need to validate that - /// either package exists. - /// - /// Note that either description may be a reference description or an ID - /// description; they need not be the same type. ID descriptions should be - /// considered equal to the reference descriptions that produced them. - bool descriptionsEqual(description1, description2); - - /// Returns a hash code for [description]. - /// - /// Descriptions that compare equal using [descriptionsEqual] should return - /// the same hash code. - int hashDescription(description); + {String? containingDir}); /// Returns the source's name. @override String toString() => name; -} - -/// A source bound to a [SystemCache]. -abstract class BoundSource { - /// The unbound source that produced [this]. - Source get source; - - /// The system cache to which [this] is bound. - SystemCache get systemCache; /// Get the IDs of all versions that match [ref]. /// @@ -168,84 +109,8 @@ /// /// By default, this assumes that each description has a single version and /// uses [describe] to get that version. - /// - /// Sources should not override this. Instead, they implement [doGetVersions]. - /// - /// If [maxAge] is given answers can be taken from cache - up to that age old. - /// - /// If given, the [allowedRetractedVersion] is the only version which can be - /// selected even if it is marked as retracted. 
Otherwise, all the returned - /// IDs correspond to non-retracted versions. - Future<List<PackageId>> getVersions(PackageRef ref, - {Duration? maxAge, Version? allowedRetractedVersion}) async { - if (ref.isRoot) { - throw ArgumentError('Cannot get versions for the root package.'); - } - if (ref.source != source) { - throw ArgumentError('Package $ref does not use source ${source.name}.'); - } - - var versions = await doGetVersions(ref, maxAge); - - versions = (await Future.wait(versions.map((id) async { - final packageStatus = await status(id, maxAge: maxAge); - if (!packageStatus.isRetracted || id.version == allowedRetractedVersion) { - return id; - } - return null; - }))) - .whereNotNull() - .toList(); - - return versions; - } - - /// Get the IDs of all versions that match [ref]. - /// - /// Note that this does *not* require the packages to be downloaded locally, - /// which is the point. This is used during version resolution to determine - /// which package versions are available to be downloaded (or already - /// downloaded). - /// - /// By default, this assumes that each description has a single version and - /// uses [describe] to get that version. - /// - /// This method is effectively protected: subclasses must implement it, but - /// external code should not call this. Instead, call [getVersions]. - Future<List<PackageId>> doGetVersions(PackageRef ref, Duration? maxAge); - - /// A cache of pubspecs described by [describe]. - final _pubspecs = <PackageId, Pubspec>{}; - - /// Loads the (possibly remote) pubspec for the package version identified by - /// [id]. - /// - /// This may be called for packages that have not yet been downloaded during - /// the version resolution process. Its results are automatically memoized. - /// - /// Throws a [DataException] if the pubspec's version doesn't match [id]'s - /// version. - /// - /// Sources should not override this. Instead, they implement [doDescribe]. 
- Future<Pubspec> describe(PackageId id) async { - if (id.isRoot) throw ArgumentError('Cannot describe the root package.'); - if (id.source != source) { - throw ArgumentError('Package $id does not use source ${source.name}.'); - } - - var pubspec = _pubspecs[id]; - if (pubspec != null) return pubspec; - - // Delegate to the overridden one. - pubspec = await doDescribe(id); - if (pubspec.version != id.version) { - throw PackageNotFoundException( - 'the pubspec for $id has version ${pubspec.version}'); - } - - _pubspecs[id] = pubspec; - return pubspec; - } + Future<List<PackageId>> doGetVersions( + PackageRef ref, Duration? maxAge, SystemCache cache); /// Loads the (possibly remote) pubspec for the package version identified by /// [id]. @@ -257,9 +122,7 @@ /// This may be called for packages that have not yet been downloaded during /// the version resolution process. /// - /// This method is effectively protected: subclasses must implement it, but - /// external code should not call this. Instead, call [describe]. - Future<Pubspec> doDescribe(PackageId id); + Future<Pubspec> doDescribe(PackageId id, SystemCache cache); /// Returns the directory where this package can (or could) be found locally. /// @@ -267,7 +130,8 @@ /// /// If id is a relative path id, the directory will be relative from /// [relativeFrom]. Returns an absolute path if [relativeFrom] is not passed. - String getDirectory(PackageId id, {String? relativeFrom}); + String doGetDirectory(PackageId id, SystemCache cache, + {String? relativeFrom}); /// Returns metadata about a given package. /// @@ -275,19 +139,62 @@ /// [maxAge]. If [maxAge] is not given, the information is not cached. /// /// In the case of offline sources, [maxAge] is not used, since information is - /// per definiton cached. - Future<PackageStatus> status(PackageId id, {Duration? maxAge}) async => - // Default implementation has no metadata. 
- PackageStatus(); - - /// Stores [pubspec] so it's returned when [describe] is called with [id]. - /// - /// This is notionally protected; it should only be called by subclasses. - void memoizePubspec(PackageId id, Pubspec pubspec) { - _pubspecs[id] = pubspec; + /// per definition cached. + Future<PackageStatus> status( + PackageId id, + SystemCache cache, { + Duration? maxAge, + }) async { + return PackageStatus(); + } } +/// The information needed to get a version-listing of a named package from a +/// [Source]. +/// +/// For a hosted package this would be the host url. +/// +/// For a git package this would be the repo url and a ref and a path inside +/// the repo. +/// +/// This is the information that goes into a `pubspec.yaml` dependency together +/// with a version constraint. +abstract class Description { + Source get source; + Object? serializeForPubspec( + {required String? containingDir, + required LanguageVersion languageVersion}); + + /// Converts `this` into a human-friendly form to show the user. + /// + /// Paths are always relative to current dir. + String format(); +} + +/// A resolved description is a [Description] plus whatever information you need +/// to lock down a specific version. +/// +/// This is currently only relevant for the [GitSource] that resolves the +/// [Description.ref] to a specific commit id in [GitSource.doGetVersions]. +/// +/// This is the information that goes into a `pubspec.lock` file together with +/// a version number (that is represented by a [PackageId]). +abstract class ResolvedDescription { + final Description description; + ResolvedDescription(this.description); + + /// When a [LockFile] is serialized, it uses this method to get the + /// [description] in the right format. + /// + /// [containingDir] is the containing directory of the root package. + Object? serializeForLockfile({required String? containingDir}); + + /// Converts `this` into a human-friendly form to show the user. 
+ /// + /// Paths are always relative to current dir. + String format() => description.format(); +} + /// Metadata about a [PackageId]. class PackageStatus { /// `null` if not [isDiscontinued]. Otherwise contains the @@ -296,8 +203,9 @@ final String? discontinuedReplacedBy; final bool isDiscontinued; final bool isRetracted; - PackageStatus( - {this.isDiscontinued = false, - this.discontinuedReplacedBy, - this.isRetracted = false}); + PackageStatus({ + this.isDiscontinued = false, + this.discontinuedReplacedBy, + this.isRetracted = false, + }); }
diff --git a/lib/src/source/cached.dart b/lib/src/source/cached.dart index 0c8574d..16a7313 100644 --- a/lib/src/source/cached.dart +++ b/lib/src/source/cached.dart
@@ -5,12 +5,14 @@ import 'dart:async'; import 'package:path/path.dart' as path; +import 'package:pub_semver/pub_semver.dart'; import '../io.dart'; import '../package.dart'; import '../package_name.dart'; import '../pubspec.dart'; import '../source.dart'; +import '../system_cache.dart'; /// Base class for a [BoundSource] that installs packages into pub's /// [SystemCache]. @@ -19,55 +21,51 @@ /// packages or the package needs to be "frozen" at the point in time that it's /// installed. (For example, Git packages are cached because installing from /// the same repo over time may yield different commits.) -abstract class CachedSource extends BoundSource { - /// The root directory of this source's cache within the system cache. - /// - /// This shouldn't be overridden by subclasses. - String get systemCacheRoot => path.join(systemCache.rootDir, source.name); - +abstract class CachedSource extends Source { /// If [id] is already in the system cache, just loads it from there. /// /// Otherwise, defers to the subclass. @override - Future<Pubspec> doDescribe(PackageId id) async { - var packageDir = getDirectoryInCache(id); + Future<Pubspec> doDescribe(PackageId id, SystemCache cache) async { + var packageDir = getDirectoryInCache(id, cache); if (fileExists(path.join(packageDir, 'pubspec.yaml'))) { - return Pubspec.load(packageDir, systemCache.sources, - expectedName: id.name); + return Pubspec.load(packageDir, cache.sources, expectedName: id.name); } - return await describeUncached(id); + return await describeUncached(id, cache); } @override - String getDirectory(PackageId id, {String? relativeFrom}) => - getDirectoryInCache(id); + String doGetDirectory(PackageId id, SystemCache cache, + {String? relativeFrom}) => + getDirectoryInCache(id, cache); - String getDirectoryInCache(PackageId id); + String getDirectoryInCache(PackageId id, SystemCache cache); /// Loads the (possibly remote) pubspec for the package version identified by /// [id]. 
/// /// This will only be called for packages that have not yet been installed in /// the system cache. - Future<Pubspec> describeUncached(PackageId id); + Future<Pubspec> describeUncached(PackageId id, SystemCache cache); /// Determines if the package identified by [id] is already downloaded to the /// system cache. - bool isInSystemCache(PackageId id) => dirExists(getDirectoryInCache(id)); + bool isInSystemCache(PackageId id, SystemCache cache) => + dirExists(getDirectoryInCache(id, cache)); /// Downloads the package identified by [id] to the system cache. - Future<Package> downloadToSystemCache(PackageId id); + Future<Package> downloadToSystemCache(PackageId id, SystemCache cache); /// Returns the [Package]s that have been downloaded to the system cache. - List<Package> getCachedPackages(); + List<Package> getCachedPackages(SystemCache cache); /// Reinstalls all packages that have been previously installed into the /// system cache by this source. /// /// Returns a list of results indicating for each if that package was /// successfully repaired. - Future<Iterable<RepairResult>> repairCachedPackages(); + Future<Iterable<RepairResult>> repairCachedPackages(SystemCache cache); } /// The result of repairing a single cache entry. @@ -78,6 +76,13 @@ /// When something goes wrong the package is attempted removed from /// cache (but that might itself have failed). final bool success; - final PackageId package; - RepairResult(this.package, {required this.success}); + final String packageName; + final Version version; + final Source source; + RepairResult( + this.packageName, + this.version, + this.source, { + required this.success, + }); }
diff --git a/lib/src/source/git.dart b/lib/src/source/git.dart index 0d98ae9..0d26283 100644 --- a/lib/src/source/git.dart +++ b/lib/src/source/git.dart
@@ -23,62 +23,66 @@ import 'cached.dart'; /// A package source that gets packages from Git repos. -class GitSource extends Source { +class GitSource extends CachedSource { + static GitSource instance = GitSource._(); + + GitSource._(); + @override final name = 'git'; @override - BoundGitSource bind(SystemCache systemCache) => - BoundGitSource(this, systemCache); - - /// Given a valid git package description, returns the URL of the repository - /// it pulls from. - /// If the url is relative, it will be returned relative to current working - /// directory. - String urlFromDescription(description) { - var url = description['url']; - if (description['relative'] == true) { - return p.url.relative(url, from: p.toUri(p.current).toString()); - } - return url; - } - - @override PackageRef parseRef( String name, - description, { - String? containingPath, + Object? description, { + String? containingDir, LanguageVersion? languageVersion, }) { - dynamic url; - dynamic ref; - dynamic path; + String url; + String? ref; + String? path; if (description is String) { url = description; } else if (description is! Map) { throw FormatException('The description must be a Git URL or a map ' "with a 'url' key."); } else { - url = description['url']; + final descriptionUrl = description['url']; + if (descriptionUrl is! String) { + throw FormatException( + "The 'url' field of a description must be a string."); + } + url = descriptionUrl; - ref = description['ref']; - if (ref != null && ref is! String) { + final descriptionRef = description['ref']; + if (descriptionRef is! String?) { throw FormatException("The 'ref' field of the description must be a " 'string.'); } + ref = descriptionRef; - path = description['path']; + final descriptionPath = description['path']; + if (descriptionPath is! String?) 
{ + throw FormatException("The 'path' field of the description must be a " + 'string.'); + } + path = descriptionPath; } - return PackageRef(name, this, { - ..._validatedUrl(url, containingPath), - 'ref': ref ?? 'HEAD', - 'path': _validatedPath(path), - }); + + return PackageRef( + name, + GitDescription( + url: url, + containingDir: containingDir, + ref: ref, + path: _validatedPath(path), + ), + ); } @override PackageId parseId(String name, Version version, description, - {String? containingPath}) { + {String? containingDir}) { if (description is! Map) { throw FormatException("The description must be a map with a 'url' " 'key.'); @@ -90,40 +94,29 @@ 'string.'); } - if (description['resolved-ref'] is! String) { + final resolvedRef = description['resolved-ref']; + if (resolvedRef is! String) { throw FormatException("The 'resolved-ref' field of the description " 'must be a string.'); } - return PackageId(name, this, version, { - ..._validatedUrl(description['url'], containingPath), - 'ref': ref ?? 'HEAD', - 'resolved-ref': description['resolved-ref'], - 'path': _validatedPath(description['path']) - }); - } - - /// Serializes path dependency's [description]. - /// - /// For the descriptions where `relative` attribute is `true`, tries to make - /// `url` relative to the specified [containingPath]. - @override - dynamic serializeDescription(String containingPath, description) { - final copy = Map.from(description); - copy.remove('relative'); - if (description['relative'] == true) { - copy['url'] = p.url.relative(description['url'], - from: Uri.file(containingPath).toString()); - } - return copy; + final url = description['url']; + return PackageId( + name, + version, + GitResolvedDescription( + GitDescription( + url: url, + ref: ref ?? 'HEAD', + path: _validatedPath( + description['path'], + ), + containingDir: containingDir), + resolvedRef)); } /// Throws a [FormatException] if [url] isn't a valid Git URL. - Map<String, Object> _validatedUrl(dynamic url, String? 
containingDir) { - if (url is! String) { - throw FormatException("The 'url' field of the description must be a " - 'string.'); - } + static _ValidatedUrl _validatedUrl(String url, String? containingDir) { var relative = false; // If the URL contains an @, it's probably an SSH hostname, which we don't // know how to validate. @@ -141,15 +134,26 @@ // A relative path is stored internally as absolute resolved relative to // [containingPath]. relative = true; - url = Uri.file(p.absolute(containingDir)).resolveUri(parsed).toString(); + url = p.url.normalize( + p.url.join( + p.toUri(p.absolute(containingDir)).toString(), + parsed.toString(), + ), + ); } } - return {'relative': relative, 'url': url}; + return _ValidatedUrl(url, relative); } - /// Returns [path] normalized. + /// Normalizes [path]. /// - /// Throws a [FormatException] if [path] isn't a relative url or null. + /// Throws a [FormatException] if [path] isn't a [String] parsing as a + /// relative URL or `null`. + /// + /// A relative url here has: + /// - non-absolute path + /// - no scheme + /// - no authority String _validatedPath(dynamic path) { path ??= '.'; if (path is! String) { @@ -159,9 +163,13 @@ // Use Dart's URL parser to validate the URL. final parsed = Uri.parse(path); - if (parsed.isAbsolute) { + if (parsed.hasAbsolutePath || + parsed.hasScheme || + parsed.hasAuthority || + parsed.hasFragment || + parsed.hasQuery) { throw FormatException( - "The 'path' field of the description must be relative."); + "The 'path' field of the description must be a relative path URL."); } if (!p.url.isWithin('.', path) && !p.url.equals('.', path)) { throw FormatException( @@ -171,63 +179,10 @@ return p.url.normalize(parsed.toString()); } - /// If [description] has a resolved ref, print it out in short-form. - /// - /// This helps distinguish different git commits with the same pubspec - /// version. 
- @override - String formatDescription(description) { - if (description is Map && description.containsKey('resolved-ref')) { - var result = '${urlFromDescription(description)} at ' - "${description['resolved-ref'].substring(0, 6)}"; - if (description['path'] != '.') result += " in ${description["path"]}"; - return result; - } else { - return super.formatDescription(description); - } - } - - /// Two Git descriptions are equal if both their URLs and their refs are - /// equal. - @override - bool descriptionsEqual(description1, description2) { - // TODO(nweiz): Do we really want to throw an error if you have two - // dependencies on some repo, one of which specifies a ref and one of which - // doesn't? If not, how do we handle that case in the version solver? - if (description1['url'] != description2['url']) return false; - if (description1['ref'] != description2['ref']) return false; - if (description1['path'] != description2['path']) return false; - - if (description1.containsKey('resolved-ref') && - description2.containsKey('resolved-ref')) { - return description1['resolved-ref'] == description2['resolved-ref']; - } - - return true; - } - - @override - int hashDescription(description) { - // Don't include the resolved ref in the hash code because we ignore it in - // [descriptionsEqual] if only one description defines it. - return description['url'].hashCode ^ - description['ref'].hashCode ^ - description['path'].hashCode; - } -} - -/// The [BoundSource] for [GitSource]. -class BoundGitSource extends CachedSource { /// Limit the number of concurrent git operations to 1. // TODO(sigurdm): Use RateLimitedScheduler. final Pool _pool = Pool(1); - @override - final GitSource source; - - @override - final SystemCache systemCache; - /// A map from revision cache locations to futures that will complete once /// they're finished being cloned. /// @@ -239,41 +194,38 @@ /// has already been run during this run of pub. 
final _updatedRepos = <String>{}; - BoundGitSource(this.source, this.systemCache); - /// Given a Git repo that contains a pub package, gets the name of the pub /// package. - Future<String> getPackageNameFromRepo(String repo) { + Future<String> getPackageNameFromRepo(String repo, SystemCache cache) { // Clone the repo to a temp directory. return withTempDir((tempDir) async { await _clone(repo, tempDir, shallow: true); - var pubspec = Pubspec.load(tempDir, systemCache.sources); + var pubspec = Pubspec.load(tempDir, cache.sources); return pubspec.name; }); } @override Future<List<PackageId>> doGetVersions( - PackageRef ref, Duration? maxAge) async { + PackageRef ref, + Duration? maxAge, + SystemCache cache, + ) async { + final description = ref.description; + if (description is! GitDescription) { + throw StateError('Called with wrong ref'); + } return await _pool.withResource(() async { - await _ensureRepoCache(ref); - var path = _repoCachePath(ref); - var revision = await _firstRevision(path, ref.description['ref']); - var pubspec = await _describeUncached( - ref, - revision, - ref.description['path'], - source.urlFromDescription(ref.description), - ); + await _ensureRepoCache(ref, cache); + var path = _repoCachePath(ref, cache); + var revision = await _firstRevision( + path, description.ref!); // TODO(sigurdm) when can ref be null here? + var pubspec = + await _describeUncached(ref, revision, description.path, cache); return [ - PackageId(ref.name, source, pubspec.version, { - 'url': ref.description['url'], - 'relative': ref.description['relative'], - 'ref': ref.description['ref'], - 'resolved-ref': revision, - 'path': ref.description['path'] - }) + PackageId(ref.name, pubspec.version, + GitResolvedDescription(description, revision)) ]; }); } @@ -281,12 +233,16 @@ /// Since we don't have an easy way to read from a remote Git repo, this /// just installs [id] into the system cache, then describes it from there. 
@override - Future<Pubspec> describeUncached(PackageId id) { + Future<Pubspec> describeUncached(PackageId id, SystemCache cache) { + final description = id.description; + if (description is! GitResolvedDescription) { + throw StateError('Called with wrong ref'); + } return _pool.withResource(() => _describeUncached( id.toRef(), - id.description['resolved-ref'], - id.description['path'], - source.urlFromDescription(id.description), + description.resolvedRef, + description.description.path, + cache, )); } @@ -296,10 +252,14 @@ PackageRef ref, String revision, String path, - String url, + SystemCache cache, ) async { - await _ensureRevision(ref, revision); - var repoPath = _repoCachePath(ref); + final description = ref.description; + if (description is! GitDescription) { + throw ArgumentError('Wrong source'); + } + await _ensureRevision(ref, revision, cache); + var repoPath = _repoCachePath(ref, cache); // Normalize the path because Git treats "./" at the beginning of a path // specially. @@ -313,12 +273,12 @@ .run(['show', '$revision:$pubspecPath'], workingDir: repoPath); } on git.GitException catch (_) { fail('Could not find a file named "$pubspecPath" in ' - '${source.urlFromDescription(ref.description)} $revision.'); + '${p.prettyUri(description.url)} $revision.'); } return Pubspec.parse( lines.join('\n'), - systemCache.sources, + cache.sources, expectedName: ref.name, ); } @@ -336,42 +296,56 @@ /// itself; each of the commit-specific directories are clones of a directory /// in `cache/`. @override - Future<Package> downloadToSystemCache(PackageId id) async { + Future<Package> downloadToSystemCache(PackageId id, SystemCache cache) async { return await _pool.withResource(() async { - var ref = id.toRef(); + final ref = id.toRef(); + final description = ref.description; + if (description is! 
GitDescription) { + throw ArgumentError('Wrong source'); + } if (!git.isInstalled) { - fail("Cannot get ${id.name} from Git (${ref.description['url']}).\n" + fail('Cannot get ${id.name} from Git (${description.url}).\n' 'Please ensure Git is correctly installed.'); } - ensureDir(p.join(systemCacheRoot, 'cache')); - await _ensureRevision(ref, id.description['resolved-ref']); + ensureDir(p.join(cache.rootDirForSource(this), 'cache')); + final resolvedRef = + (id.description as GitResolvedDescription).resolvedRef; - var revisionCachePath = _revisionCachePath(id); + await _ensureRevision(ref, resolvedRef, cache); + + var revisionCachePath = _revisionCachePath(id, cache); + final path = description.path; await _revisionCacheClones.putIfAbsent(revisionCachePath, () async { if (!entryExists(revisionCachePath)) { - await _clone(_repoCachePath(ref), revisionCachePath); - await _checkOut(revisionCachePath, id.description['resolved-ref']); - _writePackageList(revisionCachePath, [id.description['path']]); + await _clone(_repoCachePath(ref, cache), revisionCachePath); + await _checkOut(revisionCachePath, resolvedRef); + _writePackageList(revisionCachePath, [path]); } else { - _updatePackageList(revisionCachePath, id.description['path']); + _updatePackageList(revisionCachePath, path); } }); return Package.load( - id.name, - p.join(revisionCachePath, p.fromUri(id.description['path'])), - systemCache.sources); + id.name, + p.join(revisionCachePath, p.fromUri(path)), + cache.sources, + ); }); } /// Returns the path to the revision-specific cache of [id]. @override - String getDirectoryInCache(PackageId? id) => - p.join(_revisionCachePath(id!), id.description['path']); + String getDirectoryInCache(PackageId id, SystemCache cache) { + final description = id.toRef().description; + if (description is! 
GitDescription) { + throw ArgumentError('Wrong source'); + } + return p.join(_revisionCachePath(id, cache), description.path); + } @override - List<Package> getCachedPackages() { + List<Package> getCachedPackages(SystemCache cache) { // TODO(keertip): Implement getCachedPackages(). throw UnimplementedError( "The git source doesn't support listing its cached packages yet."); @@ -380,12 +354,13 @@ /// Resets all cached packages back to the pristine state of the Git /// repository at the revision they are pinned to. @override - Future<Iterable<RepairResult>> repairCachedPackages() async { - if (!dirExists(systemCacheRoot)) return []; + Future<Iterable<RepairResult>> repairCachedPackages(SystemCache cache) async { + final rootDir = cache.rootDirForSource(this); + if (!dirExists(rootDir)) return []; final result = <RepairResult>[]; - var packages = listDir(systemCacheRoot) + var packages = listDir(rootDir) .where((entry) => dirExists(p.join(entry, '.git'))) .expand((revisionCachePath) { return _readPackageList(revisionCachePath).map((relative) { @@ -395,13 +370,13 @@ var packageDir = p.join(revisionCachePath, relative); try { - return Package.load(null, packageDir, systemCache.sources); + return Package.load(null, packageDir, cache.sources); } catch (error, stackTrace) { log.error('Failed to load package', error, stackTrace); var name = p.basename(revisionCachePath).split('-').first; - result.add(RepairResult( - PackageId(name, source, Version.none, '???'), - success: false)); + result.add( + RepairResult(name, Version.none, this, success: false), + ); tryDeleteEntry(revisionCachePath); return null; } @@ -419,8 +394,6 @@ // ignore it. if (!dirExists(package.dir)) continue; - var id = PackageId(package.name, source, package.version, null); - log.message('Resetting Git repository for ' '${log.bold(package.name)} ${package.version}...'); @@ -432,15 +405,18 @@ // Discard all changes to tracked files. 
await git.run(['reset', '--hard', 'HEAD'], workingDir: package.dir); - result.add(RepairResult(id, success: true)); + result.add( + RepairResult(package.name, package.version, this, success: true)); } on git.GitException catch (error, stackTrace) { log.error('Failed to reset ${log.bold(package.name)} ' '${package.version}. Error:\n$error'); log.fine(stackTrace); - result.add(RepairResult(id, success: false)); + result.add( + RepairResult(package.name, package.version, this, success: false)); // Delete the revision cache path, not the subdirectory that contains the package. - tryDeleteEntry(getDirectoryInCache(id)); + final repoRoot = git.repoRoot(package.dir); + if (repoRoot != null) tryDeleteEntry(repoRoot); } } @@ -449,46 +425,54 @@ /// Ensures that the canonical clone of the repository referred to by [ref] /// contains the given Git [revision]. - Future _ensureRevision(PackageRef ref, String revision) async { - var path = _repoCachePath(ref); + Future _ensureRevision( + PackageRef ref, + String revision, + SystemCache cache, + ) async { + var path = _repoCachePath(ref, cache); if (_updatedRepos.contains(path)) return; await _deleteGitRepoIfInvalid(path); - if (!entryExists(path)) await _createRepoCache(ref); + if (!entryExists(path)) await _createRepoCache(ref, cache); // Try to list the revision. If it doesn't exist, git will fail and we'll // know we have to update the repository. try { await _firstRevision(path, revision); } on git.GitException catch (_) { - await _updateRepoCache(ref); + await _updateRepoCache(ref, cache); } } /// Ensures that the canonical clone of the repository referred to by [ref] /// exists and is up-to-date. 
- Future _ensureRepoCache(PackageRef ref) async { - var path = _repoCachePath(ref); + Future _ensureRepoCache(PackageRef ref, SystemCache cache) async { + var path = _repoCachePath(ref, cache); if (_updatedRepos.contains(path)) return; await _deleteGitRepoIfInvalid(path); if (!entryExists(path)) { - await _createRepoCache(ref); + await _createRepoCache(ref, cache); } else { - await _updateRepoCache(ref); + await _updateRepoCache(ref, cache); } } /// Creates the canonical clone of the repository referred to by [ref]. /// /// This assumes that the canonical clone doesn't yet exist. - Future _createRepoCache(PackageRef ref) async { - var path = _repoCachePath(ref); + Future _createRepoCache(PackageRef ref, SystemCache cache) async { + final description = ref.description; + if (description is! GitDescription) { + throw ArgumentError('Wrong source'); + } + var path = _repoCachePath(ref, cache); assert(!_updatedRepos.contains(path)); try { - await _clone(ref.description['url'], path, mirror: true); + await _clone(description.url, path, mirror: true); } catch (_) { await _deleteGitRepoIfInvalid(path); rethrow; @@ -500,8 +484,11 @@ /// [ref]. /// /// This assumes that the canonical clone already exists. 
- Future _updateRepoCache(PackageRef ref) async { - var path = _repoCachePath(ref); + Future _updateRepoCache( + PackageRef ref, + SystemCache cache, + ) async { + var path = _repoCachePath(ref, cache); if (_updatedRepos.contains(path)) return Future.value(); await git.run(['fetch'], workingDir: path); _updatedRepos.add(path); @@ -612,21 +599,30 @@ .run(['checkout', ref], workingDir: repoPath).then((result) => null); } - String _revisionCachePath(PackageId id) => p.join( - systemCacheRoot, "${_repoName(id)}-${id.description['resolved-ref']}"); + String _revisionCachePath(PackageId id, SystemCache cache) => p.join( + cache.rootDirForSource(this), + '${_repoName(id.toRef())}-${(id.description as GitResolvedDescription).resolvedRef}'); /// Returns the path to the canonical clone of the repository referred to by /// [id] (the one in `<system cache>/git/cache`). - String _repoCachePath(PackageRef ref) { - var repoCacheName = '${_repoName(ref)}-${sha1(ref.description['url'])}'; - return p.join(systemCacheRoot, 'cache', repoCacheName); + String _repoCachePath(PackageRef ref, SystemCache cache) { + final description = ref.description; + if (description is! GitDescription) { + throw ArgumentError('Wrong source'); + } + final repoCacheName = '${_repoName(ref)}-${sha1(description.url)}'; + return p.join(cache.rootDirForSource(this), 'cache', repoCacheName); } - /// Returns a short, human-readable name for the repository URL in [packageName]. + /// Returns a short, human-readable name for the repository URL in [ref]. /// /// This name is not guaranteed to be unique. - String _repoName(PackageName packageName) { - var name = p.url.basename(packageName.description['url']); + String _repoName(PackageRef ref) { + final description = ref.description; + if (description is! 
GitDescription) { + throw ArgumentError('Wrong source'); + } + var name = p.url.basename(description.url); if (name.endsWith('.git')) { name = name.substring(0, name.length - '.git'.length); } @@ -638,3 +634,132 @@ return name; } } + +class GitDescription extends Description { + /// The url of the repo of this package. + /// + /// If the url was relative in the pubspec.yaml it will be resolved relative + /// to the pubspec location, and stored here as an absolute file url, and + /// [relative] will be true. + final String url; + + /// `true` if [url] was parsed from a relative url. + final bool relative; + + /// The git ref to resolve for finding the commit. + final String? ref; + + /// Relative path of the package inside the git repo. + /// + /// Represented as a relative url. + final String path; + + GitDescription._({ + required this.url, + required this.relative, + required String? ref, + required String? path, + }) : ref = ref ?? 'HEAD', + path = path ?? '.'; + + factory GitDescription({ + required String url, + required String? ref, + required String? path, + required String? containingDir, + }) { + final validatedUrl = GitSource._validatedUrl(url, containingDir); + return GitDescription._( + url: validatedUrl.url, + relative: validatedUrl.wasRelative, + ref: ref, + path: path, + ); + } + + @override + String format() { + var result = '${p.prettyUri(url)} at ' + '$ref'; + if (path != '.') result += ' in $path'; + return result; + } + + @override + Object? serializeForPubspec({ + required String? containingDir, + required LanguageVersion languageVersion, + }) { + final relativeUrl = containingDir != null && relative + ? 
p.url.relative(url, + from: p.toUri(p.normalize(p.absolute(containingDir))).toString()) + : url; + if (ref == 'HEAD' && path == '.') return relativeUrl; + return { + 'url': relativeUrl, + if (ref != 'HEAD') 'ref': ref, + if (path != '.') 'path': path, + }; + } + + @override + GitSource get source => GitSource.instance; + + @override + bool operator ==(Object other) { + return other is GitDescription && + other.url == url && + other.ref == ref && + other.path == path; + } + + @override + int get hashCode => Object.hash(url, ref, path); +} + +class GitResolvedDescription extends ResolvedDescription { + @override + GitDescription get description => super.description as GitDescription; + + final String resolvedRef; + GitResolvedDescription(GitDescription description, this.resolvedRef) + : super(description); + + @override + String format() { + var result = '${p.prettyUri(description.url)} at ' + '${resolvedRef.substring(0, 6)}'; + if (description.path != '.') result += ' in ${description.path}'; + return result; + } + + @override + Object? serializeForLockfile({required String? containingDir}) { + final url = description.relative && containingDir != null + ? p.url + .relative(description.url, from: Uri.file(containingDir).toString()) + : description.url; + return { + 'url': url, + 'ref': description.ref, + 'resolved-ref': resolvedRef, + 'path': description.path, + }; + } + + @override + bool operator ==(Object other) { + return other is GitResolvedDescription && + other.description == description && + other.resolvedRef == resolvedRef; + } + + @override + int get hashCode => Object.hash(description, resolvedRef); +} + +class _ValidatedUrl { + final String url; + final bool wasRelative; + _ValidatedUrl(this.url, this.wasRelative); +}
diff --git a/lib/src/source/hosted.dart b/lib/src/source/hosted.dart index b3fa729..f403c60 100644 --- a/lib/src/source/hosted.dart +++ b/lib/src/source/hosted.dart
@@ -79,32 +79,38 @@ if (u.path.isNotEmpty && !u.path.endsWith('/')) { u = u.replace(path: u.path + '/'); } + // pub.dev and pub.dartlang.org are identical. + // + // We rewrite here to avoid caching both, and to avoid having different + // credentials for these two. + if (u == Uri.parse('https://pub.dev')) { + log.fine('Using https://pub.dartlang.org instead of https://pub.dev.'); + u = Uri.parse('https://pub.dartlang.org'); + } return u; } /// A package source that gets packages from a package hosting site that uses /// the same API as pub.dartlang.org. -class HostedSource extends Source { +class HostedSource extends CachedSource { + static HostedSource instance = HostedSource._(); + + HostedSource._(); + @override final name = 'hosted'; @override final hasMultipleVersions = true; - @override - BoundHostedSource bind(SystemCache systemCache, {bool isOffline = false}) => - isOffline - ? _OfflineHostedSource(this, systemCache) - : BoundHostedSource(this, systemCache); - static String pubDevUrl = 'https://pub.dartlang.org'; static bool isFromPubDev(PackageId id) { - return id.source is HostedSource && - (id.description as _HostedDescription).uri.toString() == pubDevUrl; + final description = id.description.description; + return description is HostedDescription && description.url == pubDevUrl; } /// Gets the default URL for the package server for hosted dependencies. - Uri get defaultUrl { + late final String defaultUrl = () { // Changing this to pub.dev raises the following concerns: // // 1. It would blow through users caches. @@ -117,63 +123,49 @@ // Clearly, a bit of investigation is necessary before we update this to // pub.dev, it might be attractive to do next time we change the server API. try { - return _defaultUrl ??= validateAndNormalizeHostedUrl( - io.Platform.environment['PUB_HOSTED_URL'] ?? 
'https://pub.dartlang.org', - ); + var defaultHostedUrl = 'https://pub.dartlang.org'; + // Allow the defaultHostedUrl to be overriden when running from tests + if (runningFromTest) { + defaultHostedUrl = + io.Platform.environment['_PUB_TEST_DEFAULT_HOSTED_URL'] ?? + defaultHostedUrl; + } + return validateAndNormalizeHostedUrl( + io.Platform.environment['PUB_HOSTED_URL'] ?? defaultHostedUrl, + ).toString(); } on FormatException catch (e) { throw ConfigException( 'Invalid `PUB_HOSTED_URL="${e.source}"`: ${e.message}'); } - } - - Uri? _defaultUrl; + }(); /// Returns a reference to a hosted package named [name]. /// /// If [url] is passed, it's the URL of the pub server from which the package /// should be downloaded. [url] most be normalized and validated using /// [validateAndNormalizeHostedUrl]. - PackageRef refFor(String name, {Uri? url}) => - PackageRef(name, this, _HostedDescription(name, url ?? defaultUrl)); + PackageRef refFor(String name, {String? url}) { + final d = HostedDescription(name, url ?? defaultUrl); + return PackageRef(name, d); + } /// Returns an ID for a hosted package named [name] at [version]. /// /// If [url] is passed, it's the URL of the pub server from which the package /// should be downloaded. [url] most be normalized and validated using /// [validateAndNormalizeHostedUrl]. - PackageId idFor(String name, Version version, {Uri? url}) => PackageId( - name, this, version, _HostedDescription(name, url ?? defaultUrl)); - - /// Returns the description for a hosted package named [name] with the - /// given package server [url]. - dynamic _serializedDescriptionFor(String name, [Uri? 
url]) { - if (url == null) { - return name; - } - try { - url = validateAndNormalizeHostedUrl(url.toString()); - } on FormatException catch (e) { - throw ArgumentError.value(url, 'url', 'url must be normalized: $e'); - } - return {'name': name, 'url': url.toString()}; - } - - @override - dynamic serializeDescription(String containingPath, description) { - final desc = _asDescription(description); - return _serializedDescriptionFor(desc.packageName, desc.uri); - } - - @override - String formatDescription(description) => - 'on ${_asDescription(description).uri}'; - - @override - bool descriptionsEqual(description1, description2) => - _asDescription(description1) == _asDescription(description2); - - @override - int hashDescription(description) => _asDescription(description).hashCode; + PackageId idFor( + String name, + Version version, { + String? url, + }) => + PackageId( + name, + version, + ResolvedHostedDescription( + HostedDescription(name, url ?? defaultUrl.toString()), + ), + ); /// Ensures that [description] is a valid hosted package description. /// @@ -188,48 +180,62 @@ /// 2. With a direct url: `hosted: <url>` @override PackageRef parseRef(String name, description, - {String? containingPath, required LanguageVersion languageVersion}) { + {String? containingDir, required LanguageVersion languageVersion}) { return PackageRef( - name, this, _parseDescription(name, description, languageVersion)); + name, _parseDescription(name, description, languageVersion)); } @override PackageId parseId(String name, Version version, description, - {String? containingPath}) { + {String? containingDir}) { // Old pub versions only wrote `description: <pkg>` into the lock file. 
if (description is String) { if (description != name) { throw FormatException('The description should be the same as the name'); } return PackageId( - name, this, version, _HostedDescription(name, defaultUrl)); + name, + version, + ResolvedHostedDescription(HostedDescription(name, defaultUrl)), + ); } - - final serializedDescription = (description as Map).cast<String, String>(); - + if (description is! Map) { + throw FormatException('The description should be a string or a map.'); + } + final url = description['url']; + if (url is! String) { + throw FormatException('The url should be a string.'); + } + final foundName = description['name']; + if (foundName is! String) { + throw FormatException('The name should be a string.'); + } + if (foundName != name) { + throw FormatException('The name should be $name'); + } return PackageId( name, - this, version, - _HostedDescription(serializedDescription['name']!, - Uri.parse(serializedDescription['url']!)), + ResolvedHostedDescription( + HostedDescription(name, Uri.parse(url).toString()), + ), ); } - _HostedDescription _asDescription(desc) => desc as _HostedDescription; + HostedDescription _asDescription(desc) => desc as HostedDescription; /// Parses the description for a package. /// /// If the package parses correctly, this returns a (name, url) pair. If not, /// this throws a descriptive FormatException. - _HostedDescription _parseDescription( + HostedDescription _parseDescription( String packageName, description, LanguageVersion languageVersion, ) { if (description == null) { // Simple dependency without a `hosted` block, use the default server. - return _HostedDescription(packageName, defaultUrl); + return HostedDescription(packageName, defaultUrl); } final canUseShorthandSyntax = languageVersion.supportsShorterHostedSyntax; @@ -245,13 +251,13 @@ // environment, we throw an error if something that looks like a URI is // used as a package name. 
if (canUseShorthandSyntax) { - return _HostedDescription( - packageName, validateAndNormalizeHostedUrl(description)); + return HostedDescription( + packageName, validateAndNormalizeHostedUrl(description).toString()); } else { if (_looksLikePackageName.hasMatch(description)) { // Valid use of `hosted: package` dependency with an old SDK // environment. - return _HostedDescription(description, defaultUrl); + return HostedDescription(description, defaultUrl); } else { throw FormatException( 'Using `hosted: <url>` is only supported with a minimum SDK ' @@ -279,75 +285,39 @@ if (u is! String) { throw FormatException("The 'url' key must be a string value."); } - url = validateAndNormalizeHostedUrl(u); + url = validateAndNormalizeHostedUrl(u).toString(); } - return _HostedDescription(name, url); + return HostedDescription(name, url); } static final RegExp _looksLikePackageName = RegExp(r'^[a-zA-Z_]+[a-zA-Z0-9_]*$'); -} -/// Information about a package version retrieved from /api/packages/$package -class _VersionInfo { - final Pubspec pubspec; - final Uri archiveUrl; - final PackageStatus status; - - _VersionInfo(this.pubspec, this.archiveUrl, this.status); -} - -/// The [PackageName.description] for a [HostedSource], storing the package name -/// and resolved URI of the package server. -class _HostedDescription { - final String packageName; - final Uri uri; - - _HostedDescription(this.packageName, this.uri) { - ArgumentError.checkNotNull(packageName, 'packageName'); - ArgumentError.checkNotNull(uri, 'uri'); - } - - @override - int get hashCode => Object.hash(packageName, uri); - - @override - bool operator ==(Object other) { - return other is _HostedDescription && - other.packageName == packageName && - other.uri == uri; - } -} - -/// The [BoundSource] for [HostedSource]. 
-class BoundHostedSource extends CachedSource { - @override - final HostedSource source; - - @override - final SystemCache systemCache; - late RateLimitedScheduler<PackageRef, Map<PackageId, _VersionInfo>?> - _scheduler; - - BoundHostedSource(this.source, this.systemCache) { - _scheduler = RateLimitedScheduler( - _fetchVersions, - maxConcurrentOperations: 10, - ); - } + late final RateLimitedScheduler<_RefAndCache, Map<PackageId, _VersionInfo>?> + _scheduler = RateLimitedScheduler( + _fetchVersions, + maxConcurrentOperations: 10, + ); Map<PackageId, _VersionInfo> _versionInfoFromPackageListing( - Map body, PackageRef ref, Uri location) { + Map body, PackageRef ref, Uri location, SystemCache cache) { + final description = ref.description; + if (description is! HostedDescription) { + throw ArgumentError('Wrong source'); + } final versions = body['versions']; if (versions is List) { return Map.fromEntries(versions.map((map) { final pubspecData = map['pubspec']; if (pubspecData is Map) { - var pubspec = Pubspec.fromMap(pubspecData, systemCache.sources, + var pubspec = Pubspec.fromMap(pubspecData, cache.sources, expectedName: ref.name, location: location); - var id = source.idFor(ref.name, pubspec.version, - url: _serverFor(ref.description)); + var id = idFor( + ref.name, + pubspec.version, + url: description.url, + ); var archiveUrl = map['archive_url']; if (archiveUrl is String) { final status = PackageStatus( @@ -366,9 +336,14 @@ } Future<Map<PackageId, _VersionInfo>?> _fetchVersionsNoPrefetching( - PackageRef ref) async { - final serverUrl = _hostedUrl(ref.description); - final url = _listVersionsUrl(ref.description); + PackageRef ref, SystemCache cache) async { + final description = ref.description; + + if (description is! 
HostedDescription) { + throw ArgumentError('Wrong source'); + } + final hostedUrl = description.url; + final url = _listVersionsUrl(ref); log.io('Get versions from $url.'); late final String bodyText; @@ -378,8 +353,8 @@ // TODO(sigurdm): Implement cancellation of requests. This probably // requires resolution of: https://github.com/dart-lang/sdk/issues/22265. bodyText = await withAuthenticatedClient( - systemCache, - serverUrl, + cache, + Uri.parse(hostedUrl), (client) => client.read(url, headers: pubApiHeaders), ); final decoded = jsonDecode(bodyText); @@ -387,57 +362,65 @@ throw FormatException('version listing must be a mapping'); } body = decoded; - result = _versionInfoFromPackageListing(body, ref, url); + result = _versionInfoFromPackageListing(body, ref, url, cache); } on Exception catch (error, stackTrace) { - final packageName = source._asDescription(ref.description).packageName; - _throwFriendlyError(error, stackTrace, packageName, serverUrl); + final packageName = _asDescription(ref.description).packageName; + _throwFriendlyError(error, stackTrace, packageName, hostedUrl); } // Cache the response on disk. // Don't cache overly big responses. if (bodyText.length < 100 * 1024) { - await _cacheVersionListingResponse(body, ref); + await _cacheVersionListingResponse(body, ref, cache); } return result; } - Future<Map<PackageId, _VersionInfo>?> _fetchVersions(PackageRef ref) async { + Future<Map<PackageId, _VersionInfo>?> _fetchVersions( + _RefAndCache refAndCache) async { + final ref = refAndCache.ref; + final description = ref.description; + if (description is! HostedDescription) { + throw ArgumentError('Wrong source'); + } final preschedule = - Zone.current[_prefetchingKey] as void Function(PackageRef)?; + Zone.current[_prefetchingKey] as void Function(_RefAndCache)?; /// Prefetch the dependencies of the latest version, we are likely to need /// them later. void prescheduleDependenciesOfLatest( - Map<PackageId, _VersionInfo>? 
listing) { + Map<PackageId, _VersionInfo>? listing, + SystemCache cache, + ) { if (listing == null) return; final latestVersion = maxBy(listing.keys.map((id) => id.version), (e) => e)!; - final latestVersionId = - PackageId(ref.name, source, latestVersion, ref.description); + final latestVersionId = PackageId( + ref.name, latestVersion, ResolvedHostedDescription(description)); final dependencies = listing[latestVersionId]?.pubspec.dependencies.values ?? []; unawaited(withDependencyType(DependencyType.none, () async { for (final packageRange in dependencies) { if (packageRange.source is HostedSource) { - preschedule!(packageRange.toRef()); + preschedule!(_RefAndCache(packageRange.toRef(), cache)); } } })); } + final cache = refAndCache.cache; if (preschedule != null) { /// If we have a cached response - preschedule dependencies of that. prescheduleDependenciesOfLatest( - await _cachedVersionListingResponse(ref), - ); + await _cachedVersionListingResponse(ref, cache), cache); } - final result = await _fetchVersionsNoPrefetching(ref); + final result = await _fetchVersionsNoPrefetching(ref, cache); if (preschedule != null) { // Preschedule the dependencies from the actual response. // This might overlap with those from the cached response. But the // scheduler ensures each listing will be fetched at most once. - prescheduleDependenciesOfLatest(result); + prescheduleDependenciesOfLatest(result, cache); } return result; } @@ -459,7 +442,7 @@ /// If [maxAge] is not given, we will try to get the cached version no matter /// how old it is. Future<Map<PackageId, _VersionInfo>?> _cachedVersionListingResponse( - PackageRef ref, + PackageRef ref, SystemCache cache, {Duration? 
maxAge}) async { if (_responseCache.containsKey(ref)) { final cacheAge = DateTime.now().difference(_responseCache[ref]!.first); @@ -468,7 +451,7 @@ return _responseCache[ref]!.last; } } - final cachePath = _versionListingCachePath(ref); + final cachePath = _versionListingCachePath(ref, cache); final stat = io.File(cachePath).statSync(); final now = DateTime.now(); if (stat.type == io.FileSystemEntityType.file) { @@ -487,6 +470,7 @@ cachedDoc, ref, Uri.file(cachePath), + cache, ); _responseCache[ref] = Pair(parsedTimestamp, res); return res; @@ -509,8 +493,11 @@ /// Saves the (decoded) response from package-listing of [ref]. Future<void> _cacheVersionListingResponse( - Map<String, dynamic> body, PackageRef ref) async { - final path = _versionListingCachePath(ref); + Map<String, dynamic> body, + PackageRef ref, + SystemCache cache, + ) async { + final path = _versionListingCachePath(ref, cache); try { ensureDir(p.dirname(path)); await writeTextFileAsync( @@ -532,18 +519,36 @@ } @override - Future<PackageStatus> status(PackageId id, {Duration? maxAge}) async { + Future<PackageStatus> status(PackageId id, SystemCache cache, + {Duration? maxAge}) async { + if (cache.isOffline) { + // Do we have a cached version response on disk? + final versionListing = + await _cachedVersionListingResponse(id.toRef(), cache); + + if (versionListing == null) { + return PackageStatus(); + } + // If we don't have the specific version we return the empty response. + // + // This should not happen. But in production we want to avoid a crash, since + // it is more or less harmless. + // + // TODO(sigurdm): Consider representing the non-existence of the + // package-version in the return value. + return versionListing[id]?.status ?? PackageStatus(); + } final ref = id.toRef(); // Did we already get info for this package? 
- var versionListing = _scheduler.peek(ref); + var versionListing = _scheduler.peek(_RefAndCache(ref, cache)); if (maxAge != null) { // Do we have a cached version response on disk? versionListing ??= - await _cachedVersionListingResponse(ref, maxAge: maxAge); + await _cachedVersionListingResponse(ref, cache, maxAge: maxAge); } // Otherwise retrieve the info from the host. versionListing ??= await _scheduler - .schedule(ref) + .schedule(_RefAndCache(ref, cache)) // Failures retrieving the listing here should just be ignored. .catchError( (_) => <PackageId, _VersionInfo>{}, @@ -563,12 +568,15 @@ } // The path where the response from the package-listing api is cached. - String _versionListingCachePath(PackageRef ref) { - final parsed = source._asDescription(ref.description); - final dir = _urlToDirectory(parsed.uri); + String _versionListingCachePath(PackageRef ref, SystemCache cache) { + final description = ref.description; + if (description is! HostedDescription) { + throw ArgumentError('Wrong source'); + } + final dir = _urlToDirectory(description.url); // Use a dot-dir because older versions of pub won't choke on that // name when iterating the cache (it is not listed by [listDir]). - return p.join(systemCacheRoot, dir, _versionListingDirectory, + return p.join(cache.rootDirForSource(this), dir, _versionListingDirectory, '${ref.name}-versions.json'); } @@ -578,52 +586,90 @@ /// site. @override Future<List<PackageId>> doGetVersions( - PackageRef ref, Duration? maxAge) async { - var versionListing = _scheduler.peek(ref); + PackageRef ref, + Duration? maxAge, + SystemCache cache, + ) async { + final description = ref.description; + if (description is! 
HostedDescription) { + throw ArgumentError('Wrong source'); + } + if (cache.isOffline) { + final url = description.url; + final root = cache.rootDirForSource(HostedSource.instance); + final dir = p.join(root, _urlToDirectory(url)); + log.io('Finding versions of ${ref.name} in $dir'); + List<PackageId> offlineVersions; + if (dirExists(dir)) { + offlineVersions = listDir(dir) + .where(_looksLikePackageDir) + .map((entry) => _idForBasename(p.basename(entry), url)) + .where((id) => id.name == ref.name && id.version != Version.none) + .toList(); + } else { + offlineVersions = []; + } + + // If there are no versions in the cache, report a clearer error. + if (offlineVersions.isEmpty) { + throw PackageNotFoundException( + 'could not find package ${ref.name} in cache', + hint: 'Try again without --offline!', + ); + } + + return offlineVersions; + } + var versionListing = _scheduler.peek(_RefAndCache(ref, cache)); if (maxAge != null) { // Do we have a cached version response on disk? versionListing ??= - await _cachedVersionListingResponse(ref, maxAge: maxAge); + await _cachedVersionListingResponse(ref, cache, maxAge: maxAge); } - versionListing ??= await _scheduler.schedule(ref); + versionListing ??= await _scheduler.schedule(_RefAndCache(ref, cache)); return versionListing!.keys.toList(); } /// Parses [description] into its server and package name components, then /// converts that to a Uri for listing versions of the given package. - Uri _listVersionsUrl(description) { - final parsed = source._asDescription(description); - final hostedUrl = parsed.uri; - final package = Uri.encodeComponent(parsed.packageName); - return hostedUrl.resolve('api/packages/$package'); - } - - /// Parses [description] into server name component. - Uri _hostedUrl(description) { - final parsed = source._asDescription(description); - return parsed.uri; + Uri _listVersionsUrl(PackageRef ref) { + final description = ref.description; + if (description is! 
HostedDescription) { + throw ArgumentError('Wrong source'); + } + final package = Uri.encodeComponent(ref.name); + return Uri.parse(description.url).resolve('api/packages/$package'); } /// Retrieves the pubspec for a specific version of a package that is /// available from the site. @override - Future<Pubspec> describeUncached(PackageId id) async { - final versions = await _scheduler.schedule(id.toRef()); - final url = _listVersionsUrl(id.description); + Future<Pubspec> describeUncached(PackageId id, SystemCache cache) async { + if (cache.isOffline) { + throw PackageNotFoundException( + '${id.name} ${id.version} is not available in cache', + hint: 'Try again without --offline!', + ); + } + final versions = await _scheduler.schedule(_RefAndCache(id.toRef(), cache)); + final url = _listVersionsUrl(id.toRef()); return versions![id]?.pubspec ?? (throw PackageNotFoundException('Could not find package $id at $url')); } /// Downloads the package identified by [id] to the system cache. @override - Future<Package> downloadToSystemCache(PackageId id) async { - if (!isInSystemCache(id)) { - var packageDir = getDirectoryInCache(id); + Future<Package> downloadToSystemCache(PackageId id, SystemCache cache) async { + if (!isInSystemCache(id, cache)) { + if (cache.isOffline) { + throw StateError('Cannot download packages when offline.'); + } + var packageDir = getDirectoryInCache(id, cache); ensureDir(p.dirname(packageDir)); - await _download(id, packageDir); + await _download(id, packageDir, cache); } - return Package.load(id.name, getDirectoryInCache(id), systemCache.sources); + return Package.load(id.name, getDirectoryInCache(id, cache), cache.sources); } /// The system cache directory for the hosted source contains subdirectories @@ -632,21 +678,27 @@ /// Each of these subdirectories then contains a subdirectory for each /// package downloaded from that site. 
@override - String getDirectoryInCache(PackageId id) { - var parsed = source._asDescription(id.description); - var dir = _urlToDirectory(parsed.uri); - return p.join(systemCacheRoot, dir, '${parsed.packageName}-${id.version}'); + String getDirectoryInCache(PackageId id, SystemCache cache) { + final description = id.description.description; + if (description is! HostedDescription) { + throw ArgumentError('Wrong source'); + } + final rootDir = cache.rootDirForSource(this); + + var dir = _urlToDirectory(description.url); + return p.join(rootDir, dir, '${id.name}-${id.version}'); } /// Re-downloads all packages that have been previously downloaded into the /// system cache from any server. @override - Future<Iterable<RepairResult>> repairCachedPackages() async { - if (!dirExists(systemCacheRoot)) return []; + Future<Iterable<RepairResult>> repairCachedPackages(SystemCache cache) async { + final rootDir = cache.rootDirForSource(this); + if (!dirExists(rootDir)) return []; - return (await Future.wait(listDir(systemCacheRoot).map((serverDir) async { + return (await Future.wait(listDir(rootDir).map((serverDir) async { final directory = p.basename(serverDir); - Uri url; + late final String url; try { url = _directoryToUrl(directory); } on FormatException { @@ -662,15 +714,18 @@ var packages = <Package>[]; for (var entry in listDir(serverDir)) { try { - packages.add(Package.load(null, entry, systemCache.sources)); + packages.add(Package.load(null, entry, cache.sources)); } catch (error, stackTrace) { log.error('Failed to load package', error, stackTrace); + final id = _idForBasename( + p.basename(entry), + url, + ); results.add( RepairResult( - _idForBasename( - p.basename(entry), - url: url, - ), + id.name, + id.version, + this, success: false, ), ); @@ -686,20 +741,20 @@ return results ..addAll(await Future.wait( packages.map((package) async { - var id = source.idFor(package.name, package.version, url: url); + var id = idFor(package.name, package.version, url: url); try { 
deleteEntry(package.dir); - await _download(id, package.dir); - return RepairResult(id, success: true); + await _download(id, package.dir, cache); + return RepairResult(id.name, id.version, this, success: true); } catch (error, stackTrace) { var message = 'Failed to repair ${log.bold(package.name)} ' '${package.version}'; - if (url != source.defaultUrl) message += ' from $url'; + if (url != defaultUrl) message += ' from $url'; log.error('$message. Error:\n$error'); log.fine(stackTrace); tryDeleteEntry(package.dir); - return RepairResult(id, success: false); + return RepairResult(id.name, id.version, this, success: false); } }), )); @@ -709,36 +764,49 @@ /// Returns the best-guess package ID for [basename], which should be a /// subdirectory in a hosted cache. - PackageId _idForBasename(String basename, {Uri? url}) { + PackageId _idForBasename(String basename, String url) { var components = split1(basename, '-'); var version = Version.none; if (components.length > 1) { try { version = Version.parse(components.last); - } catch (_) { + } on FormatException { // Default to Version.none. } } final name = components.first; - return source.idFor(name, version, url: url); + return PackageId( + name, + version, + ResolvedHostedDescription(HostedDescription(name, url)), + ); } - bool _looksLikePackageDir(String path) => - dirExists(path) && - _idForBasename(p.basename(path)).version != Version.none; + bool _looksLikePackageDir(String path) { + var components = split1(p.basename(path), '-'); + if (components.length < 2) return false; + try { + Version.parse(components.last); + } on FormatException { + return false; + } + return dirExists(path); + } /// Gets all of the packages that have been downloaded into the system cache /// from the default server. 
@override - List<Package> getCachedPackages() { - var cacheDir = p.join(systemCacheRoot, _urlToDirectory(source.defaultUrl)); + List<Package> getCachedPackages(SystemCache cache) { + final root = cache.rootDirForSource(HostedSource.instance); + var cacheDir = + p.join(root, _urlToDirectory(HostedSource.instance.defaultUrl)); if (!dirExists(cacheDir)) return []; return listDir(cacheDir) .where(_looksLikePackageDir) .map((entry) { try { - return Package.load(null, entry, systemCache.sources); + return Package.load(null, entry, cache.sources); } catch (error, stackTrace) { log.fine('Failed to load package from $entry:\n' '$error\n' @@ -756,7 +824,15 @@ /// If there is no archive_url, try to fetch it from /// `$server/packages/$package/versions/$version.tar.gz` where server comes /// from `id.description`. - Future _download(PackageId id, String destPath) async { + Future _download( + PackageId id, + String destPath, + SystemCache cache, + ) async { + final description = id.description.description; + if (description is! HostedDescription) { + throw ArgumentError('Wrong source'); + } // We never want to use a cached `archive_url`, so we never attempt to load // the version listing from cache. Besides in most cases we already have // downloaded a fresh copy of the version listing response in the in-memory @@ -766,7 +842,7 @@ // a custom package server may include a temporary signature in the // query-string as is the case with signed S3 URLs. And we wish to allow for // such URLs to be used. 
- final versions = await _scheduler.schedule(id.toRef()); + final versions = await _scheduler.schedule(_RefAndCache(id.toRef(), cache)); final versionInfo = versions![id]; final packageName = id.name; final version = id.version; @@ -774,8 +850,6 @@ throw PackageNotFoundException( 'Package $packageName has no version $version'); } - final parsedDescription = source._asDescription(id.description); - final server = parsedDescription.uri; var url = versionInfo.archiveUrl; log.io('Get package from $url.'); @@ -785,7 +859,9 @@ await withTempDir((tempDirForArchive) async { var archivePath = p.join(tempDirForArchive, '$packageName-$version.tar.gz'); - var response = await withAuthenticatedClient(systemCache, server, + var response = await withAuthenticatedClient( + cache, + Uri.parse(description.url), (client) => client.send(http.Request('GET', url))); // We download the archive to disk instead of streaming it directly into @@ -794,7 +870,7 @@ // cancelling a http stream makes it not reusable. // There are ways around this, and we might revisit this later. await createFileFromStream(response.stream, archivePath); - var tempDir = systemCache.createTempDir(); + var tempDir = cache.createTempDir(); await extractTarGz(readBinaryFileAsSream(archivePath), tempDir); // Now that the get has succeeded, move it to the real location in the @@ -803,17 +879,7 @@ // If this fails with a "directory not empty" exception we assume that // another pub process has installed the same package version while we // downloaded. - try { - renameDir(tempDir, destPath); - } on io.FileSystemException catch (e) { - tryDeleteEntry(tempDir); - if (!isDirectoryNotEmptyException(e)) { - rethrow; - } - log.fine(''' -Destination directory $destPath already existed. 
-Assuming a concurrent pub invocation installed it.'''); - } + tryRenameDir(tempDir, destPath); }); } @@ -825,7 +891,7 @@ Exception error, StackTrace stackTrace, String package, - Uri hostedUrl, + String hostedUrl, ) { if (error is PubHttpException) { if (error.response.statusCode == 404) { @@ -881,68 +947,6 @@ } } - /// Given a URL, returns a "normalized" string to be used as a directory name - /// for packages downloaded from the server at that URL. - /// - /// This normalization strips off the scheme (which is presumed to be HTTP or - /// HTTPS) and *sort of* URL-encodes it. I say "sort of" because it does it - /// incorrectly: it uses the character's *decimal* ASCII value instead of hex. - /// - /// This could cause an ambiguity since some characters get encoded as three - /// digits and others two. It's possible for one to be a prefix of the other. - /// In practice, the set of characters that are encoded don't happen to have - /// any collisions, so the encoding is reversible. - /// - /// This behavior is a bug, but is being preserved for compatibility. - String _urlToDirectory(Uri hostedUrl) { - var url = hostedUrl.toString(); - // Normalize all loopback URLs to "localhost". - url = url.replaceAllMapped( - RegExp(r'^(https?://)(127\.0\.0\.1|\[::1\]|localhost)?'), (match) { - // Don't include the scheme for HTTPS URLs. This makes the directory names - // nice for the default and most recommended scheme. We also don't include - // it for localhost URLs, since they're always known to be HTTP. - var localhost = match[2] == null ? '' : 'localhost'; - var scheme = - match[1] == 'https://' || localhost.isNotEmpty ? '' : match[1]; - return '$scheme$localhost'; - }); - return replace( - url, - RegExp(r'[<>:"\\/|?*%]'), - (match) => '%${match[0]!.codeUnitAt(0)}', - ); - } - - /// Given a directory name in the system cache, returns the URL of the server - /// whose packages it contains. - /// - /// See [_urlToDirectory] for details on the mapping. 
Note that because the - /// directory name does not preserve the scheme, this has to guess at it. It - /// chooses "http" for loopback URLs (mainly to support the pub tests) and - /// "https" for all others. - Uri _directoryToUrl(String directory) { - // Decode the pseudo-URL-encoded characters. - var chars = '<>:"\\/|?*%'; - for (var i = 0; i < chars.length; i++) { - var c = chars.substring(i, i + 1); - directory = directory.replaceAll('%${c.codeUnitAt(0)}', c); - } - - // If the URL has an explicit scheme, use that. - if (directory.contains('://')) { - return Uri.parse(directory); - } - - // Otherwise, default to http for localhost and https for everything else. - var scheme = - isLoopback(directory.replaceAll(RegExp(':.*'), '')) ? 'http' : 'https'; - return Uri.parse('$scheme://$directory'); - } - - /// Returns the server URL for [description]. - Uri _serverFor(description) => source._asDescription(description).uri; - /// Enables speculative prefetching of dependencies of packages queried with /// [getVersions]. Future<T> withPrefetching<T>(Future<T> Function() callback) async { @@ -956,80 +960,143 @@ static const _prefetchingKey = #_prefetch; } -/// This is the modified hosted source used when pub get or upgrade are run -/// with "--offline". +/// The [PackageName.description] for a [HostedSource], storing the package name +/// and resolved URI of the package server. +class HostedDescription extends Description { + final String packageName; + final String url; + + HostedDescription(this.packageName, this.url); + + @override + int get hashCode => Object.hash(packageName, url); + + @override + bool operator ==(Object other) { + return other is HostedDescription && + other.packageName == packageName && + other.url == url; + } + + @override + String format() => 'on $url'; + + @override + Object? serializeForPubspec({ + required String? 
containingDir, + required LanguageVersion languageVersion, + }) { + if (url == source.defaultUrl) { + return null; + } + return {'url': url, 'name': packageName}; + } + + @override + HostedSource get source => HostedSource.instance; +} + +class ResolvedHostedDescription extends ResolvedDescription { + @override + HostedDescription get description => super.description as HostedDescription; + + ResolvedHostedDescription(HostedDescription description) : super(description); + + @override + Object? serializeForLockfile({required String? containingDir}) { + late final String url; + try { + url = validateAndNormalizeHostedUrl(description.url).toString(); + } on FormatException catch (e) { + throw ArgumentError.value(description.url, 'url', 'url must be normalized: $e'); + } + return {'name': description.packageName, 'url': url.toString()}; + } + + @override + int get hashCode => description.hashCode; + + @override + bool operator ==(Object other) { + return other is ResolvedHostedDescription && + other.description == description; + } +} + +/// Information about a package version retrieved from /api/packages/$package +class _VersionInfo { + final Pubspec pubspec; + final Uri archiveUrl; + final PackageStatus status; + + _VersionInfo(this.pubspec, this.archiveUrl, this.status); +} + +/// Given a URL, returns a "normalized" string to be used as a directory name +/// for packages downloaded from the server at that URL. /// -/// This uses the system cache to get the list of available packages and does -/// no network access. -class _OfflineHostedSource extends BoundHostedSource { - _OfflineHostedSource(HostedSource source, SystemCache systemCache) - : super(source, systemCache); +/// This normalization strips off the scheme (which is presumed to be HTTP or +/// HTTPS) and *sort of* URL-encodes it. I say "sort of" because it does it +/// incorrectly: it uses the character's *decimal* ASCII value instead of hex. 
+/// +/// This could cause an ambiguity since some characters get encoded as three +/// digits and others two. It's possible for one to be a prefix of the other. +/// In practice, the set of characters that are encoded don't happen to have +/// any collisions, so the encoding is reversible. +/// +/// This behavior is a bug, but is being preserved for compatibility. +String _urlToDirectory(String hostedUrl) { + // Normalize all loopback URLs to "localhost". + final url = hostedUrl.replaceAllMapped( + RegExp(r'^(https?://)(127\.0\.0\.1|\[::1\]|localhost)?'), (match) { + // Don't include the scheme for HTTPS URLs. This makes the directory names + // nice for the default and most recommended scheme. We also don't include + // it for localhost URLs, since they're always known to be HTTP. + var localhost = match[2] == null ? '' : 'localhost'; + var scheme = match[1] == 'https://' || localhost.isNotEmpty ? '' : match[1]; + return '$scheme$localhost'; + }); + return replace( + url, + RegExp(r'[<>:"\\/|?*%]'), + (match) => '%${match[0]!.codeUnitAt(0)}', + ); +} - /// Gets the list of all versions of [ref] that are in the system cache. - @override - Future<List<PackageId>> doGetVersions( - PackageRef ref, - Duration? maxAge, - ) async { - var parsed = source._asDescription(ref.description); - var server = parsed.uri; - log.io('Finding versions of ${ref.name} in ' - '$systemCacheRoot/${_urlToDirectory(server)}'); - - var dir = p.join(systemCacheRoot, _urlToDirectory(server)); - - List<PackageId> versions; - if (dirExists(dir)) { - versions = listDir(dir) - .where(_looksLikePackageDir) - .map((entry) => _idForBasename(p.basename(entry), url: server)) - .where((id) => id.name == ref.name && id.version != Version.none) - .toList(); - } else { - versions = []; - } - - // If there are no versions in the cache, report a clearer error. 
- if (versions.isEmpty) { - throw PackageNotFoundException( - 'could not find package ${ref.name} in cache', - hint: 'Try again without --offline!', - ); - } - - return versions; +/// Given a directory name in the system cache, returns the URL of the server +/// whose packages it contains. +/// +/// See [_urlToDirectory] for details on the mapping. Note that because the +/// directory name does not preserve the scheme, this has to guess at it. It +/// chooses "http" for loopback URLs (mainly to support the pub tests) and +/// "https" for all others. +String _directoryToUrl(String directory) { + // Decode the pseudo-URL-encoded characters. + var chars = '<>:"\\/|?*%'; + for (var i = 0; i < chars.length; i++) { + var c = chars.substring(i, i + 1); + directory = directory.replaceAll('%${c.codeUnitAt(0)}', c); } - @override - Future _download(PackageId id, String destPath) { - // Since HostedSource is cached, this will only be called for uncached - // packages. - throw UnsupportedError('Cannot download packages when offline.'); + // If the URL has an explicit scheme, use that. + if (directory.contains('://')) { + return Uri.parse(directory).toString(); } - @override - Future<Pubspec> describeUncached(PackageId id) { - throw PackageNotFoundException( - '${id.name} ${id.version} is not available in cache', - hint: 'Try again without --offline!', - ); - } + // Otherwise, default to http for localhost and https for everything else. + var scheme = + isLoopback(directory.replaceAll(RegExp(':.*'), '')) ? 'http' : 'https'; + return Uri.parse('$scheme://$directory').toString(); +} + +// TODO(sigurdm): This is quite inelegant. +class _RefAndCache { + final PackageRef ref; + final SystemCache cache; + _RefAndCache(this.ref, this.cache); @override - Future<PackageStatus> status(PackageId id, {Duration? maxAge}) async { - // Do we have a cached version response on disk? 
- final versionListing = await _cachedVersionListingResponse(id.toRef()); - - if (versionListing == null) { - return PackageStatus(); - } - // If we don't have the specific version we return the empty response. - // - // This should not happen. But in production we want to avoid a crash, since - // it is more or less harmless. - // - // TODO(sigurdm): Consider representing the non-existence of the - // package-version in the return value. - return versionListing[id]?.status ?? PackageStatus(); - } + int get hashCode => ref.hashCode; + @override + bool operator ==(Object other) => other is _RefAndCache && other.ref == ref; }
diff --git a/lib/src/source/path.dart b/lib/src/source/path.dart index 0401ebd..9e4deff 100644 --- a/lib/src/source/path.dart +++ b/lib/src/source/path.dart
@@ -18,45 +18,36 @@ /// A package [Source] that gets packages from a given local file path. class PathSource extends Source { + static PathSource instance = PathSource._(); + PathSource._(); + @override final name = 'path'; - @override - BoundSource bind(SystemCache systemCache) => - BoundPathSource(this, systemCache); - - /// Given a valid path reference description, returns the file path it - /// describes. - /// - /// This returned path may be relative or absolute and it is up to the caller - /// to know how to interpret a relative path. - String pathFromDescription(description) => description['path']; - - /// Returns a reference to a path package named [name] at [path]. - PackageRef refFor(String name, String path) { - return PackageRef( - name, this, {'path': path, 'relative': p.isRelative(path)}); - } - + // /// Returns a reference to a path package named [name] at [path]. + // PackageRef<PathDescription> refFor(String name, String path) { + // if (p.isRelative(path)) { + // PackageRef(name, {'path':p.absolute(path), 'relative': p.isRelative(path)}); + // } + // return PackageRef(name, {'path': path, 'relative': p.isRelative(path)}); + // } +//{name: myapp, dev_dependencies: {foo: 1.2.2}, dependency_overrides: {foo: {path: ../foo}}, environment: {sdk: >=0.1.2 <1.0.0}} +//{name: myapp, dev_dependencies: {foo: ^1.2.2}, dependency_overrides: {foo: {path: ../foo}}, environment: {sdk: >=0.1.2 <1.0.0}} /// Returns an ID for a path package with the given [name] and [version] at /// [path]. 
- PackageId idFor(String name, Version version, String path) { + /// + /// If [path] is relative it is resolved relative to [relativeTo] + PackageId idFor( + String name, Version version, String path, String relativeTo) { return PackageId( - name, this, version, {'path': path, 'relative': p.isRelative(path)}); + name, + version, + ResolvedPathDescription( + PathDescription(p.join(relativeTo, path), p.isRelative(path)), + ), + ); } - @override - bool descriptionsEqual(description1, description2) { - // Compare real paths after normalizing and resolving symlinks. - var path1 = canonicalize(description1['path']); - var path2 = canonicalize(description2['path']); - return path1 == path2; - } - - @override - int hashDescription(description) => - canonicalize(description['path']).hashCode; - /// Parses a path dependency. /// /// This takes in a path string and returns a map. The "path" key will be the @@ -66,13 +57,13 @@ PackageRef parseRef( String name, description, { - String? containingPath, + String? containingDir, LanguageVersion? languageVersion, }) { if (description is! String) { throw FormatException('The description must be a path string.'); } - + var dir = description; // Resolve the path relative to the containing file path, and remember // whether the original path was relative or absolute. var isRelative = p.isRelative(description); @@ -80,66 +71,54 @@ // Relative paths coming from pubspecs that are not on the local file // system aren't allowed. This can happen if a hosted or git dependency // has a path dependency. 
- if (containingPath == null) { + if (containingDir == null) { throw FormatException('"$description" is a relative path, but this ' 'isn\'t a local pubspec.'); } - description = p.normalize(p.join(p.dirname(containingPath), description)); + dir = p.normalize( + p.absolute(p.join(containingDir, description)), + ); } - - return PackageRef( - name, this, {'path': description, 'relative': isRelative}); + return PackageRef(name, PathDescription(dir, isRelative)); } @override PackageId parseId(String name, Version version, description, - {String? containingPath}) { + {String? containingDir}) { if (description is! Map) { throw FormatException('The description must be a map.'); } - - if (description['path'] is! String) { + var path = description['path']; + if (path is! String) { throw FormatException("The 'path' field of the description must " 'be a string.'); } - - if (description['relative'] is! bool) { + final relative = description['relative']; + if (relative is! bool) { throw FormatException("The 'relative' field of the description " 'must be a boolean.'); } // Resolve the path relative to the containing file path. - if (description['relative']) { + if (relative) { // Relative paths coming from lockfiles that are not on the local file // system aren't allowed. - if (containingPath == null) { + if (containingDir == null) { throw FormatException('"$description" is a relative path, but this ' 'isn\'t a local pubspec.'); } - description = Map.from(description); - description['path'] = - p.normalize(p.join(p.dirname(containingPath), description['path'])); + path = p.normalize( + p.absolute(p.join(containingDir, description['path'])), + ); } - return PackageId(name, this, version, description); - } - - /// Serializes path dependency's [description]. - /// - /// For the descriptions where `relative` attribute is `true`, tries to make - /// `path` relative to the specified [containingPath]. 
- @override - dynamic serializeDescription(String containingPath, description) { - if (description['relative']) { - return { - 'path': relativePathWithPosixSeparators( - p.relative(description['path'], from: containingPath)), - 'relative': true - }; - } - return description; + return PackageId( + name, + version, + ResolvedPathDescription(PathDescription(path, relative)), + ); } /// On both Windows and linux we prefer `/` in the pubspec.lock for relative @@ -149,49 +128,49 @@ return p.posix.joinAll(p.split(path)); } - /// Converts a parsed relative path to its original relative form. - @override - String formatDescription(description) { - var sourcePath = description['path']; - if (description['relative']) sourcePath = p.relative(description['path']); - return sourcePath; - } -} - -/// The [BoundSource] for [PathSource]. -class BoundPathSource extends BoundSource { - @override - final PathSource source; - - @override - final SystemCache systemCache; - - BoundPathSource(this.source, this.systemCache); - @override Future<List<PackageId>> doGetVersions( - PackageRef ref, Duration? maxAge) async { + PackageRef ref, Duration? maxAge, SystemCache cache) async { + final description = ref.description; + if (description is! PathDescription) { + throw ArgumentError('Wrong source'); + } // There's only one package ID for a given path. We just need to find the // version. - var pubspec = _loadPubspec(ref); - var id = PackageId(ref.name, source, pubspec.version, ref.description); - memoizePubspec(id, pubspec); + var pubspec = _loadPubspec(ref, cache); + var id = PackageId( + ref.name, pubspec.version, ResolvedPathDescription(description)); + // Store the pubspec in memory if we need to refer to it again. 
+ cache.cachedPubspecs[id] = pubspec; return [id]; } @override - Future<Pubspec> doDescribe(PackageId id) async => _loadPubspec(id.toRef()); + Future<Pubspec> doDescribe(PackageId id, SystemCache cache) async => + _loadPubspec(id.toRef(), cache); - Pubspec _loadPubspec(PackageRef ref) { - var dir = _validatePath(ref.name, ref.description); - return Pubspec.load(dir, systemCache.sources, expectedName: ref.name); + Pubspec _loadPubspec(PackageRef ref, SystemCache cache) { + final description = ref.description; + if (description is! PathDescription) { + throw ArgumentError('Wrong source'); + } + var dir = _validatePath(ref.name, description); + return Pubspec.load(dir, cache.sources, expectedName: ref.name); } @override - String getDirectory(PackageId id, {String? relativeFrom}) { - return id.description['relative'] - ? p.relative(id.description['path'], from: relativeFrom) - : id.description['path']!; + String doGetDirectory( + PackageId id, + SystemCache cache, { + String? relativeFrom, + }) { + final description = id.description.description; + if (description is! PathDescription) { + throw ArgumentError('Wrong source'); + } + return description.relative + ? p.relative(description.path, from: relativeFrom) + : description.path; } /// Ensures that [description] is a valid path description and returns a @@ -200,8 +179,8 @@ /// It must be a map, with a "path" key containing a path that points to an /// existing directory. Throws an [ApplicationException] if the path is /// invalid. - String _validatePath(String name, description) { - var dir = description['path']; + String _validatePath(String name, PathDescription description) { + final dir = description.path; if (dirExists(dir)) return dir; @@ -209,8 +188,68 @@ fail('Path dependency for package $name must refer to a directory, ' 'not a file. 
Was "$dir".'); } - - throw PackageNotFoundException('could not find package $name at "$dir"', - innerError: FileException('$dir does not exist.', dir)); + throw PackageNotFoundException( + 'could not find package $name at "${description.format()}"', + innerError: FileException('$dir does not exist.', dir), + ); } } + +class PathDescription extends Description { + final String path; + final bool relative; + + PathDescription(this.path, this.relative) : assert(!p.isRelative(path)); + @override + String format() { + return relative ? p.relative(path) : path; + } + + @override + Object? serializeForPubspec({ + required String? containingDir, + required LanguageVersion languageVersion, + }) { + return relative + ? PathSource.relativePathWithPosixSeparators( + p.relative(path, from: containingDir)) + : path; + } + + @override + Source get source => PathSource.instance; + + @override + bool operator ==(Object other) { + return other is PathDescription && + canonicalize(path) == canonicalize(other.path); + } + + @override + int get hashCode => canonicalize(path).hashCode; +} + +class ResolvedPathDescription extends ResolvedDescription { + @override + PathDescription get description => super.description as PathDescription; + + ResolvedPathDescription(PathDescription description) : super(description); + + @override + Object? serializeForLockfile({required String? containingDir}) { + if (description.relative) { + return { + 'path': p.relative(description.path, from: containingDir), + 'relative': true + }; + } + return {'path': description.path, 'relative': false}; + } + + @override + bool operator ==(Object other) => + other is ResolvedPathDescription && other.description == description; + + @override + int get hashCode => description.hashCode; +}
diff --git a/lib/src/source/root.dart b/lib/src/source/root.dart new file mode 100644 index 0000000..2c02e69 --- /dev/null +++ b/lib/src/source/root.dart
@@ -0,0 +1,106 @@ +// Copyright (c) 2022, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +import 'package:pub_semver/pub_semver.dart'; + +import '../language_version.dart'; +import '../package.dart'; +import '../package_name.dart'; +import '../pubspec.dart'; +import '../source.dart'; +import '../system_cache.dart'; + +class RootSource extends Source { + static final RootSource instance = RootSource._(); + + RootSource._(); + + @override + String get name => 'root'; + + @override + Future<Pubspec> doDescribe( + PackageId id, + SystemCache cache, + ) async { + final description = id.description.description; + if (description is! RootDescription) { + throw ArgumentError('Wrong source'); + } + return description.package.pubspec; + } + + @override + Future<List<PackageId>> doGetVersions( + PackageRef ref, Duration? maxAge, SystemCache cache) async { + final description = ref.description; + if (description is! RootDescription) { + throw ArgumentError('Wrong source'); + } + return [PackageId.root(description.package)]; + } + + @override + String doGetDirectory(PackageId id, SystemCache cache, + {String? relativeFrom}) { + // TODO(sigurdm): Should we support this. + throw UnsupportedError('Cannot get the directory of the root package'); + } + + @override + PackageId parseId(String name, Version version, description, + {String? containingDir}) { + throw UnsupportedError('Trying to parse a root package description.'); + } + + @override + PackageRef parseRef(String name, description, + {String? containingDir, required LanguageVersion languageVersion}) { + throw UnsupportedError('Trying to parse a root package description.'); + } +} + +class ResolvedRootDescription extends ResolvedDescription { + ResolvedRootDescription(RootDescription description) : super(description); + + @override + Object? 
serializeForLockfile({required String? containingDir}) { + throw UnsupportedError('Trying to serialize a root package description.'); + } + + @override + bool operator ==(Object other) => + other is ResolvedRootDescription && other.description == description; + + @override + int get hashCode => description.hashCode; +} + +class RootDescription extends Description { + final Package package; + + RootDescription(this.package); + @override + String format() { + throw UnsupportedError('Trying to format a root package description.'); + } + + @override + Object? serializeForPubspec({ + required String? containingDir, + required LanguageVersion languageVersion, + }) { + throw UnsupportedError('Trying to serialize the root package description.'); + } + + @override + Source get source => RootSource.instance; + + @override + bool operator ==(Object other) => + other is RootDescription && other.package == package; + + @override + int get hashCode => 'root'.hashCode; +}
diff --git a/lib/src/source/sdk.dart b/lib/src/source/sdk.dart index 1458d1b..f06e833 100644 --- a/lib/src/source/sdk.dart +++ b/lib/src/source/sdk.dart
@@ -17,89 +17,84 @@ /// A package [Source] that gets packages from a hard-coded SDK. class SdkSource extends Source { + static final SdkSource instance = SdkSource._(); + + SdkSource._(); + @override final name = 'sdk'; - @override - BoundSource bind(SystemCache systemCache) => - BoundSdkSource(this, systemCache); - - /// Returns a reference to an SDK package named [name] from [sdk]. - PackageRef refFor(String name, String sdk) => PackageRef(name, this, sdk); - - /// Returns an ID for an SDK package with the given [name] and [version] from - /// [sdk]. - PackageId idFor(String name, Version version, String sdk) => - PackageId(name, this, version, sdk); - /// Parses an SDK dependency. @override PackageRef parseRef(String name, description, - {String? containingPath, LanguageVersion? languageVersion}) { + {String? containingDir, LanguageVersion? languageVersion}) { if (description is! String) { throw FormatException('The description must be an SDK name.'); } - return PackageRef(name, this, description); + return PackageRef(name, SdkDescription(description)); } @override PackageId parseId(String name, Version version, description, - {String? containingPath}) { + {String? containingDir}) { if (description is! String) { throw FormatException('The description must be an SDK name.'); } - return PackageId(name, this, version, description); + return PackageId( + name, + version, + ResolvedSdkDescription(SdkDescription(description)), + ); } @override - bool descriptionsEqual(description1, description2) => - description1 == description2; - - @override - int hashDescription(description) => description.hashCode; -} - -/// The [BoundSource] for [SdkSource]. -class BoundSdkSource extends BoundSource { - @override - final SdkSource source; - - @override - final SystemCache systemCache; - - BoundSdkSource(this.source, this.systemCache); - - @override Future<List<PackageId>> doGetVersions( - PackageRef ref, Duration? 
maxAge) async { - var pubspec = _loadPubspec(ref); - var id = PackageId(ref.name, source, pubspec.version, ref.description); - memoizePubspec(id, pubspec); + PackageRef ref, Duration? maxAge, SystemCache cache) async { + final description = ref.description; + if (description is! SdkDescription) { + throw ArgumentError('Wrong source'); + } + var pubspec = _loadPubspec(ref, cache); + var id = PackageId( + ref.name, + pubspec.version, + ResolvedSdkDescription(description), + ); + // Store the pubspec in memory if we need to refer to it again. + cache.cachedPubspecs[id] = pubspec; return [id]; } @override - Future<Pubspec> doDescribe(PackageId id) async => _loadPubspec(id); + Future<Pubspec> doDescribe( + PackageId id, + SystemCache cache, + ) async => + _loadPubspec(id.toRef(), cache); - /// Loads the pubspec for the Flutter package named [name]. + /// Loads the pubspec for the SDK package named [ref]. /// - /// Throws a [PackageNotFoundException] if [package]'s SDK is unavailable or + /// Throws a [PackageNotFoundException] if [ref]'s SDK is unavailable or /// doesn't contain the package. - Pubspec _loadPubspec(PackageName package) => - Pubspec.load(_verifiedPackagePath(package), systemCache.sources, - expectedName: package.name); + Pubspec _loadPubspec(PackageRef ref, SystemCache cache) => + Pubspec.load(_verifiedPackagePath(ref), cache.sources, + expectedName: ref.name); - /// Returns the path for the given [package]. + /// Returns the path for the given [ref]. /// - /// Throws a [PackageNotFoundException] if [package]'s SDK is unavailable or + /// Throws a [PackageNotFoundException] if [ref]'s SDK is unavailable or /// doesn't contain the package. - String _verifiedPackagePath(PackageName package) { - var identifier = package.description as String?; - var sdk = sdks[identifier!]; + String _verifiedPackagePath(PackageRef ref) { + final description = ref.description; + if (description is! 
SdkDescription) { + throw ArgumentError('Wrong source'); + } + var sdkName = description.sdk; + var sdk = sdks[sdkName]; if (sdk == null) { - throw PackageNotFoundException('unknown SDK "$identifier"'); + throw PackageNotFoundException('unknown SDK "$sdkName"'); } else if (!sdk.isAvailable) { throw PackageNotFoundException( 'the ${sdk.name} SDK is not available', @@ -107,17 +102,18 @@ ); } - var path = sdk.packagePath(package.name); + var path = sdk.packagePath(ref.name); if (path != null) return path; throw PackageNotFoundException( - 'could not find package ${package.name} in the ${sdk.name} SDK'); + 'could not find package ${ref.name} in the ${sdk.name} SDK'); } @override - String getDirectory(PackageId id, {String? relativeFrom}) { + String doGetDirectory(PackageId id, SystemCache cache, + {String? relativeFrom}) { try { - return _verifiedPackagePath(id); + return _verifiedPackagePath(id.toRef()); } on PackageNotFoundException catch (error) { // [PackageNotFoundException]s are uncapitalized and unpunctuated because // they're used within other sentences by the version solver, but @@ -126,3 +122,35 @@ } } } + +class SdkDescription extends Description { + /// The sdk the described package comes from. + final String sdk; + + SdkDescription(this.sdk); + @override + String format() => sdk; + + @override + Object? serializeForPubspec({ + required String? containingDir, + required LanguageVersion languageVersion, + }) { + return sdk; + } + + @override + Source get source => SdkSource.instance; +} + +class ResolvedSdkDescription extends ResolvedDescription { + @override + SdkDescription get description => super.description as SdkDescription; + + ResolvedSdkDescription(SdkDescription description) : super(description); + + @override + Object? serializeForLockfile({required String? containingDir}) { + return description.sdk; + } +}
diff --git a/lib/src/source/unknown.dart b/lib/src/source/unknown.dart index baa46ea..55e5018 100644 --- a/lib/src/source/unknown.dart +++ b/lib/src/source/unknown.dart
@@ -3,6 +3,7 @@ // BSD-style license that can be found in the LICENSE file. import 'dart:async'; +import 'dart:convert'; import 'package:pub_semver/pub_semver.dart'; @@ -24,10 +25,6 @@ UnknownSource(this.name); - @override - BoundSource bind(SystemCache systemCache) => - _BoundUnknownSource(this, systemCache); - /// Two unknown sources are the same if their names are the same. @override bool operator ==(other) => other is UnknownSource && other.name == name; @@ -36,48 +33,86 @@ int get hashCode => name.hashCode; @override - bool descriptionsEqual(description1, description2) => - description1 == description2; - - @override - int hashDescription(description) => description.hashCode; - - @override PackageRef parseRef( String name, - description, { - String? containingPath, + Object? description, { + String? containingDir, LanguageVersion? languageVersion, }) => - PackageRef(name, this, description); + PackageRef(name, UnknownDescription(description, this)); @override - PackageId parseId(String name, Version version, description, - {String? containingPath}) => - PackageId(name, this, version, description); -} - -class _BoundUnknownSource extends BoundSource { - @override - final UnknownSource source; + PackageId parseId(String name, Version version, Object? description, + {String? containingDir}) => + PackageId(name, version, + ResolvedUnknownDescription(UnknownDescription(description, this))); @override - final SystemCache systemCache; - - _BoundUnknownSource(this.source, this.systemCache); - - @override - Future<List<PackageId>> doGetVersions(PackageRef ref, Duration? maxAge) => + Future<List<PackageId>> doGetVersions( + PackageRef ref, Duration? 
maxAge, SystemCache cache) => throw UnsupportedError( - "Cannot get package versions from unknown source '${source.name}'."); + "Cannot get package versions from unknown source '$name'."); @override - Future<Pubspec> doDescribe(PackageId id) => throw UnsupportedError( - "Cannot describe a package from unknown source '${source.name}'."); + Future<Pubspec> doDescribe(PackageId id, SystemCache cache) => + throw UnsupportedError( + "Cannot describe a package from unknown source '$name'."); /// Returns the directory where this package can be found locally. @override - String getDirectory(PackageId id, {String? relativeFrom}) => + String doGetDirectory( + PackageId id, + SystemCache cache, { + String? relativeFrom, + }) => throw UnsupportedError( - "Cannot find a package from an unknown source '${source.name}'."); + "Cannot find a package from an unknown source '$name'."); +} + +class UnknownDescription extends Description { + final Object? description; + @override + final UnknownSource source; + UnknownDescription(this.description, this.source); + + @override + String format() { + return json.encode(description); + } + + @override + Object? serializeForPubspec({ + required String? containingDir, + required LanguageVersion languageVersion, + }) { + throw UnsupportedError( + "Cannot serialize a package description from an unknown source '${source.name}'."); + } + + @override + operator ==(Object other) => + other is UnknownDescription && + source.name == other.source.name && + json.encode(description) == json.encode(other.description); + + @override + int get hashCode => Object.hash(source.name, json.encode(description)); +} + +class ResolvedUnknownDescription extends ResolvedDescription { + ResolvedUnknownDescription(UnknownDescription description) + : super(description); + + @override + Object? serializeForLockfile({required String? 
containingDir}) { + throw UnsupportedError( + "Cannot serialize a package description from an unknown source '${description.source.name}'."); + } + + @override + operator ==(Object other) => + other is ResolvedUnknownDescription && description == other.description; + + @override + int get hashCode => description.hashCode; }
diff --git a/lib/src/source_registry.dart b/lib/src/source_registry.dart deleted file mode 100644 index 7e8aa84..0000000 --- a/lib/src/source_registry.dart +++ /dev/null
@@ -1,86 +0,0 @@ -// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file -// for details. All rights reserved. Use of this source code is governed by a -// BSD-style license that can be found in the LICENSE file. - -import 'source.dart'; -import 'source/git.dart'; -import 'source/hosted.dart'; -import 'source/path.dart'; -import 'source/sdk.dart'; -import 'source/unknown.dart'; - -/// A class that keeps track of [Source]s used for getting packages. -class SourceRegistry { - /// The registered sources. - /// - /// This is initialized with the three built-in sources. - final _sources = { - 'git': GitSource(), - 'hosted': HostedSource(), - 'path': PathSource(), - 'sdk': SdkSource() - }; - - /// The default source, which is used when no source is specified. - /// - /// This defaults to [hosted]. - Source get defaultSource => _default; - late Source _default; - - /// The registered sources, in name order. - List<Source> get all { - var sources = _sources.values.toList(); - sources.sort((a, b) => a.name.compareTo(b.name)); - return sources; - } - - /// The built-in [GitSource]. - GitSource get git => _sources['git'] as GitSource; - - /// The built-in [HostedSource]. - HostedSource get hosted => _sources['hosted'] as HostedSource; - - /// The built-in [PathSource]. - PathSource get path => _sources['path'] as PathSource; - - /// The built-in [SdkSource]. - SdkSource get sdk => _sources['sdk'] as SdkSource; - - SourceRegistry() { - _default = hosted; - } - - /// Sets the default source. - /// - /// This takes a string, which must be the name of a registered source. - void setDefault(String name) { - if (!_sources.containsKey(name)) { - throw StateError('Default source $name is not in the registry'); - } - - _default = _sources[name]!; - } - - /// Registers a new source. - /// - /// This source may not have the same name as a source that's already been - /// registered. 
- void register(Source source) { - if (_sources.containsKey(source.name)) { - throw StateError('Source registry already has a source named ' - '${source.name}'); - } - - _sources[source.name] = source; - } - - /// Returns the source named [name]. - /// - /// Returns an [UnknownSource] if no source with that name has been - /// registered. If [name] is null, returns the default source. - Source? operator [](String? name) { - if (name == null) return _default; - if (_sources.containsKey(name)) return _sources[name]; - return UnknownSource(name); - } -}
diff --git a/lib/src/system_cache.dart b/lib/src/system_cache.dart index dc6eb4a..ebbaa97 100644 --- a/lib/src/system_cache.dart +++ b/lib/src/system_cache.dart
@@ -4,14 +4,18 @@ import 'dart:io'; +import 'package:collection/collection.dart'; import 'package:path/path.dart' as p; +import 'package:pub_semver/pub_semver.dart'; import 'authentication/token_store.dart'; +import 'exceptions.dart'; import 'io.dart'; import 'io.dart' as io show createTempDir; import 'log.dart' as log; import 'package.dart'; import 'package_name.dart'; +import 'pubspec.dart'; import 'source.dart'; import 'source/cached.dart'; import 'source/git.dart'; @@ -19,7 +23,6 @@ import 'source/path.dart'; import 'source/sdk.dart'; import 'source/unknown.dart'; -import 'source_registry.dart'; /// The system-wide cache of downloaded packages. /// @@ -30,6 +33,8 @@ /// The root directory where this package cache is located. final String rootDir; + String rootDirForSource(CachedSource source) => p.join(rootDir, source.name); + String get tempDir => p.join(rootDir, '_temp'); static String defaultDir = (() { @@ -54,69 +59,57 @@ } })(); - /// The registry for sources used by this system cache. - /// - /// New sources registered here will be available through the [source] - /// function. - final sources = SourceRegistry(); + /// The available sources. + late final _sources = + Map.fromIterable([hosted, git, path, sdk], key: (source) => source.name); - /// The sources bound to this cache. - final _boundSources = <Source?, BoundSource>{}; + Source sources(String? name) { + return name == null + ? defaultSource + : (_sources[name] ?? UnknownSource(name)); + } - /// The built-in Git source bound to this cache. - BoundGitSource get git => _boundSources[sources.git] as BoundGitSource; + Source get defaultSource => hosted; - /// The built-in hosted source bound to this cache. - BoundHostedSource get hosted => - _boundSources[sources.hosted] as BoundHostedSource; + /// The built-in Git source. + GitSource get git => GitSource.instance; + + /// The built-in hosted source. + HostedSource get hosted => HostedSource.instance; /// The built-in path source bound to this cache. 
- BoundPathSource get path => _boundSources[sources.path] as BoundPathSource; + PathSource get path => PathSource.instance; /// The built-in SDK source bound to this cache. - BoundSdkSource get sdk => _boundSources[sources.sdk] as BoundSdkSource; - - /// The default source bound to this cache. - BoundSource get defaultSource => source(sources[null]); + SdkSource get sdk => SdkSource.instance; /// The default credential store. final TokenStore tokenStore; + /// If true, cached sources will attempt to use the cached packages for + /// resolution. + final bool isOffline; + /// Creates a system cache and registers all sources in [sources]. /// /// If [isOffline] is `true`, then the offline hosted source will be used. /// Defaults to `false`. - SystemCache({String? rootDir, bool isOffline = false}) + SystemCache({String? rootDir, this.isOffline = false}) : rootDir = rootDir ?? SystemCache.defaultDir, - tokenStore = TokenStore(dartConfigDir) { - for (var source in sources.all) { - if (source is HostedSource) { - _boundSources[source] = source.bind(this, isOffline: isOffline); - } else { - _boundSources[source] = source.bind(this); - } - } - } - - /// Returns the version of [source] bound to this cache. - BoundSource source(Source? source) => - _boundSources.putIfAbsent(source, () => source!.bind(this)); + tokenStore = TokenStore(dartConfigDir); /// Loads the package identified by [id]. /// /// Throws an [ArgumentError] if [id] has an invalid source. 
Package load(PackageId id) { - if (id.source is UnknownSource) { - throw ArgumentError('Unknown source ${id.source}.'); - } - - return Package.load(id.name, source(id.source).getDirectory(id), sources); + return Package.load(id.name, getDirectory(id), sources); } Package loadCached(PackageId id) { - final bound = source(id.source); - if (bound is CachedSource) { - return Package.load(id.name, bound.getDirectoryInCache(id), sources); + final source = id.description.description.source; + if (source is CachedSource) { + return Package.load( + id.name, source.getDirectoryInCache(id, this), sources); } else { throw ArgumentError('Call only on Cached ids.'); } @@ -124,9 +117,11 @@ /// Determines if the system cache contains the package identified by [id]. bool contains(PackageId id) { - var source = this.source(id.source); + final source = id.source; - if (source is CachedSource) return source.isInSystemCache(id); + if (source is CachedSource) { + return source.isInSystemCache(id, this); + } throw ArgumentError('Package $id is not cacheable.'); } @@ -146,4 +141,116 @@ log.fine('Clean up system cache temp directory $tempDir.'); if (dirExists(tempDir)) deleteEntry(tempDir); } + + /// An in-memory cache of pubspecs described by [describe]. + final cachedPubspecs = <PackageId, Pubspec>{}; + + /// Loads the (possibly remote) pubspec for the package version identified by + /// [id]. + /// + /// This may be called for packages that have not yet been downloaded during + /// the version resolution process. Its results are automatically memoized. + /// + /// Throws a [DataException] if the pubspec's version doesn't match [id]'s + /// version. + Future<Pubspec> describe(PackageId id) async { + var pubspec = cachedPubspecs[id] ??= await id.source.doDescribe(id, this); + if (pubspec.version != id.version) { + throw PackageNotFoundException( + 'the pubspec for $id has version ${pubspec.version}', + ); + } + return pubspec; + } + + /// Get the IDs of all versions that match [ref]. 
+ /// + /// Note that this does *not* require the packages to be downloaded locally, + /// which is the point. This is used during version resolution to determine + /// which package versions are available to be downloaded (or already + /// downloaded). + /// + /// By default, this assumes that each description has a single version and + /// uses [describe] to get that version. + /// + /// If [maxAge] is given answers can be taken from cache - up to that age old. + /// + /// If given, the [allowedRetractedVersion] is the only version which can be + /// selected even if it is marked as retracted. Otherwise, all the returned + /// IDs correspond to non-retracted versions. + Future<List<PackageId>> getVersions(PackageRef ref, + {Duration? maxAge, Version? allowedRetractedVersion}) async { + if (ref.isRoot) { + throw ArgumentError('Cannot get versions for the root package.'); + } + var versions = await ref.source.doGetVersions(ref, maxAge, this); + + versions = (await Future.wait(versions.map((id) async { + final packageStatus = await ref.source.status(id, this, maxAge: maxAge); + if (!packageStatus.isRetracted || id.version == allowedRetractedVersion) { + return id; + } + return null; + }))) + .whereNotNull() + .toList(); + + return versions; + } + + /// Returns the directory where this package can (or could) be found locally. + /// + /// If the source is cached, this will be a path in the system cache. + /// + /// If id is a relative path id, the directory will be relative from + /// [relativeFrom]. Returns an absolute path if [relativeFrom] is not passed. + String getDirectory(PackageId id, {String? relativeFrom}) { + return id.source.doGetDirectory(id, this, relativeFrom: relativeFrom); + } + + Future<void> downloadPackage(PackageId id) async { + final source = id.source; + assert(source is CachedSource); + await (source as CachedSource).downloadToSystemCache(id, this); + } + + /// Get the latest version of [package]. 
+ /// + /// Will consider _prereleases_ if: + /// * [allowPrereleases] is true, or, + /// * If [version] is non-null and is a prerelease version and there are no + /// later stable version we return a prerelease version if it exists. + /// + /// Returns `null`, if unable to find the package or if [package] is `null`. + Future<PackageId?> getLatest( + PackageRef? package, { + Version? version, + bool allowPrereleases = false, + }) async { + if (package == null) { + return null; + } + // TODO: Pass some maxAge to getVersions + final available = await getVersions(package); + if (available.isEmpty) { + return null; + } + + available.sort(allowPrereleases + ? (x, y) => x.version.compareTo(y.version) + : (x, y) => Version.prioritize(x.version, y.version)); + var latest = available.last; + + if (version != null && version.isPreRelease && version > latest.version) { + available.sort((x, y) => x.version.compareTo(y.version)); + latest = available.last; + } + + // There should be exactly one entry in [available] matching [latest] + assert(available.where((id) => id.version == latest.version).length == 1); + + return latest; + } } + +typedef SourceRegistry = Source Function(String? name);
diff --git a/lib/src/third_party/tar/README.md b/lib/src/third_party/tar/README.md index 11a8f37..a2a220f 100644 --- a/lib/src/third_party/tar/README.md +++ b/lib/src/third_party/tar/README.md
@@ -1,7 +1,7 @@ # package:tar -Vendored elements from `package:tar` for use in creation and extration of +Vendored elements from `package:tar` for use in creation and extraction of tar-archives. * Repository: `https://github.com/simolus3/tar/` - * Revision: `b5c5a11d8969f458ccdeb8cf01615f692fed3e97` + * Revision: `7cdb563c9894600c6a739ec268f8673d6122006f`
diff --git a/lib/src/third_party/tar/src/constants.dart b/lib/src/third_party/tar/src/constants.dart index aac7669..05accb0 100644 --- a/lib/src/third_party/tar/src/constants.dart +++ b/lib/src/third_party/tar/src/constants.dart
@@ -216,13 +216,6 @@ /// Sticky bit const c_ISVTX = 512; -/// ********************** -/// Convenience constants -/// ********************** -/// 64-bit integer max and min values -const int64MaxValue = 9223372036854775807; -const int64MinValue = -9223372036854775808; - /// Constants to determine file modes. const modeType = 2401763328; const modeSymLink = 134217728;
diff --git a/lib/src/third_party/tar/src/entry.dart b/lib/src/third_party/tar/src/entry.dart index f6b0a5a..160974b 100644 --- a/lib/src/third_party/tar/src/entry.dart +++ b/lib/src/third_party/tar/src/entry.dart
@@ -52,8 +52,17 @@ TarEntry._(this.header, this.contents); /// Creates an in-memory tar entry from the [header] and the [data] to store. - factory TarEntry.data(TarHeader header, List<int> data) { + static SynchronousTarEntry data(TarHeader header, List<int> data) { (header as HeaderImpl).size = data.length; - return TarEntry(header, Stream.value(data)); + return SynchronousTarEntry._(header, data); } } + +/// A tar entry stored in memory. +class SynchronousTarEntry extends TarEntry { + /// The contents of this tar entry as a byte array. + final List<int> data; + + SynchronousTarEntry._(TarHeader header, this.data) + : super._(header, Stream.value(data)); +}
diff --git a/lib/src/third_party/tar/src/reader.dart b/lib/src/third_party/tar/src/reader.dart index 713235a..b9bc3d3 100644 --- a/lib/src/third_party/tar/src/reader.dart +++ b/lib/src/third_party/tar/src/reader.dart
@@ -22,14 +22,10 @@ /// to read each archive where possible. @sealed class TarReader implements StreamIterator<TarEntry> { - /// A chunked stream iterator to enable us to get our data. - final ChunkedStreamReader<int> _chunkedStream; + final BlockReader _reader; final PaxHeaders _paxHeaders = PaxHeaders(); final int _maxSpecialFileSize; - /// Skip the next [_skipNext] elements when reading in the stream. - int _skipNext = 0; - TarEntry? _current; /// The underlying content stream for the [_current] entry. Draining this @@ -88,7 +84,7 @@ TarReader(Stream<List<int>> tarStream, {int maxSpecialFileSize = defaultSpecialLength, bool disallowTrailingData = false}) - : _chunkedStream = ChunkedStreamReader(tarStream), + : _reader = BlockReader(tarStream), _checkNoTrailingData = disallowTrailingData, _maxSpecialFileSize = maxSpecialFileSize; @@ -152,13 +148,7 @@ // iterates through one or more "header files" until it finds a // "normal file". while (true) { - if (_skipNext > 0) { - await _readFullBlock(_skipNext); - _skipNext = 0; - } - - final rawHeader = - await _readFullBlock(blockSize, allowEmpty: eofAcceptable); + final rawHeader = await _readFullBlock(allowEmpty: eofAcceptable); nextHeader = await _readHeader(rawHeader); if (nextHeader == null) { @@ -180,19 +170,21 @@ nextHeader.typeFlag == TypeFlag.xGlobalHeader) { format = format.mayOnlyBe(TarFormat.pax); final paxHeaderSize = _checkSpecialSize(nextHeader.size); - final rawPaxHeaders = await _readFullBlock(paxHeaderSize); + + final rawPaxHeaders = + (await _readFullBlock(amount: numBlocks(paxHeaderSize))) + .sublistView(0, paxHeaderSize); _paxHeaders.readPaxHeaders( rawPaxHeaders, nextHeader.typeFlag == TypeFlag.xGlobalHeader); - _markPaddingToSkip(paxHeaderSize); // This is a meta header affecting the next header. 
continue; } else if (nextHeader.typeFlag == TypeFlag.gnuLongLink || nextHeader.typeFlag == TypeFlag.gnuLongName) { format = format.mayOnlyBe(TarFormat.gnu); - final realName = await _readFullBlock( - _checkSpecialSize(nextBlockSize(nextHeader.size))); + final size = _checkSpecialSize(nextHeader.size); + final realName = await _readFullBlock(amount: numBlocks(size)); final readName = realName.readString(0, realName.length); if (nextHeader.typeFlag == TypeFlag.gnuLongName) { @@ -247,7 +239,7 @@ // Note: Calling cancel is safe when the stream has already been completed. // It's a noop in that case, which is what we want. - return _chunkedStream.cancel(); + return _reader.close(); } /// Utility function for quickly iterating through all entries in [tarStream]. @@ -317,7 +309,7 @@ Uint8List block; do { - block = await _chunkedStream.readBytes(blockSize); + block = await _reader.nextBlock(); if (!block.isAllZeroes) { throw TarException( 'Illegal content after the end of the tar archive.'); @@ -333,15 +325,24 @@ throw TarException.header('Unexpected end of file'); } - /// Reads a block with the requested [size], or throws an unexpected EoF - /// exception. - Future<Uint8List> _readFullBlock(int size, {bool allowEmpty = false}) async { - final block = await _chunkedStream.readBytes(size); - if (block.length != size && !(allowEmpty && block.isEmpty)) { - _unexpectedEof(); - } + /// Reads [amount] blocks from the input stream, or throws an exception if + /// the stream ends prematurely. 
+ Future<Uint8List> _readFullBlock({bool allowEmpty = false, int amount = 1}) { + final blocks = Uint8List(amount * blockSize); + var offset = 0; - return block; + return _reader.nextBlocks(amount).forEach((chunk) { + blocks.setAll(offset, chunk); + offset += chunk.length; + }).then((void _) { + if (allowEmpty && offset == 0) { + return Uint8List(0); + } else if (offset < blocks.length) { + _unexpectedEof(); + } else { + return blocks; + } + }); } /// Reads the next block header and assumes that the underlying reader @@ -357,7 +358,7 @@ if (rawHeader.isEmpty) return null; if (rawHeader.isAllZeroes) { - rawHeader = await _chunkedStream.readBytes(blockSize); + rawHeader = await _reader.nextBlock(); // Exactly 1 block of zeroes is read and EOF is hit. if (rawHeader.isEmpty) return null; @@ -393,9 +394,9 @@ final sparseDataLength = sparseData.fold<int>(0, (value, element) => value + element.length); - final streamLength = nextBlockSize(sparseDataLength); - final safeStream = - _publishStream(_chunkedStream.readStream(streamLength), streamLength); + final streamBlockCount = numBlocks(sparseDataLength); + final safeStream = _publishStream( + _reader.nextBlocks(streamBlockCount), streamBlockCount * blockSize); return sparseStream(safeStream, sparseHoles, header.size); } else { var size = header.size; @@ -408,9 +409,8 @@ if (size == 0) { return _publishStream(const Stream<Never>.empty(), 0); } else { - _markPaddingToSkip(size); - return _publishStream( - _chunkedStream.readStream(header.size), header.size); + final blockCount = numBlocks(header.size); + return _publishStream(_reader.nextBlocks(blockCount), header.size); } } } @@ -424,7 +424,37 @@ // There can only be one content stream at a time. This precondition is // checked by _prepareToReadHeaders. assert(_underlyingContentStream == null); - return _underlyingContentStream = Stream.eventTransformed(stream, (sink) { + Stream<List<int>>? 
thisStream;
+
+    return thisStream =
+        _underlyingContentStream = Stream.eventTransformed(stream, (sink) {
+      // This callback is called when we have a listener. Make sure that, at
+      // this point, this stream is still the active content stream.
+      // If users store the contents of a tar header, then read more tar
+      // entries, and finally try to read the stream of the old contents, they'd
+      // get an exception about the stream already being listened to.
+      // This can be a bit confusing, so this check enables a better error UX.
+      if (thisStream != _underlyingContentStream) {
+        throw StateError(
+          'Tried listening to an outdated tar entry. \n'
+          'As all tar entries found by a reader are backed by a single source '
+          'stream, only the latest tar entry can be read. It looks like you '
+          'stored the results of `tarEntry.contents` somewhere, called '
+          '`reader.moveNext()` and then read the contents of the previous '
+          'entry.\n'
+          'For more details, including a discussion of workarounds, see '
+          'https://github.com/simolus3/tar/issues/18',
+        );
+      } else if (_listenedToContentsOnce) {
+        throw StateError(
+          'A tar entry has been listened to multiple times. \n'
+          'As all tar entries are read from what\'s likely a single-'
+          'subscription stream, this is unsupported. If you didn\'t read a tar '
+          'entry multiple times yourself, perhaps you\'ve called `moveNext()` '
+          'before reading contents?',
+        );
+      }
+      _listenedToContentsOnce = true;

       late _OutgoingStreamGuard guard;
@@ -432,7 +462,7 @@
         length,
         sink,
         // Reset state when the stream is done. This will only be called when
-        // the sream is done, not when a listener cancels.
+        // the stream is done, not when a listener cancels.
         () {
           _underlyingContentStream = null;
           if (guard.hadError) {
@@ -443,15 +473,6 @@
     });
   }

-  /// Skips to the next block after reading [readSize] bytes from the beginning
-  /// of a previous block.
- void _markPaddingToSkip(int readSize) { - final offsetInLastBlock = readSize.toUnsigned(blockSizeLog2); - if (offsetInLastBlock != 0) { - _skipNext = blockSize - offsetInLastBlock; - } - } - /// Checks the PAX headers for GNU sparse headers. /// If they are found, then this function reads the sparse map and returns it. /// This assumes that 0.0 headers have already been converted to 0.1 headers @@ -519,7 +540,7 @@ /// Ensures that [block] h as at least [n] tokens. Future<void> feedTokens(int n) async { while (newLineCount < n) { - final newBlock = await _chunkedStream.readBytes(blockSize); + final newBlock = await _readFullBlock(); if (newBlock.length < blockSize) { throw TarException.header( 'GNU Sparse Map does not have enough lines!'); @@ -639,54 +660,44 @@ throw TarException.header('Tried to read sparse map of non-GNU header'); } + // Read the real size of the file when sparse holes are expanded. header.size = rawHeader.readNumeric(483, 12); - final sparseMaps = <Uint8List>[]; + final sparseEntries = <SparseEntry>[]; - var sparse = rawHeader.sublistView(386, 483); - sparseMaps.add(sparse); + bool readEntry(Uint8List source, int offset) { + // If a sparse header starts with a null byte, it marks the end of the + // sparse structures. + if (rawHeader[offset] == 0) return false; - while (true) { - final maxEntries = sparse.length ~/ 24; - if (sparse[24 * maxEntries] > 0) { - // If there are more entries, read an extension header and parse its - // entries. 
-      sparse = await _chunkedStream.readBytes(blockSize);
-      sparseMaps.add(sparse);
-      continue;
+    final fileOffset = source.readNumeric(offset, 12);
+    final length = source.readNumeric(offset + 12, 12);
+
+    sparseEntries.add(SparseEntry(fileOffset, length));
+    return true;
+  }
+
+  // The first four sparse headers are stored in the tar header itself
+  for (var i = 0; i < 4; i++) {
+    final offset = 386 + 24 * i;
+    if (!readEntry(rawHeader, offset)) break;
+  }
+
+  var isExtended = rawHeader[482] != 0;
+
+  while (isExtended) {
+    // Ok, we have a new block of sparse headers to process
+    final block = await _readFullBlock();
+
+    // A full block of sparse data contains up to 21 entries
+    for (var i = 0; i < 21; i++) {
+      if (!readEntry(block, i * 24)) break;
     }
-      break;
+    // The last byte indicates whether another sparse header block follows.
+    isExtended = block[504] != 0;
   }

-    try {
-      return _processOldGNUSparseMap(sparseMaps);
-    } on FormatException {
-      throw TarException('Invalid old GNU Sparse Map');
-    }
-  }
-
-  /// Process [sparseMaps], which is known to be an OLD GNU v0.1 sparse map.
-  ///
-  /// For details, see https://www.gnu.org/software/tar/manual/html_section/tar_94.html#SEC191
-  List<SparseEntry> _processOldGNUSparseMap(List<Uint8List> sparseMaps) {
-    final sparseData = <SparseEntry>[];
-
-    for (final sparseMap in sparseMaps) {
-      final maxEntries = sparseMap.length ~/ 24;
-      for (var i = 0; i < maxEntries; i++) {
-        // This termination condition is identical to GNU and BSD tar.
- if (sparseMap[i * 24] == 0) { - // Don't return, need to process extended headers (even if empty) - break; - } - - final offset = sparseMap.readNumeric(i * 24, 12); - final length = sparseMap.readNumeric(i * 24 + 12, 12); - - sparseData.add(SparseEntry(offset, length)); - } - } - return sparseData; + return sparseEntries; } } @@ -703,10 +714,6 @@ _globalHeaders.addAll(headers); } - void addLocal(String key, String value) => _localHeaders[key] = value; - - void removeLocal(String key) => _localHeaders.remove(key); - /// Applies new local PAX-headers from the map. /// /// This replaces all currently active local headers. @@ -792,18 +799,20 @@ // Skip over the equals sign offset = nextEquals + 1; - // Subtract one for trailing newline + // Subtract one for trailing newline for value final endOfValue = endOfEntry - 1; - final value = utf8.decoder.convert(data, offset, endOfValue); - if (!_isValidPaxRecord(key, value)) { + if (!_isValidPaxKey(key)) { error(); } // If we're seeing weird PAX Version 0.0 sparse keys, expect alternating // GNU.sparse.offset and GNU.sparse.numbytes headers. if (key == paxGNUSparseNumBytes || key == paxGNUSparseOffset) { - if ((sparseMap.length.isEven && key != paxGNUSparseOffset) || + final value = utf8.decoder.convert(data, offset, endOfValue); + + if (!_isValidPaxRecord(key, value) || + (sparseMap.length.isEven && key != paxGNUSparseOffset) || (sparseMap.length.isOdd && key != paxGNUSparseNumBytes) || value.contains(',')) { error(); @@ -813,6 +822,12 @@ } else if (!ignoreUnknown || supportedPaxHeaders.contains(key)) { // Ignore unrecognized headers to avoid unbounded growth of the global // header map. 
+ final value = unsafeUtf8Decoder.convert(data, offset, endOfValue); + + if (!_isValidPaxRecord(key, value)) { + error(); + } + map[key] = value; } @@ -837,16 +852,23 @@ } } + // NB: Some Tar files have malformed UTF-8 data in the headers, we should + // decode them anyways even if they're broken + static const unsafeUtf8Decoder = Utf8Decoder(allowMalformed: true); + + static bool _isValidPaxKey(String key) { + // These limitations are documented in the PAX standard. + return key.isNotEmpty && !key.contains('=') & !key.codeUnits.contains(0); + } + /// Checks whether [key], [value] is a valid entry in a pax header. /// /// This is adopted from the Golang tar reader (`validPAXRecord`), which says /// that "Keys and values should be UTF-8, but the number of bad writers out /// there forces us to be a more liberal." static bool _isValidPaxRecord(String key, String value) { - // These limitations are documented in the PAX standard. - if (key.isEmpty || key.contains('=')) return false; - - // These aren't, but Golangs's tar has them and got away with it. + // These aren't documented in any standard, but Golangs's tar has them and + // got away with it. switch (key) { case paxPath: case paxLinkpath: @@ -854,7 +876,7 @@ case paxGname: return !value.codeUnits.contains(0); default: - return !key.codeUnits.contains(0); + return true; } } } @@ -864,27 +886,50 @@ /// [ChunkedStreamReader.readStream] might return a stream shorter than /// expected. That indicates an invalid tar file though, since the correct size /// is stored in the header. 
-class _OutgoingStreamGuard extends EventSink<List<int>> { - final int expectedSize; +class _OutgoingStreamGuard extends EventSink<Uint8List> { + int remainingContentSize; + int remainingPaddingSize; + final EventSink<List<int>> out; void Function() onDone; - int emittedSize = 0; bool hadError = false; + bool isInContent = true; - _OutgoingStreamGuard(this.expectedSize, this.out, this.onDone); + _OutgoingStreamGuard(this.remainingContentSize, this.out, this.onDone) + : remainingPaddingSize = _paddingFor(remainingContentSize); + + static int _paddingFor(int contentSize) { + final offsetInLastBlock = contentSize.toUnsigned(blockSizeLog2); + if (offsetInLastBlock != 0) { + return blockSize - offsetInLastBlock; + } + return 0; + } @override - void add(List<int> event) { - emittedSize += event.length; - // We have checks limiting the length of outgoing streams. If the stream is - // larger than expected, that's a bug in pkg:tar. - assert( - emittedSize <= expectedSize, - 'Stream now emitted $emittedSize bytes, but only expected ' - '$expectedSize'); + void add(Uint8List event) { + if (isInContent) { + if (event.length <= remainingContentSize) { + // We can fully add this chunk as it consists entirely of data + out.add(event); + remainingContentSize -= event.length; + } else { + // We can add the first bytes as content, the others are padding that we + // shouldn't emit + out.add(event.sublistView(0, remainingContentSize)); + isInContent = false; + remainingPaddingSize -= event.length - remainingContentSize; + remainingContentSize = 0; + } + } else { + // Ok, the entire event is padding + remainingPaddingSize -= event.length; + } - out.add(event); + // The underlying stream comes from pkg:tar, so if we get too many bytes + // that's a bug in this package. 
+ assert(remainingPaddingSize >= 0, 'Stream emitted to many bytes'); } @override @@ -895,15 +940,14 @@ @override void close() { - onDone(); - // If the stream stopped after an error, the user is already aware that // something is wrong. - if (emittedSize < expectedSize && !hadError) { + if (remainingContentSize > 0 && !hadError) { out.addError( TarException('Unexpected end of tar file'), StackTrace.current); } + onDone(); out.close(); } }
diff --git a/lib/src/third_party/tar/src/utils.dart b/lib/src/third_party/tar/src/utils.dart index a6b7e13..4fa75b1 100644 --- a/lib/src/third_party/tar/src/utils.dart +++ b/lib/src/third_party/tar/src/utils.dart
@@ -1,3 +1,4 @@ +import 'dart:async'; import 'dart:convert'; import 'dart:math'; import 'dart:typed_data'; @@ -92,25 +93,31 @@ } int computeUnsignedHeaderChecksum() { - var result = 0; + // Accessing the last element first helps the VM eliminate bounds checks in + // the loops below. + this[blockSize - 1]; + var result = checksumLength * _checksumPlaceholder; - for (var i = 0; i < length; i++) { - result += (i < checksumOffset || i >= _checksumEnd) - ? this[i] // Not in range of where the checksum is written - : _checksumPlaceholder; + for (var i = 0; i < checksumOffset; i++) { + result += this[i]; + } + for (var i = _checksumEnd; i < blockSize; i++) { + result += this[i]; } return result; } int computeSignedHeaderChecksum() { - var result = 0; + this[blockSize - 1]; + // Note that _checksumPlaceholder.toSigned(8) == _checksumPlaceholder + var result = checksumLength * _checksumPlaceholder; - for (var i = 0; i < length; i++) { - // Note that _checksumPlaceholder.toSigned(8) == _checksumPlaceholder - result += (i < checksumOffset || i >= _checksumEnd) - ? this[i].toSigned(8) - : _checksumPlaceholder; + for (var i = 0; i < checksumOffset; i++) { + result += this[i].toSigned(8); + } + for (var i = _checksumEnd; i < blockSize; i++) { + result += this[i].toSigned(8); } return result; @@ -123,6 +130,14 @@ return true; } + + bool get isAllZeroes { + for (var i = 0; i < length; i++) { + if (this[i] != 0) return false; + } + + return true; + } } bool isNotAscii(int i) => i > 128; @@ -200,14 +215,6 @@ final $this = this; return $this is Uint8List ? $this : Uint8List.fromList(this); } - - bool get isAllZeroes { - for (var i = 0; i < length; i++) { - if (this[i] != 0) return false; - } - - return true; - } } /// Generates a chunked stream of [length] zeroes. @@ -229,3 +236,325 @@ yield Uint8List(remainingBytes); } } + +/// An optimized reader reading 512-byte blocks from an input stream. +class BlockReader { + final Stream<List<int>> _input; + StreamSubscription<List<int>>? 
_subscription; + bool _isClosed = false; + + /// If a request is active, returns the current stream that we're reporting. + /// This controler is synchronous. + StreamController<Uint8List>? _outgoing; + + /// The amount of (512-byte) blocks remaining before [_outgoing] should close. + int _remainingBlocksInOutgoing = 0; + + /// A pending tar block that has not been emitted yet. + /// + /// This can happen if we receive small chunks of data in [_onData] that + /// aren't enough to form a full block. + final Uint8List _pendingBlock = Uint8List(blockSize); + + /// The offset in [_pendingBlock] at which new data should start. + /// + /// For instance, if this value is `502`, we're missing `10` additional bytes + /// to complete the [_pendingBlock]. + /// When this value is `0`, there is no active pending block. + int _offsetInPendingBlock = 0; + + /// Additional data that we received, but were unable to dispatch to a + /// downstream listener yet. + /// + /// This can happen if a we receive a large chunk of data and a listener is + /// only interested in a small chunk. + /// + /// We will never have trailing data and a pending block at the same time. + /// When we haver fewer than 512 bytes of trailing data, it should be stored + /// as a pending block instead. + Uint8List? _trailingData; + + /// The offset in the [_trailingData] byte array. + /// + /// When a new listener attaches, we can start by emitting the sublist + /// starting at this offset. + int _offsetInTrailingData = 0; + + BlockReader(this._input); + + /// Emits full blocks. + /// + /// Returns `true` if the listener detached in response to emitting these + /// blocks. In this case, remaining data must be saved in [_trailingData]. 
+ bool _emitBlocks(Uint8List data, {int amount = 1}) { + assert(_remainingBlocksInOutgoing >= amount); + final outgoing = _outgoing!; + + if (!outgoing.isClosed) outgoing.add(data); + + final remainingNow = _remainingBlocksInOutgoing -= amount; + if (remainingNow == 0) { + _outgoing = null; + _pause(); + + scheduleMicrotask(() { + outgoing.close(); + }); + return true; + } else if (outgoing.isPaused || outgoing.isClosed) { + _pause(); + return true; + } + + return false; + } + + void _onData(List<int> data) { + assert(_outgoing != null && _trailingData == null); + + final typedData = data.asUint8List(); + var offsetInData = 0; + + /// Saves parts of the current chunks that couldn't be emitted. + void saveTrailingState() { + assert(_trailingData == null && _offsetInPendingBlock == 0); + + final remaining = typedData.length - offsetInData; + + if (remaining == 0) { + return; // Nothing to save, the chunk has been consumed fully. + } else if (remaining < blockSize) { + // Store remaining data as a pending block. + _pendingBlock.setAll(0, typedData.sublistView(offsetInData)); + _offsetInPendingBlock = remaining; + } else { + _trailingData = typedData; + _offsetInTrailingData = offsetInData; + } + } + + // Try to complete a pending block first + var offsetInPending = _offsetInPendingBlock; + final canWriteIntoPending = min(blockSize - offsetInPending, data.length); + + if (offsetInPending != 0 && canWriteIntoPending > 0) { + _pendingBlock.setAll( + offsetInPending, typedData.sublistView(0, canWriteIntoPending)); + offsetInPending = _offsetInPendingBlock += canWriteIntoPending; + offsetInData += canWriteIntoPending; + + // Did this finish the pending block? + if (offsetInPending == blockSize) { + _offsetInPendingBlock = 0; + if (_emitBlocks(_pendingBlock)) { + // Emitting the pending block completed all pending requests. + saveTrailingState(); + return; + } + } else { + // The chunk we received didn't fill up the pending block, so just stop + // here. 
+ assert(offsetInData == data.length); + return; + } + } + + // At this point, the pending block should have been served. + assert(_offsetInPendingBlock == 0); + + final fullBlocksToEmit = min(_remainingBlocksInOutgoing, + (typedData.length - offsetInData) ~/ blockSize); + + if (fullBlocksToEmit > 0) { + _emitBlocks( + typedData.sublistView( + offsetInData, offsetInData += fullBlocksToEmit * blockSize), + amount: fullBlocksToEmit, + ); + } + + saveTrailingState(); + } + + void _onError(Object error, StackTrace trace) { + assert(_outgoing != null && _trailingData == null); + + _outgoing!.addError(error, trace); + } + + void _onDone() { + assert(_outgoing != null && _trailingData == null); + final outgoing = _outgoing!; + + // Add pending data, then close + if (_offsetInPendingBlock != 0) { + outgoing.add(_pendingBlock.sublistView(0, _offsetInPendingBlock)); + } + + _isClosed = true; + _subscription?.cancel(); + outgoing.close(); + } + + void _subscribeOrResume() { + // We should not resume the subscription if there is trailing data ready to + // be emitted. 
+ assert(_trailingData == null); + + final sub = _subscription; + if (sub == null) { + _subscription = _input.listen(_onData, + onError: _onError, onDone: _onDone, cancelOnError: true); + } else { + sub.resume(); + } + } + + void _pause() { + final sub = _subscription!; // ignore: cancel_subscriptions + + if (!sub.isPaused) sub.pause(); + } + + Future<Uint8List> nextBlock() { + final result = Uint8List(blockSize); + var offset = 0; + + return nextBlocks(1).forEach((chunk) { + result.setAll(offset, chunk); + offset += chunk.length; + }).then((void _) => result.sublistView(0, offset)); + } + + Stream<Uint8List> nextBlocks(int amount) { + if (_isClosed || amount == 0) { + return const Stream.empty(); + } + if (_outgoing != null) { + throw StateError( + 'Cannot call nextBlocks() before the previous stream completed.'); + } + assert(_remainingBlocksInOutgoing == 0); + + // We're making this synchronous because we will mostly add events in + // response to receiving chunks from the source stream. We manually ensure + // that other emits are happening asynchronously. + final controller = StreamController<Uint8List>(sync: true); + _outgoing = controller; + _remainingBlocksInOutgoing = amount; + + var state = _StreamState.initial; + + /// Sends trailing data to the stream. Reeturns true if the subscription + /// should still be resumed afterwards. + bool emitTrailing() { + // Attempt to serve requests from pending data first. 
+ final trailing = _trailingData; + if (trailing != null) { + // There should never be trailing data and a pending block at the + // same time + assert(_offsetInPendingBlock == 0); + + var remaining = trailing.length - _offsetInTrailingData; + // If there is trailing data, it should contain a full block + // (otherwise we would have stored it as a pending block) + assert(remaining >= blockSize); + + final blocks = min(_remainingBlocksInOutgoing, remaining ~/ blockSize); + assert(blocks > 0); + + final done = _emitBlocks( + trailing.sublistView(_offsetInTrailingData, + _offsetInTrailingData + blocks * blockSize), + amount: blocks); + + remaining -= blocks * blockSize; + _offsetInTrailingData += blocks * blockSize; + + if (remaining == 0) { + _trailingData = null; + _offsetInTrailingData = 0; + } else if (remaining < blockSize) { + assert(_offsetInPendingBlock == 0); + + // Move trailing data into a pending block + _pendingBlock.setAll(0, trailing.sublistView(_offsetInTrailingData)); + _offsetInPendingBlock = remaining; + _trailingData = null; + _offsetInTrailingData = 0; + } else { + // If there is still more than a full block of data waiting, we + // should not listen. This implies that the stream is done already. + assert(done); + } + + // The listener detached in response to receiving the event. 
+ if (done) { + if (_remainingBlocksInOutgoing == 0) state = _StreamState.done; + return false; + } + } + + return true; + } + + void scheduleInitialEmit() { + scheduleMicrotask(() { + if (state != _StreamState.initial) return; + state = _StreamState.attached; + + if (emitTrailing()) { + _subscribeOrResume(); + } + }); + } + + controller + ..onListen = scheduleInitialEmit + ..onPause = () { + assert(state == _StreamState.initial || state == _StreamState.attached); + + if (state == _StreamState.initial) { + state = _StreamState.pausedAfterInitial; + } else { + _pause(); + state = _StreamState.pausedAfterAttached; + } + } + ..onResume = () { + // We're done already + if (_remainingBlocksInOutgoing == 0) return; + + assert(state == _StreamState.pausedAfterAttached || + state == _StreamState.pausedAfterInitial); + + if (state == _StreamState.pausedAfterInitial) { + state = _StreamState.initial; + scheduleInitialEmit(); + } else { + state = _StreamState.attached; + if (emitTrailing()) { + _subscribeOrResume(); + } + } + } + ..onCancel = () { + state = _StreamState.done; + }; + + return controller.stream; + } + + FutureOr<void> close() { + _isClosed = true; + return _subscription?.cancel(); + } +} + +enum _StreamState { + initial, + attached, + pausedAfterInitial, + pausedAfterAttached, + done, +}
diff --git a/lib/src/third_party/tar/src/writer.dart b/lib/src/third_party/tar/src/writer.dart index 71ab368..5ff92b9 100644 --- a/lib/src/third_party/tar/src/writer.dart +++ b/lib/src/third_party/tar/src/writer.dart
@@ -99,6 +99,33 @@ return _WritingSink(output, format); } +/// A synchronous encoder for in-memory tar files. +/// +/// The default [tarWriter] creates an asynchronous conversion from a stream of +/// tar entries to a byte stream. +/// When all tar entries are in-memory ([SynchronousTarEntry]), it is possible +/// to write them synchronously too. +/// +/// To create a tar archive consisting of a single entry, use +/// [Converter.convert] on this [tarConverter]. +/// To create a tar archive consisting of any number of entries, first call +/// [Converter.startChunkedConversion] with a suitable output sink. Next, call +/// [Sink.add] for each tar entry and finish the archive by calling +/// [Sink.close]. +/// +/// To change the output format of the tar converter, use [tarConverterWith]. +/// To encode any kind of tar entries, use the asynchronous [tarWriter]. +const Converter<SynchronousTarEntry, List<int>> tarConverter = + _SynchronousTarConverter(OutputFormat.pax); + +/// A synchronous encoder for in-memory tar files, with custom encoding options. +/// +/// For more information on how to use the converter, see [tarConverter]. +Converter<SynchronousTarEntry, List<int>> tarConverterWith( + {OutputFormat format = OutputFormat.pax}) { + return _SynchronousTarConverter(format); +} + /// This option controls how long file and link names should be written. /// /// This option can be passed to writer in [tarWritingSink] or[tarWriterWith]. 
@@ -127,16 +154,15 @@ class _WritingSink extends StreamSink<TarEntry> { final StreamSink<List<int>> _output; - final OutputFormat format; - - int _paxHeaderCount = 0; + final _SynchronousTarSink _synchronousWriter; bool _closed = false; final Completer<Object?> _done = Completer(); int _pendingOperations = 0; Future<void> _ready = Future.value(); - _WritingSink(this._output, this.format); + _WritingSink(this._output, OutputFormat format) + : _synchronousWriter = _SynchronousTarSink(_output, format); @override Future<void> get done => _done.future; @@ -175,6 +201,120 @@ size = bufferedData.length; } + _synchronousWriter._writeHeader(header, size); + + // Write content. + if (bufferedData != null) { + _output.add(bufferedData); + } else { + await _output.addStream(event.contents); + } + + _output.add(_paddingBytes(size)); + } + + @override + void addError(Object error, [StackTrace? stackTrace]) { + _output.addError(error, stackTrace); + } + + @override + Future<void> addStream(Stream<TarEntry> stream) async { + await for (final entry in stream) { + await add(entry); + } + } + + @override + Future<void> close() async { + if (!_closed) { + _closed = true; + + // Add two empty blocks at the end. 
+ await _doWork(_synchronousWriter.close); + } + + return done; + } +} + +Uint8List _paddingBytes(int size) { + final padding = -size % blockSize; + assert((size + padding) % blockSize == 0 && + padding <= blockSize && + padding >= 0); + + return Uint8List(padding); +} + +class _SynchronousTarConverter + extends Converter<SynchronousTarEntry, List<int>> { + final OutputFormat format; + + const _SynchronousTarConverter(this.format); + + @override + Sink<SynchronousTarEntry> startChunkedConversion(Sink<List<int>> sink) { + return _SynchronousTarSink(sink, format); + } + + @override + List<int> convert(SynchronousTarEntry input) { + final output = BytesBuilder(copy: false); + startChunkedConversion(ByteConversionSink.withCallback(output.add)) + ..add(input) + ..close(); + + return output.takeBytes(); + } +} + +class _SynchronousTarSink extends Sink<SynchronousTarEntry> { + final OutputFormat _format; + final Sink<List<int>> _output; + + bool _closed = false; + int _paxHeaderCount = 0; + + _SynchronousTarSink(this._output, this._format); + + @override + void add(SynchronousTarEntry data) { + addHeaderAndData(data.header, data.data); + } + + void addHeaderAndData(TarHeader header, List<int> data) { + _throwIfClosed(); + + _writeHeader(header, data.length); + _output + ..add(data) + ..add(_paddingBytes(data.length)); + } + + @override + void close() { + if (_closed) return; + + // End the tar archive by writing two zero blocks. + _output + ..add(UnmodifiableUint8ListView(zeroBlock)) + ..add(UnmodifiableUint8ListView(zeroBlock)); + _output.close(); + + _closed = true; + } + + void _throwIfClosed() { + if (_closed) { + throw StateError('Encoder is closed. ' + 'After calling `endOfArchive()`, encoder must not be used.'); + } + } + + void _writeHeader(TarHeader header, int size) { + assert(header.size < 0 || header.size == size); + var nameBytes = utf8.encode(header.name); var linkBytes = utf8.encode(header.linkName ?? ''); var gnameBytes = utf8.encode(header.groupName ?? 
''); @@ -209,10 +349,10 @@ } if (paxHeader.isNotEmpty) { - if (format == OutputFormat.pax) { - await _writePaxHeader(paxHeader); + if (_format == OutputFormat.pax) { + _writePaxHeader(paxHeader); } else { - await _writeGnuLongName(paxHeader); + _writeGnuLongName(paxHeader); } } @@ -238,24 +378,13 @@ checksum += byte; } headerBlock.setUint(checksum, 148, 8); - _output.add(headerBlock); - - // Write content. - if (bufferedData != null) { - _output.add(bufferedData); - } else { - await event.contents.forEach(_output.add); - } - - final padding = -size % blockSize; - _output.add(Uint8List(padding)); } - /// Writes an extended pax header. + /// Encodes an extended pax header. /// /// https://pubs.opengroup.org/onlinepubs/9699919799/utilities/pax.html#tag_20_92_13_03 - Future<void> _writePaxHeader(Map<String, List<int>> values) { + void _writePaxHeader(Map<String, List<int>> values) { final buffer = BytesBuilder(); // format of each entry: "%d %s=%s\n", <length>, <keyword>, <value> // note that the length includes the trailing \n and the length description @@ -287,7 +416,7 @@ }); final paxData = buffer.takeBytes(); - final file = TarEntry.data( + addHeaderAndData( HeaderImpl.internal( format: TarFormat.pax, modified: millisecondsSinceEpoch(0), @@ -298,10 +427,9 @@ ), paxData, ); - return _safeAdd(file); } - Future<void> _writeGnuLongName(Map<String, List<int>> values) async { + void _writeGnuLongName(Map<String, List<int>> values) { // Ensure that a file that can't be written in the GNU format is not written const allowedKeys = {paxPath, paxLinkpath}; final invalidOptions = values.keys.toSet()..removeAll(allowedKeys); @@ -316,54 +444,25 @@ final name = values[paxPath]; final linkName = values[paxLinkpath]; - Future<void> write(List<int> name, TypeFlag flag) { - return _safeAdd( - TarEntry.data( - HeaderImpl.internal( - name: '././@LongLink', - modified: millisecondsSinceEpoch(0), - format: TarFormat.gnu, - typeFlag: flag, - ), - name, + void create(List<int> name, 
TypeFlag flag) { + return addHeaderAndData( + HeaderImpl.internal( + name: '././@LongLink', + modified: millisecondsSinceEpoch(0), + format: TarFormat.gnu, + typeFlag: flag, ), + name, ); } if (name != null) { - await write(name, TypeFlag.gnuLongName); + create(name, TypeFlag.gnuLongName); } if (linkName != null) { - await write(linkName, TypeFlag.gnuLongLink); + create(linkName, TypeFlag.gnuLongLink); } } - - @override - void addError(Object error, [StackTrace? stackTrace]) { - _output.addError(error, stackTrace); - } - - @override - Future<void> addStream(Stream<TarEntry> stream) async { - await for (final entry in stream) { - await add(entry); - } - } - - @override - Future<void> close() async { - if (!_closed) { - _closed = true; - - // Add two empty blocks at the end. - await _doWork(() { - _output.add(zeroBlock); - _output.add(zeroBlock); - }); - } - - return done; - } } extension on Uint8List {
diff --git a/lib/src/third_party/tar/tar.dart b/lib/src/third_party/tar/tar.dart index 218a6a2..14247bd 100644 --- a/lib/src/third_party/tar/tar.dart +++ b/lib/src/third_party/tar/tar.dart
@@ -9,7 +9,7 @@ import 'src/writer.dart'; export 'src/constants.dart' show TypeFlag; -export 'src/entry.dart'; +export 'src/entry.dart' show TarEntry, SynchronousTarEntry; export 'src/exception.dart'; export 'src/format.dart'; export 'src/header.dart' show TarHeader;
diff --git a/lib/src/validator.dart b/lib/src/validator.dart index 1b07560..ef04bc4 100644 --- a/lib/src/validator.dart +++ b/lib/src/validator.dart
@@ -5,6 +5,7 @@ import 'dart:async'; import 'package:meta/meta.dart'; +import 'package:path/path.dart' as p; import 'package:pub_semver/pub_semver.dart'; import 'entrypoint.dart'; @@ -42,9 +43,6 @@ /// package not to be uploaded; warnings will require the user to confirm the /// upload. abstract class Validator { - /// The entrypoint that's being validated. - final Entrypoint entrypoint; - /// The accumulated errors for this validator. /// /// Filled by calling [validate]. @@ -60,11 +58,15 @@ /// Filled by calling [validate]. final hints = <String>[]; - Validator(this.entrypoint); + late ValidationContext context; + Entrypoint get entrypoint => context.entrypoint; + int get packageSize => context.packageSize; + Uri get serverUrl => context.serverUrl; + List<String> get files => context.files; /// Validates the entrypoint, adding any errors and warnings to [errors] and /// [warnings], respectively. - Future validate(); + Future<void> validate(); /// Adds an error if the package's SDK constraint doesn't exclude Dart SDK /// versions older than [firstSdkVersion]. @@ -114,45 +116,55 @@ /// Run all validators on the [entrypoint] package and print their results. /// + /// [files] should be the result of `entrypoint.root.listFiles()`. + /// /// When the future completes [hints] [warnings] amd [errors] will have been /// appended with the reported hints warnings and errors respectively. /// /// [packageSize], if passed, should complete to the size of the tarred /// package, in bytes. This is used to validate that it's not too big to /// upload to the server. - static Future<void> runAll( - Entrypoint entrypoint, Future<int> packageSize, Uri? 
serverUrl, + static Future<void> runAll(Entrypoint entrypoint, Future<int> packageSize, + Uri serverUrl, List<String> files, {required List<String> hints, required List<String> warnings, - required List<String> errors}) { + required List<String> errors}) async { var validators = [ - GitignoreValidator(entrypoint), - PubspecValidator(entrypoint), - LicenseValidator(entrypoint), - NameValidator(entrypoint), - PubspecFieldValidator(entrypoint), - DependencyValidator(entrypoint), - DependencyOverrideValidator(entrypoint), - DeprecatedFieldsValidator(entrypoint), - DirectoryValidator(entrypoint), - ExecutableValidator(entrypoint), - CompiledDartdocValidator(entrypoint), - ReadmeValidator(entrypoint), - ChangelogValidator(entrypoint), - SdkConstraintValidator(entrypoint), - StrictDependenciesValidator(entrypoint), - FlutterConstraintValidator(entrypoint), - FlutterPluginFormatValidator(entrypoint), - LanguageVersionValidator(entrypoint), - RelativeVersionNumberingValidator(entrypoint, serverUrl), - NullSafetyMixedModeValidator(entrypoint), - PubspecTypoValidator(entrypoint), - LeakDetectionValidator(entrypoint), + GitignoreValidator(), + PubspecValidator(), + LicenseValidator(), + NameValidator(), + PubspecFieldValidator(), + DependencyValidator(), + DependencyOverrideValidator(), + DeprecatedFieldsValidator(), + DirectoryValidator(), + ExecutableValidator(), + CompiledDartdocValidator(), + ReadmeValidator(), + ChangelogValidator(), + SdkConstraintValidator(), + StrictDependenciesValidator(), + FlutterConstraintValidator(), + FlutterPluginFormatValidator(), + LanguageVersionValidator(), + RelativeVersionNumberingValidator(), + NullSafetyMixedModeValidator(), + PubspecTypoValidator(), + LeakDetectionValidator(), + SizeValidator(), ]; - validators.add(SizeValidator(entrypoint, packageSize)); - return Future.wait(validators.map((validator) => validator.validate())) - .then((_) { + final context = ValidationContext( + entrypoint, + await packageSize, + serverUrl, + files, + 
); + return await Future.wait(validators.map((validator) async { + validator.context = context; + await validator.validate(); + })).then((_) { hints.addAll([for (final validator in validators) ...validator.hints]); warnings .addAll([for (final validator in validators) ...validator.warnings]); @@ -190,4 +202,28 @@ } }); } + + /// Returns the [files] that are inside [dir] (relative to the package + /// entrypoint). + // TODO(sigurdm): Consider moving this to a more central location. + List<String> filesBeneath(String dir, {required bool recursive}) { + final base = p.canonicalize(p.join(entrypoint.root.dir, dir)); + return files + .where( + recursive + ? (file) => p.canonicalize(file).startsWith(base) + : (file) => p.canonicalize(p.dirname(file)) == base, + ) + .toList(); + } +} + +class ValidationContext { + final Entrypoint entrypoint; + final int packageSize; + final Uri serverUrl; + final List<String> files; + + ValidationContext( + this.entrypoint, this.packageSize, this.serverUrl, this.files); }
diff --git a/lib/src/validator/changelog.dart b/lib/src/validator/changelog.dart index 614eba2..bb897a7 100644 --- a/lib/src/validator/changelog.dart +++ b/lib/src/validator/changelog.dart
@@ -5,53 +5,52 @@ import 'dart:async'; import 'dart:convert'; +import 'package:collection/collection.dart'; import 'package:path/path.dart' as p; -import '../entrypoint.dart'; import '../io.dart'; import '../validator.dart'; +final _changelogRegexp = RegExp(r'^CHANGELOG($|\.)', caseSensitive: false); + /// A validator that validates a package's changelog file. class ChangelogValidator extends Validator { - ChangelogValidator(Entrypoint entrypoint) : super(entrypoint); - @override - Future validate() { - return Future.sync(() { - final changelog = entrypoint.root.changelogPath; + Future<void> validate() async { + final changelog = filesBeneath('.', recursive: false).firstWhereOrNull( + (entry) => p.basename(entry).contains(_changelogRegexp)); - if (changelog == null) { - warnings.add('Please add a `CHANGELOG.md` to your package. ' - 'See https://dart.dev/tools/pub/publishing#important-files.'); - return; - } + if (changelog == null) { + warnings.add('Please add a `CHANGELOG.md` to your package. ' + 'See https://dart.dev/tools/pub/publishing#important-files.'); + return; + } - if (p.basename(changelog) != 'CHANGELOG.md') { - warnings.add('Please consider renaming $changelog to `CHANGELOG.md`. ' - 'See https://dart.dev/tools/pub/publishing#important-files.'); - } + if (p.basename(changelog) != 'CHANGELOG.md') { + warnings.add('Please consider renaming $changelog to `CHANGELOG.md`. ' + 'See https://dart.dev/tools/pub/publishing#important-files.'); + } - var bytes = readBinaryFile(changelog); - String contents; + var bytes = readBinaryFile(changelog); + String contents; - try { - // utf8.decode doesn't allow invalid UTF-8. - contents = utf8.decode(bytes); - } on FormatException catch (_) { - warnings.add('$changelog contains invalid UTF-8.\n' - 'This will cause it to be displayed incorrectly on ' - 'the Pub site (https://pub.dev).'); - // Failed to decode contents, so there's nothing else to check. - return; - } + try { + // utf8.decode doesn't allow invalid UTF-8. 
+ contents = utf8.decode(bytes); + } on FormatException catch (_) { + warnings.add('$changelog contains invalid UTF-8.\n' + 'This will cause it to be displayed incorrectly on ' + 'the Pub site (https://pub.dev).'); + // Failed to decode contents, so there's nothing else to check. + return; + } - final version = entrypoint.root.pubspec.version.toString(); + final version = entrypoint.root.pubspec.version.toString(); - if (!contents.contains(version)) { - warnings.add("$changelog doesn't mention current version ($version).\n" - 'Consider updating it with notes on this version prior to ' - 'publication.'); - } - }); + if (!contents.contains(version)) { + warnings.add("$changelog doesn't mention current version ($version).\n" + 'Consider updating it with notes on this version prior to ' + 'publication.'); + } } }
diff --git a/lib/src/validator/compiled_dartdoc.dart b/lib/src/validator/compiled_dartdoc.dart index 3f3376a..ae25f83 100644 --- a/lib/src/validator/compiled_dartdoc.dart +++ b/lib/src/validator/compiled_dartdoc.dart
@@ -6,19 +6,16 @@ import 'package:path/path.dart' as path; -import '../entrypoint.dart'; import '../io.dart'; import '../validator.dart'; /// Validates that a package doesn't contain compiled Dartdoc /// output. class CompiledDartdocValidator extends Validator { - CompiledDartdocValidator(Entrypoint entrypoint) : super(entrypoint); - @override Future validate() { return Future.sync(() { - for (var entry in entrypoint.root.listFiles()) { + for (var entry in files) { if (path.basename(entry) != 'nav.json') continue; var dir = path.dirname(entry);
diff --git a/lib/src/validator/dependency.dart b/lib/src/validator/dependency.dart index 5d158d1..40e9c2a 100644 --- a/lib/src/validator/dependency.dart +++ b/lib/src/validator/dependency.dart
@@ -6,7 +6,6 @@ import 'package:pub_semver/pub_semver.dart'; -import '../entrypoint.dart'; import '../exceptions.dart'; import '../log.dart' as log; import '../package_name.dart'; @@ -25,228 +24,207 @@ /// A validator that validates a package's dependencies. class DependencyValidator extends Validator { - /// Whether any dependency has a caret constraint. - var _hasCaretDep = false; - - /// Whether any dependency depends on package features. - var _hasFeatures = false; - - DependencyValidator(Entrypoint entrypoint) : super(entrypoint); - @override Future validate() async { - await _validateDependencies(entrypoint.root.pubspec.dependencies.values); + /// Whether any dependency has a caret constraint. + var _hasCaretDep = false; - for (var feature in entrypoint.root.pubspec.features.values) { - // Allow off-by-default features, since older pubs will just ignore them - // anyway. - _hasFeatures = _hasFeatures || feature.onByDefault; + /// Emit an error for dependencies from unknown SDKs or without appropriate + /// constraints on the Dart SDK. + void _warnAboutSdkSource(PackageRange dep) { + var identifier = (dep.description as SdkDescription).sdk; + var sdk = sdks[identifier]; + if (sdk == null) { + errors.add('Unknown SDK "$identifier" for dependency "${dep.name}".'); + return; + } - await _validateDependencies(feature.dependencies); + validateSdkConstraint(sdk.firstPubVersion, + "Older versions of pub don't support the ${sdk.name} SDK."); } + /// Warn that dependencies should use the hosted source. 
+ Future _warnAboutSource(PackageRange dep) async { + List<Version> versions; + try { + var ids = await entrypoint.cache + .getVersions(entrypoint.cache.hosted.refFor(dep.name)); + versions = ids.map((id) => id.version).toList(); + } on ApplicationException catch (_) { + versions = []; + } + + String constraint; + if (versions.isNotEmpty) { + constraint = '^${Version.primary(versions)}'; + } else { + constraint = dep.constraint.toString(); + if (!dep.constraint.isAny && dep.constraint is! Version) { + constraint = '"$constraint"'; + } + } + + // Path sources are errors. Other sources are just warnings. + var messages = dep.source is PathSource ? errors : warnings; + + messages.add('Don\'t depend on "${dep.name}" from the ${dep.source} ' + 'source. Use the hosted source instead. For example:\n' + '\n' + 'dependencies:\n' + ' ${dep.name}: $constraint\n' + '\n' + 'Using the hosted source ensures that everyone can download your ' + 'package\'s dependencies along with your package.'); + } + + /// Warn about improper dependencies on Flutter. + void _warnAboutFlutterSdk(PackageRange dep) { + if (dep.source is SdkSource) { + _warnAboutSdkSource(dep); + return; + } + + errors.add('Don\'t depend on "${dep.name}" from the ${dep.source} ' + 'source. Use the SDK source instead. For example:\n' + '\n' + 'dependencies:\n' + ' ${dep.name}:\n' + ' sdk: ${dep.constraint}\n' + '\n' + 'The Flutter SDK is downloaded and managed outside of pub.'); + } + + /// Warn that dependencies should have version constraints. 
+ void _warnAboutNoConstraint(PackageRange dep) { + var message = 'Your dependency on "${dep.name}" should have a version ' + 'constraint.'; + var locked = entrypoint.lockFile.packages[dep.name]; + if (locked != null) { + message = '$message For example:\n' + '\n' + 'dependencies:\n' + ' ${dep.name}: ^${locked.version}\n'; + } + warnings.add('$message\n' + 'Without a constraint, you\'re promising to support ${log.bold("all")} ' + 'future versions of "${dep.name}".'); + } + + /// Warn that dependencies should allow more than a single version. + void _warnAboutSingleVersionConstraint(PackageRange dep) { + warnings.add( + 'Your dependency on "${dep.name}" should allow more than one version. ' + 'For example:\n' + '\n' + 'dependencies:\n' + ' ${dep.name}: ^${dep.constraint}\n' + '\n' + 'Constraints that are too tight will make it difficult for people to ' + 'use your package\n' + 'along with other packages that also depend on "${dep.name}".'); + } + + /// Warn that dependencies should have lower bounds on their constraints. + void _warnAboutNoConstraintLowerBound(PackageRange dep) { + var message = + 'Your dependency on "${dep.name}" should have a lower bound.'; + var locked = entrypoint.lockFile.packages[dep.name]; + if (locked != null) { + String constraint; + if (locked.version == (dep.constraint as VersionRange).max) { + constraint = '^${locked.version}'; + } else { + constraint = '">=${locked.version} ${dep.constraint}"'; + } + + message = '$message For example:\n' + '\n' + 'dependencies:\n' + ' ${dep.name}: $constraint\n'; + } + warnings.add('$message\n' + 'Without a constraint, you\'re promising to support ${log.bold("all")} ' + 'previous versions of "${dep.name}".'); + } + + /// Warn that dependencies should have upper bounds on their constraints. 
+ void _warnAboutNoConstraintUpperBound(PackageRange dep) { + String constraint; + if ((dep.constraint as VersionRange).includeMin) { + constraint = '^${(dep.constraint as VersionRange).min}'; + } else { + constraint = '"${dep.constraint} ' + '<${(dep.constraint as VersionRange).min!.nextBreaking}"'; + } + // TODO: Handle the case where `dep.constraint.min` is null. + + warnings.add( + 'Your dependency on "${dep.name}" should have an upper bound. For ' + 'example:\n' + '\n' + 'dependencies:\n' + ' ${dep.name}: $constraint\n' + '\n' + 'Without an upper bound, you\'re promising to support ' + '${log.bold("all")} future versions of ${dep.name}.'); + } + + void _warnAboutPrerelease(String dependencyName, VersionRange constraint) { + final packageVersion = entrypoint.root.version; + if (constraint.min != null && + constraint.min!.isPreRelease && + !packageVersion.isPreRelease) { + warnings.add('Packages dependent on a pre-release of another package ' + 'should themselves be published as a pre-release version. ' + 'If this package needs $dependencyName version ${constraint.min}, ' + 'consider publishing the package as a pre-release instead.\n' + 'See https://dart.dev/tools/pub/publishing#publishing-prereleases ' + 'For more information on pre-releases.'); + } + } + + /// Validates all dependencies in [dependencies]. + Future _validateDependencies(Iterable<PackageRange> dependencies) async { + for (var dependency in dependencies) { + var constraint = dependency.constraint; + if (dependency.name == 'flutter') { + _warnAboutFlutterSdk(dependency); + } else if (dependency.source is SdkSource) { + _warnAboutSdkSource(dependency); + } else if (dependency.source is! 
HostedSource) { + await _warnAboutSource(dependency); + + final description = dependency.description; + if (description is GitDescription && description.path != '.') { + validateSdkConstraint(_firstGitPathVersion, + "Older versions of pub don't support Git path dependencies."); + } + } else { + if (constraint.isAny) { + _warnAboutNoConstraint(dependency); + } else if (constraint is VersionRange) { + if (constraint is Version) { + _warnAboutSingleVersionConstraint(dependency); + } else { + _warnAboutPrerelease(dependency.name, constraint); + if (constraint.min == null) { + _warnAboutNoConstraintLowerBound(dependency); + } else if (constraint.max == null) { + _warnAboutNoConstraintUpperBound(dependency); + } + } + _hasCaretDep = + _hasCaretDep || constraint.toString().startsWith('^'); + } + } + } + } + + await _validateDependencies(entrypoint.root.pubspec.dependencies.values); + if (_hasCaretDep) { validateSdkConstraint(_firstCaretVersion, "Older versions of pub don't support ^ version constraints."); } - - if (_hasFeatures) { - // TODO(nweiz): Allow packages with features to be published when we have - // analyzer support for telling the user that a given import requires a - // given feature. When we do this, verify that packages with features have - // an SDK constraint that's at least >=2.0.0-dev.11.0. - errors.add('Packages with package features may not be published yet.'); - } - } - - /// Validates all dependencies in [dependencies]. - Future _validateDependencies(Iterable<PackageRange> dependencies) async { - for (var dependency in dependencies) { - var constraint = dependency.constraint; - if (dependency.name == 'flutter') { - _warnAboutFlutterSdk(dependency); - } else if (dependency.source is SdkSource) { - _warnAboutSdkSource(dependency); - } else if (dependency.source is! 
HostedSource) { - await _warnAboutSource(dependency); - - if (dependency.source is GitSource && - dependency.description['path'] != '.') { - validateSdkConstraint(_firstGitPathVersion, - "Older versions of pub don't support Git path dependencies."); - } - } else { - if (constraint.isAny) { - _warnAboutNoConstraint(dependency); - } else if (constraint is VersionRange) { - if (constraint is Version) { - _warnAboutSingleVersionConstraint(dependency); - } else { - _warnAboutPrerelease(dependency.name, constraint); - if (constraint.min == null) { - _warnAboutNoConstraintLowerBound(dependency); - } else if (constraint.max == null) { - _warnAboutNoConstraintUpperBound(dependency); - } - } - _hasCaretDep = _hasCaretDep || constraint.toString().startsWith('^'); - } - } - - _hasFeatures = _hasFeatures || dependency.features.isNotEmpty; - } - } - - /// Warn about improper dependencies on Flutter. - void _warnAboutFlutterSdk(PackageRange dep) { - if (dep.source is SdkSource) { - _warnAboutSdkSource(dep); - return; - } - - errors.add('Don\'t depend on "${dep.name}" from the ${dep.source} ' - 'source. Use the SDK source instead. For example:\n' - '\n' - 'dependencies:\n' - ' ${dep.name}:\n' - ' sdk: ${dep.constraint}\n' - '\n' - 'The Flutter SDK is downloaded and managed outside of pub.'); - } - - /// Emit an error for dependencies from unknown SDKs or without appropriate - /// constraints on the Dart SDK. - void _warnAboutSdkSource(PackageRange dep) { - var identifier = dep.description as String; - var sdk = sdks[identifier]; - if (sdk == null) { - errors.add('Unknown SDK "$identifier" for dependency "${dep.name}".'); - return; - } - - validateSdkConstraint(sdk.firstPubVersion, - "Older versions of pub don't support the ${sdk.name} SDK."); - } - - /// Warn that dependencies should use the hosted source. 
- Future _warnAboutSource(PackageRange dep) async { - List<Version> versions; - try { - var ids = await entrypoint.cache.hosted - .getVersions(entrypoint.cache.sources.hosted.refFor(dep.name)); - versions = ids.map((id) => id.version).toList(); - } on ApplicationException catch (_) { - versions = []; - } - - String constraint; - if (versions.isNotEmpty) { - constraint = '^${Version.primary(versions)}'; - } else { - constraint = dep.constraint.toString(); - if (!dep.constraint.isAny && dep.constraint is! Version) { - constraint = '"$constraint"'; - } - } - - // Path sources are errors. Other sources are just warnings. - var messages = dep.source is PathSource ? errors : warnings; - - messages.add('Don\'t depend on "${dep.name}" from the ${dep.source} ' - 'source. Use the hosted source instead. For example:\n' - '\n' - 'dependencies:\n' - ' ${dep.name}: $constraint\n' - '\n' - 'Using the hosted source ensures that everyone can download your ' - 'package\'s dependencies along with your package.'); - } - - /// Warn that dependencies should have version constraints. - void _warnAboutNoConstraint(PackageRange dep) { - var message = 'Your dependency on "${dep.name}" should have a version ' - 'constraint.'; - var locked = entrypoint.lockFile.packages[dep.name]; - if (locked != null) { - message = '$message For example:\n' - '\n' - 'dependencies:\n' - ' ${dep.name}: ^${locked.version}\n'; - } - warnings.add('$message\n' - 'Without a constraint, you\'re promising to support ${log.bold("all")} ' - 'future versions of "${dep.name}".'); - } - - /// Warn that dependencies should allow more than a single version. - void _warnAboutSingleVersionConstraint(PackageRange dep) { - warnings.add( - 'Your dependency on "${dep.name}" should allow more than one version. 
' - 'For example:\n' - '\n' - 'dependencies:\n' - ' ${dep.name}: ^${dep.constraint}\n' - '\n' - 'Constraints that are too tight will make it difficult for people to ' - 'use your package\n' - 'along with other packages that also depend on "${dep.name}".'); - } - - /// Warn that dependencies should have lower bounds on their constraints. - void _warnAboutNoConstraintLowerBound(PackageRange dep) { - var message = 'Your dependency on "${dep.name}" should have a lower bound.'; - var locked = entrypoint.lockFile.packages[dep.name]; - if (locked != null) { - String constraint; - if (locked.version == (dep.constraint as VersionRange).max) { - constraint = '^${locked.version}'; - } else { - constraint = '">=${locked.version} ${dep.constraint}"'; - } - - message = '$message For example:\n' - '\n' - 'dependencies:\n' - ' ${dep.name}: $constraint\n'; - } - warnings.add('$message\n' - 'Without a constraint, you\'re promising to support ${log.bold("all")} ' - 'previous versions of "${dep.name}".'); - } - - /// Warn that dependencies should have upper bounds on their constraints. - void _warnAboutNoConstraintUpperBound(PackageRange dep) { - String constraint; - if ((dep.constraint as VersionRange).includeMin) { - constraint = '^${(dep.constraint as VersionRange).min}'; - } else { - constraint = '"${dep.constraint} ' - '<${(dep.constraint as VersionRange).min!.nextBreaking}"'; - } - // TODO: Handle the case where `dep.constraint.min` is null. - - warnings - .add('Your dependency on "${dep.name}" should have an upper bound. 
For ' - 'example:\n' - '\n' - 'dependencies:\n' - ' ${dep.name}: $constraint\n' - '\n' - 'Without an upper bound, you\'re promising to support ' - '${log.bold("all")} future versions of ${dep.name}.'); - } - - void _warnAboutPrerelease(String dependencyName, VersionRange constraint) { - final packageVersion = entrypoint.root.version; - if (constraint.min != null && - constraint.min!.isPreRelease && - !packageVersion.isPreRelease) { - warnings.add('Packages dependent on a pre-release of another package ' - 'should themselves be published as a pre-release version. ' - 'If this package needs $dependencyName version ${constraint.min}, ' - 'consider publishing the package as a pre-release instead.\n' - 'See https://dart.dev/tools/pub/publishing#publishing-prereleases ' - 'For more information on pre-releases.'); - } } }
diff --git a/lib/src/validator/dependency_override.dart b/lib/src/validator/dependency_override.dart index 24e4e84..b6a49a0 100644 --- a/lib/src/validator/dependency_override.dart +++ b/lib/src/validator/dependency_override.dart
@@ -6,14 +6,11 @@ import 'package:collection/collection.dart'; -import '../entrypoint.dart'; import '../validator.dart'; /// A validator that validates a package's dependencies overrides (or the /// absence thereof). class DependencyOverrideValidator extends Validator { - DependencyOverrideValidator(Entrypoint entrypoint) : super(entrypoint); - @override Future validate() { var overridden = MapKeySet(entrypoint.root.dependencyOverrides);
diff --git a/lib/src/validator/deprecated_fields.dart b/lib/src/validator/deprecated_fields.dart index e1aee4f..942c8f5 100644 --- a/lib/src/validator/deprecated_fields.dart +++ b/lib/src/validator/deprecated_fields.dart
@@ -4,14 +4,11 @@ import 'dart:async'; -import '../entrypoint.dart'; import '../validator.dart'; /// A validator that validates that a pubspec is not including deprecated fields /// which are no longer read. class DeprecatedFieldsValidator extends Validator { - DeprecatedFieldsValidator(Entrypoint entrypoint) : super(entrypoint); - @override Future validate() async { if (entrypoint.root.pubspec.fields.containsKey('transformers')) {
diff --git a/lib/src/validator/directory.dart b/lib/src/validator/directory.dart index 409e243..ae8ac57 100644 --- a/lib/src/validator/directory.dart +++ b/lib/src/validator/directory.dart
@@ -6,14 +6,11 @@ import 'package:path/path.dart' as path; -import '../entrypoint.dart'; import '../io.dart'; import '../validator.dart'; /// A validator that validates a package's top-level directories. class DirectoryValidator extends Validator { - DirectoryValidator(Entrypoint entrypoint) : super(entrypoint); - static final _pluralNames = [ 'benchmarks', 'docs', @@ -22,30 +19,35 @@ 'tools' ]; + static String docRef = 'See https://dart.dev/tools/pub/package-layout.'; + @override - Future validate() { - return Future.sync(() { - for (var dir in listDir(entrypoint.root.dir)) { - if (!dirExists(dir)) continue; + Future<void> validate() async { + final visited = <String>{}; + for (final file in files) { + // Find the topmost directory name of [file]. + final dir = path.join(entrypoint.root.dir, + path.split(path.relative(file, from: entrypoint.root.dir)).first); + if (!visited.add(dir)) continue; + if (!dirExists(dir)) continue; - dir = path.basename(dir); - if (_pluralNames.contains(dir)) { - // Cut off the "s" - var singularName = dir.substring(0, dir.length - 1); - warnings.add('Rename the top-level "$dir" directory to ' - '"$singularName".\n' - 'The Pub layout convention is to use singular directory ' - 'names.\n' - 'Plural names won\'t be correctly identified by Pub and other ' - 'tools.'); - } - - if (dir.contains(RegExp(r'^samples?$'))) { - warnings.add('Rename the top-level "$dir" directory to "example".\n' - 'This allows Pub to find your examples and create "packages" ' - 'directories for them.\n'); - } + final dirName = path.basename(dir); + if (_pluralNames.contains(dirName)) { + // Cut off the "s" + var singularName = dirName.substring(0, dirName.length - 1); + warnings.add('Rename the top-level "$dirName" directory to ' + '"$singularName".\n' + 'The Pub layout convention is to use singular directory ' + 'names.\n' + 'Plural names won\'t be correctly identified by Pub and other ' + 'tools.\n$docRef'); } - }); + + if 
(dirName.contains(RegExp(r'^samples?$'))) { + warnings.add('Rename the top-level "$dirName" directory to "example".\n' + 'This allows Pub to find your examples and create "packages" ' + 'directories for them.\n$docRef'); + } + } } }
diff --git a/lib/src/validator/executable.dart b/lib/src/validator/executable.dart index 2163e8c..b7679eb 100644 --- a/lib/src/validator/executable.dart +++ b/lib/src/validator/executable.dart
@@ -6,20 +6,15 @@ import 'package:path/path.dart' as p; -import '../entrypoint.dart'; import '../validator.dart'; /// Validates that a package's pubspec doesn't contain executables that /// reference non-existent scripts. class ExecutableValidator extends Validator { - ExecutableValidator(Entrypoint entrypoint) : super(entrypoint); - @override Future validate() async { - var binFiles = entrypoint.root - .listFiles(beneath: 'bin', recursive: false) - .map(entrypoint.root.relative) - .toList(); + final binFiles = + filesBeneath('bin', recursive: false).map(entrypoint.root.relative); entrypoint.root.pubspec.executables.forEach((executable, script) { var scriptPath = p.join('bin', '$script.dart');
diff --git a/lib/src/validator/flutter_constraint.dart b/lib/src/validator/flutter_constraint.dart index d689a22..1b1e224 100644 --- a/lib/src/validator/flutter_constraint.dart +++ b/lib/src/validator/flutter_constraint.dart
@@ -6,12 +6,10 @@ import 'package:pub_semver/pub_semver.dart'; -import '../entrypoint.dart'; import '../validator.dart'; /// Validates that a package's flutter constraint doesn't contain an upper bound class FlutterConstraintValidator extends Validator { - FlutterConstraintValidator(Entrypoint entrypoint) : super(entrypoint); static const explanationUrl = 'https://dart.dev/go/flutter-upper-bound-deprecation';
diff --git a/lib/src/validator/flutter_plugin_format.dart b/lib/src/validator/flutter_plugin_format.dart index 6f00f06..a56f7df 100644 --- a/lib/src/validator/flutter_plugin_format.dart +++ b/lib/src/validator/flutter_plugin_format.dart
@@ -6,7 +6,6 @@ import 'package:pub_semver/pub_semver.dart'; -import '../entrypoint.dart'; import '../validator.dart'; const _pluginDocsUrl = @@ -19,8 +18,6 @@ /// See: /// https://flutter.dev/docs/development/packages-and-plugins/developing-packages class FlutterPluginFormatValidator extends Validator { - FlutterPluginFormatValidator(Entrypoint entrypoint) : super(entrypoint); - @override Future validate() async { final pubspec = entrypoint.root.pubspec;
diff --git a/lib/src/validator/gitignore.dart b/lib/src/validator/gitignore.dart index 4fb49b1..809c537 100644 --- a/lib/src/validator/gitignore.dart +++ b/lib/src/validator/gitignore.dart
@@ -7,10 +7,10 @@ import 'package:path/path.dart' as p; -import '../entrypoint.dart'; import '../git.dart' as git; import '../ignore.dart'; import '../io.dart'; +import '../log.dart' as log; import '../utils.dart'; import '../validator.dart'; @@ -18,17 +18,24 @@ /// .gitignore. These would be considered part of the package by previous /// versions of pub. class GitignoreValidator extends Validator { - GitignoreValidator(Entrypoint entrypoint) : super(entrypoint); - @override Future<void> validate() async { if (entrypoint.root.inGitRepo) { - final checkedIntoGit = git.runSync([ - 'ls-files', - '--cached', - '--exclude-standard', - '--recurse-submodules' - ], workingDir: entrypoint.root.dir); + late final List<String> checkedIntoGit; + try { + checkedIntoGit = git.runSync([ + 'ls-files', + '--cached', + '--exclude-standard', + '--recurse-submodules' + ], workingDir: entrypoint.root.dir); + } on git.GitException catch (e) { + log.fine('Could not run `git ls-files` files in repo (${e.message}).'); + // This validation is only a warning. + // If git is not supported on the platform, or too old to support + // --recurse-submodules we just continue silently. + return; + } final root = git.repoRoot(entrypoint.root.dir) ?? entrypoint.root.dir; var beneath = p.posix.joinAll( p.split(p.normalize(p.relative(entrypoint.root.dir, from: root))));
diff --git a/lib/src/validator/language_version.dart b/lib/src/validator/language_version.dart index e52a92a..2041bcb 100644 --- a/lib/src/validator/language_version.dart +++ b/lib/src/validator/language_version.dart
@@ -9,7 +9,6 @@ import 'package:stack_trace/stack_trace.dart'; import '../dart.dart'; -import '../entrypoint.dart'; import '../language_version.dart'; import '../log.dart' as log; import '../utils.dart'; @@ -18,22 +17,18 @@ /// Validates that libraries do not opt into newer language versions than what /// they declare in their pubspec. class LanguageVersionValidator extends Validator { - final AnalysisContextManager analysisContextManager = - AnalysisContextManager(); - - LanguageVersionValidator(Entrypoint entrypoint) : super(entrypoint) { - var packagePath = p.normalize(p.absolute(entrypoint.root.dir)); - analysisContextManager.createContextsForDirectory(packagePath); - } - @override Future validate() async { + var packagePath = p.normalize(p.absolute(entrypoint.root.dir)); + final analysisContextManager = AnalysisContextManager() + ..createContextsForDirectory(packagePath); + final declaredLanguageVersion = entrypoint.root.pubspec.languageVersion; - for (final path in ['lib', 'bin'] - .map((path) => entrypoint.root.listFiles(beneath: path)) - .expand((files) => files) - .where((String file) => p.extension(file) == '.dart')) { + for (final path in ['lib', 'bin'].expand((path) { + return filesBeneath(path, recursive: true) + .where((file) => p.extension(file) == '.dart'); + })) { CompilationUnit unit; try { unit = analysisContextManager.parse(path);
diff --git a/lib/src/validator/leak_detection.dart b/lib/src/validator/leak_detection.dart index 7a398d4..03100f8 100644 --- a/lib/src/validator/leak_detection.dart +++ b/lib/src/validator/leak_detection.dart
@@ -12,7 +12,6 @@ import 'package:pool/pool.dart'; import 'package:source_span/source_span.dart'; -import '../entrypoint.dart'; import '../ignore.dart'; import '../validator.dart'; @@ -26,8 +25,6 @@ /// accidentally leaked. @sealed class LeakDetectionValidator extends Validator { - LeakDetectionValidator(Entrypoint entrypoint) : super(entrypoint); - @override Future<void> validate() async { // Load `false_secrets` from `pubspec.yaml`. @@ -37,7 +34,7 @@ ); final pool = Pool(20); // don't read more than 20 files concurrently! - final leaks = await Future.wait(entrypoint.root.listFiles().map((f) async { + final leaks = await Future.wait(files.map((f) async { final relPath = entrypoint.root.relative(f); // Skip files matching patterns in `false_secrets`
diff --git a/lib/src/validator/license.dart b/lib/src/validator/license.dart index 1a2c68a..993a48f 100644 --- a/lib/src/validator/license.dart +++ b/lib/src/validator/license.dart
@@ -4,26 +4,23 @@ import 'dart:async'; -import 'package:path/path.dart' as path; +import 'package:path/path.dart' as p; -import '../entrypoint.dart'; import '../validator.dart'; +final licenseLike = + RegExp(r'^(([a-zA-Z0-9]+[-_])?(LICENSE|COPYING)|UNLICENSE)(\..*)?$'); + /// A validator that checks that a LICENSE-like file exists. class LicenseValidator extends Validator { - LicenseValidator(Entrypoint entrypoint) : super(entrypoint); - @override Future validate() { return Future.sync(() { - final licenseLike = - RegExp(r'^(([a-zA-Z0-9]+[-_])?(LICENSE|COPYING)|UNLICENSE)(\..*)?$'); - final candidates = entrypoint.root - .listFiles(recursive: false) - .map(path.basename) - .where(licenseLike.hasMatch); + final candidates = filesBeneath('.', recursive: false) + .where((file) => licenseLike.hasMatch(p.basename(file))); if (candidates.isNotEmpty) { - if (!candidates.contains('LICENSE')) { + if (!candidates + .any((candidate) => p.basename(candidate) == 'LICENSE')) { final firstCandidate = candidates.first; warnings.add('Please consider renaming $firstCandidate to `LICENSE`. ' 'See https://dart.dev/tools/pub/publishing#important-files.');
diff --git a/lib/src/validator/name.dart b/lib/src/validator/name.dart index 07d945c..bc02031 100644 --- a/lib/src/validator/name.dart +++ b/lib/src/validator/name.dart
@@ -6,21 +6,18 @@ import 'package:path/path.dart' as path; -import '../entrypoint.dart'; import '../utils.dart'; import '../validator.dart'; /// A validator that the name of a package is legal and matches the library name /// in the case of a single library. class NameValidator extends Validator { - NameValidator(Entrypoint entrypoint) : super(entrypoint); - @override Future validate() { return Future.sync(() { _checkName(entrypoint.root.name); - var libraries = _libraries; + var libraries = _libraries(files); if (libraries.length == 1) { var libName = path.basenameWithoutExtension(libraries[0]); @@ -34,10 +31,9 @@ /// Returns a list of all libraries in the current package as paths relative /// to the package's root directory. - List<String> get _libraries { + List<String> _libraries(List<String> files) { var libDir = entrypoint.root.path('lib'); - return entrypoint.root - .listFiles(beneath: 'lib') + return filesBeneath('lib', recursive: true) .map((file) => path.relative(file, from: path.dirname(libDir))) .where((file) => !path.split(file).contains('src') &&
diff --git a/lib/src/validator/null_safety_mixed_mode.dart b/lib/src/validator/null_safety_mixed_mode.dart index 42f9f77..36f9796 100644 --- a/lib/src/validator/null_safety_mixed_mode.dart +++ b/lib/src/validator/null_safety_mixed_mode.dart
@@ -6,16 +6,14 @@ import 'package:path/path.dart' as p; -import '../entrypoint.dart'; import '../null_safety_analysis.dart'; import '../package_name.dart'; +import '../source/path.dart'; import '../validator.dart'; /// Gives a warning when publishing a new version, if this package opts into /// null safety, but any of the dependencies do not. class NullSafetyMixedModeValidator extends Validator { - NullSafetyMixedModeValidator(Entrypoint entrypoint) : super(entrypoint); - @override Future<void> validate() async { final pubspec = entrypoint.root.pubspec; @@ -23,13 +21,16 @@ if (!declaredLanguageVersion.supportsNullSafety) { return; } - final analysisResult = await NullSafetyAnalysis(entrypoint.cache) - .nullSafetyCompliance(PackageId( - entrypoint.root.name, - entrypoint.cache.sources.path, - entrypoint.root.version, - {'relative': false, 'path': p.absolute(entrypoint.root.dir)})); - + final analysisResult = + await NullSafetyAnalysis(entrypoint.cache).nullSafetyCompliance( + PackageId( + entrypoint.root.name, + entrypoint.root.version, + ResolvedPathDescription( + PathDescription(p.absolute(entrypoint.root.dir), false), + ), + ), + ); if (analysisResult.compliance == NullSafetyCompliance.mixed) { warnings.add('''
This package is opting into null-safety, but a dependency or file is not.
diff --git a/lib/src/validator/pubspec.dart b/lib/src/validator/pubspec.dart index 2a5e052..ef9cff8 100644 --- a/lib/src/validator/pubspec.dart +++ b/lib/src/validator/pubspec.dart
@@ -6,7 +6,6 @@ import 'package:path/path.dart' as p; -import '../entrypoint.dart'; import '../validator.dart'; /// Validates that a package's pubspec exists. @@ -14,13 +13,10 @@ /// In most cases this is clearly true, since pub can't run without a pubspec, /// but it's possible that the pubspec is gitignored. class PubspecValidator extends Validator { - PubspecValidator(Entrypoint entrypoint) : super(entrypoint); - @override Future validate() async { - var files = entrypoint.root.listFiles(recursive: false); - if (!files.any((file) => - p.canonicalize(file) == p.canonicalize(entrypoint.pubspecPath))) { + if (!filesBeneath('.', recursive: false) + .any((file) => p.basename(file) == 'pubspec.yaml')) { errors.add('The pubspec is hidden, probably by .gitignore or pubignore.'); } }
diff --git a/lib/src/validator/pubspec_field.dart b/lib/src/validator/pubspec_field.dart index 8a5c23e..91d8da3 100644 --- a/lib/src/validator/pubspec_field.dart +++ b/lib/src/validator/pubspec_field.dart
@@ -4,16 +4,13 @@ import 'dart:async'; -import '../entrypoint.dart'; import '../validator.dart'; /// A validator that checks that the pubspec has valid "author" and "homepage" /// fields. class PubspecFieldValidator extends Validator { - PubspecFieldValidator(Entrypoint entrypoint) : super(entrypoint); - @override - Future validate() { + Future<void> validate() { _validateFieldIsString('description'); _validateFieldUrl('homepage'); _validateFieldUrl('repository');
diff --git a/lib/src/validator/pubspec_typo.dart b/lib/src/validator/pubspec_typo.dart index 9b04292..0843bc4 100644 --- a/lib/src/validator/pubspec_typo.dart +++ b/lib/src/validator/pubspec_typo.dart
@@ -2,14 +2,11 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -import '../entrypoint.dart'; import '../levenshtein.dart'; import '../validator.dart'; /// Validates that a package's pubspec does not contain any typos in its keys. class PubspecTypoValidator extends Validator { - PubspecTypoValidator(Entrypoint entrypoint) : super(entrypoint); - @override Future validate() async { final fields = entrypoint.root.pubspec.fields;
diff --git a/lib/src/validator/readme.dart b/lib/src/validator/readme.dart index 85de479..3604232 100644 --- a/lib/src/validator/readme.dart +++ b/lib/src/validator/readme.dart
@@ -5,40 +5,51 @@ import 'dart:async'; import 'dart:convert'; -import 'package:path/path.dart' as path; +import 'package:path/path.dart' as p; -import '../entrypoint.dart'; import '../io.dart'; import '../validator.dart'; +final _readmeRegexp = RegExp(r'^README($|\.)', caseSensitive: false); + /// Validates that a package's README exists and is valid utf-8. class ReadmeValidator extends Validator { - ReadmeValidator(Entrypoint entrypoint) : super(entrypoint); - @override - Future validate() { - return Future.sync(() { - var readme = entrypoint.root.readmePath; - if (readme == null) { - warnings - .add('Please add a README.md file that describes your package.'); - return; - } + Future<void> validate() async { + // Find the path to the README file at the root of the entrypoint. + // + // If multiple READMEs are found, this uses the same conventions as + // pub.dev for choosing the primary one: the README with the fewest + // extensions that is lexically ordered first is chosen. + final readmes = filesBeneath('.', recursive: false) + .where((file) => p.basename(file).contains(_readmeRegexp)); - if (path.basename(readme) != 'README.md') { - warnings.add('Please consider renaming $readme to `README.md`. ' - 'See https://dart.dev/tools/pub/publishing#important-files.'); - } + if (readmes.isEmpty) { + warnings.add('Please add a README.md file that describes your package.'); + return; + } - var bytes = readBinaryFile(readme); - try { - // utf8.decode doesn't allow invalid UTF-8. 
- utf8.decode(bytes); - } on FormatException catch (_) { - warnings.add('$readme contains invalid UTF-8.\n' - 'This will cause it to be displayed incorrectly on ' - 'the Pub site (https://pub.dev).'); - } + final readme = readmes.reduce((readme1, readme2) { + final extensions1 = '.'.allMatches(p.basename(readme1)).length; + final extensions2 = '.'.allMatches(p.basename(readme2)).length; + var comparison = extensions1.compareTo(extensions2); + if (comparison == 0) comparison = readme1.compareTo(readme2); + return (comparison <= 0) ? readme1 : readme2; }); + + if (p.basename(readme) != 'README.md') { + warnings.add('Please consider renaming $readme to `README.md`. ' + 'See https://dart.dev/tools/pub/publishing#important-files.'); + } + + var bytes = readBinaryFile(readme); + try { + // utf8.decode doesn't allow invalid UTF-8. + utf8.decode(bytes); + } on FormatException catch (_) { + warnings.add('$readme contains invalid UTF-8.\n' + 'This will cause it to be displayed incorrectly on ' + 'the Pub site (https://pub.dev).'); + } } }
diff --git a/lib/src/validator/relative_version_numbering.dart b/lib/src/validator/relative_version_numbering.dart index eb4246c..1b99b79 100644 --- a/lib/src/validator/relative_version_numbering.dart +++ b/lib/src/validator/relative_version_numbering.dart
@@ -6,7 +6,6 @@ import 'package:collection/collection.dart' show IterableExtension; -import '../entrypoint.dart'; import '../exceptions.dart'; import '../null_safety_analysis.dart'; import '../package_name.dart'; @@ -18,19 +17,14 @@ static const String semverUrl = 'https://dart.dev/tools/pub/versioning#semantic-versions'; - final Uri? _server; - - RelativeVersionNumberingValidator(Entrypoint entrypoint, this._server) - : super(entrypoint); - @override Future<void> validate() async { - final hostedSource = entrypoint.cache.sources.hosted; + final hostedSource = entrypoint.cache.hosted; List<PackageId> existingVersions; try { - existingVersions = await hostedSource - .bind(entrypoint.cache) - .getVersions(hostedSource.refFor(entrypoint.root.name, url: _server)); + existingVersions = await entrypoint.cache.getVersions( + hostedSource.refFor(entrypoint.root.name, url: serverUrl.toString()), + ); } on PackageNotFoundException { existingVersions = []; } @@ -39,8 +33,7 @@ .lastWhereOrNull((id) => id.version < entrypoint.root.version); if (previousVersion == null) return; - final previousPubspec = - await hostedSource.bind(entrypoint.cache).describe(previousVersion); + final previousPubspec = await entrypoint.cache.describe(previousVersion); final currentOptedIn = entrypoint.root.pubspec.languageVersion.supportsNullSafety;
diff --git a/lib/src/validator/sdk_constraint.dart b/lib/src/validator/sdk_constraint.dart index ff25e53..736876b 100644 --- a/lib/src/validator/sdk_constraint.dart +++ b/lib/src/validator/sdk_constraint.dart
@@ -6,7 +6,6 @@ import 'package:pub_semver/pub_semver.dart'; -import '../entrypoint.dart'; import '../sdk.dart'; import '../validator.dart'; @@ -18,8 +17,6 @@ /// * is not depending on a prerelease, unless the package itself is a /// prerelease. class SdkConstraintValidator extends Validator { - SdkConstraintValidator(Entrypoint entrypoint) : super(entrypoint); - /// Get SDK version constraint from `pubspec.yaml` without any defaults or /// overrides. VersionConstraint _sdkConstraintFromPubspecYaml() {
diff --git a/lib/src/validator/size.dart b/lib/src/validator/size.dart index e989518..ae32496 100644 --- a/lib/src/validator/size.dart +++ b/lib/src/validator/size.dart
@@ -3,9 +3,7 @@ // BSD-style license that can be found in the LICENSE file. import 'dart:async'; -import 'dart:math' as math; -import '../entrypoint.dart'; import '../io.dart'; import '../validator.dart'; @@ -14,30 +12,24 @@ /// A validator that validates that a package isn't too big. class SizeValidator extends Validator { - final Future<int> packageSize; - - SizeValidator(Entrypoint entrypoint, this.packageSize) : super(entrypoint); - @override - Future validate() { - return packageSize.then((size) { - if (size <= _maxSize) return; - var sizeInMb = (size / math.pow(2, 20)).toStringAsPrecision(4); - // Current implementation of Package.listFiles skips hidden files - var ignoreExists = fileExists(entrypoint.root.path('.gitignore')); + Future<void> validate() async { + if (packageSize <= _maxSize) return; + var sizeInMb = (packageSize / (1 << 20)).toStringAsPrecision(4); + // Current implementation of Package.listFiles skips hidden files + var ignoreExists = fileExists(entrypoint.root.path('.gitignore')); - var error = StringBuffer('Your package is $sizeInMb MB. Hosted ' - 'packages must be smaller than 100 MB.'); + var error = StringBuffer('Your package is $sizeInMb MB. Hosted ' + 'packages must be smaller than 100 MB.'); - if (ignoreExists && !entrypoint.root.inGitRepo) { - error.write(' Your .gitignore has no effect since your project ' - 'does not appear to be in version control.'); - } else if (!ignoreExists && entrypoint.root.inGitRepo) { - error.write(' Consider adding a .gitignore to avoid including ' - 'temporary files.'); - } + if (ignoreExists && !entrypoint.root.inGitRepo) { + error.write(' Your .gitignore has no effect since your project ' + 'does not appear to be in version control.'); + } else if (!ignoreExists && entrypoint.root.inGitRepo) { + error.write(' Consider adding a .gitignore to avoid including ' + 'temporary files.'); + } - errors.add(error.toString()); - }); + errors.add(error.toString()); } }
diff --git a/lib/src/validator/strict_dependencies.dart b/lib/src/validator/strict_dependencies.dart index 881c55c..84f7a09 100644 --- a/lib/src/validator/strict_dependencies.dart +++ b/lib/src/validator/strict_dependencies.dart
@@ -11,7 +11,6 @@ import 'package:stack_trace/stack_trace.dart'; import '../dart.dart'; -import '../entrypoint.dart'; import '../io.dart'; import '../log.dart' as log; import '../utils.dart'; @@ -19,19 +18,16 @@ /// Validates that Dart source files only import declared dependencies. class StrictDependenciesValidator extends Validator { - final AnalysisContextManager analysisContextManager = - AnalysisContextManager(); - - StrictDependenciesValidator(Entrypoint entrypoint) : super(entrypoint) { - var packagePath = p.normalize(p.absolute(entrypoint.root.dir)); - analysisContextManager.createContextsForDirectory(packagePath); - } - /// Lazily returns all dependency uses in [files]. /// /// Files that do not parse and directives that don't import or export /// `package:` URLs are ignored. Iterable<_Usage> _findPackages(Iterable<String> files) sync* { + final packagePath = p.normalize(p.absolute(entrypoint.root.dir)); + final AnalysisContextManager analysisContextManager = + AnalysisContextManager(); + analysisContextManager.createContextsForDirectory(packagePath); + for (var file in files) { List<UriBasedDirective> directives; var contents = readTextFile(file); @@ -105,10 +101,16 @@ } } - Iterable<_Usage> _usagesBeneath(List<String> paths) => _findPackages(paths - .map((path) => entrypoint.root.listFiles(beneath: path)) - .expand((files) => files) - .where((String file) => p.extension(file) == '.dart')); + Iterable<_Usage> _usagesBeneath(List<String> paths) { + return _findPackages( + paths.expand( + (path) { + return filesBeneath(path, recursive: true) + .where((file) => p.extension(file) == '.dart'); + }, + ), + ); + } } /// A parsed import or export directive in a D source file.
diff --git a/pubspec.yaml b/pubspec.yaml index a7d9bb4..54090a5 100644 --- a/pubspec.yaml +++ b/pubspec.yaml
@@ -6,7 +6,7 @@ dependencies: # Note: Pub's test infrastructure assumes that any dependencies used in tests # will be hosted dependencies. - analyzer: ^2.7.0 + analyzer: ^3.3.1 args: ^2.1.0 async: ^2.6.1 cli_util: ^0.3.5
diff --git a/test/add/hosted/non_default_pub_server_test.dart b/test/add/hosted/non_default_pub_server_test.dart index 0a83ac5..c3f7afa 100644 --- a/test/add/hosted/non_default_pub_server_test.dart +++ b/test/add/hosted/non_default_pub_server_test.dart
@@ -14,7 +14,7 @@ // be accessed. (await servePackages()).serveErrors(); - final server = await servePackages(); + final server = await startPackageServer(); server.serve('foo', '0.2.5'); server.serve('foo', '1.1.0'); server.serve('foo', '1.2.3'); @@ -26,7 +26,9 @@ await pubAdd(args: ['foo:1.2.3', '--hosted-url', url]); await d.cacheDir({'foo': '1.2.3'}, port: server.port).validate(); + await d.appPackagesFile({'foo': '1.2.3'}).validate(); + await d.appDir({ 'foo': { 'version': '1.2.3', @@ -40,7 +42,7 @@ // be accessed. (await servePackages()).serveErrors(); - final server = await servePackages(); + final server = await startPackageServer(); server.serve('foo', '1.1.0'); server.serve('foo', '1.2.3'); server.serve('bar', '0.2.5'); @@ -57,8 +59,10 @@ await d.cacheDir({'foo': '1.2.3', 'bar': '3.2.3', 'baz': '1.3.5'}, port: server.port).validate(); + await d.appPackagesFile( {'foo': '1.2.3', 'bar': '3.2.3', 'baz': '1.3.5'}).validate(); + await d.appDir({ 'foo': { 'version': '1.2.3', @@ -105,7 +109,7 @@ // be accessed. (await servePackages()).serveErrors(); - final server = await servePackages(); + final server = await startPackageServer(); server.serve('foo', '0.2.5'); server.serve('foo', '1.1.0'); server.serve('foo', '1.2.3'); @@ -132,7 +136,7 @@ // be accessed. (await servePackages()).serveErrors(); - final server = await servePackages(); + final server = await startPackageServer(); server.serve('foo', '0.2.5'); server.serve('foo', '1.1.0'); server.serve('foo', '1.2.3'); @@ -159,7 +163,8 @@ // Make the default server serve errors. Only the custom server should // be accessed. (await servePackages()).serveErrors(); - final server = await servePackages(); + + final server = await startPackageServer(); server.serve('foo', '0.2.5'); server.serve('foo', '1.1.0'); server.serve('foo', '1.2.3');
diff --git a/test/add/sdk/sdk_test.dart b/test/add/sdk/sdk_test.dart index 632725e..1a98b78 100644 --- a/test/add/sdk/sdk_test.dart +++ b/test/add/sdk/sdk_test.dart
@@ -39,7 +39,7 @@ d.pubspec({ 'name': 'myapp', 'dependencies': { - 'foo': {'sdk': 'flutter', 'version': '^0.0.1'} + 'foo': {'sdk': 'flutter'} } }), d.packagesFile({
diff --git a/test/dependency_services/dependency_services_test.dart b/test/dependency_services/dependency_services_test.dart new file mode 100644 index 0000000..ed21a19 --- /dev/null +++ b/test/dependency_services/dependency_services_test.dart
@@ -0,0 +1,285 @@ +// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +import 'dart:convert'; +import 'dart:io'; + +import 'package:path/path.dart' as p; +import 'package:pub/src/io.dart'; +import 'package:pub_semver/pub_semver.dart'; +import 'package:shelf/shelf.dart' as shelf; +import 'package:test/test.dart'; + +import '../descriptor.dart' as d; +import '../golden_file.dart'; +import '../test_pub.dart'; + +void manifestAndLockfile(GoldenTestContext context) { + String catFile(String filename) { + final contents = filterUnstableLines( + File(p.join(d.sandbox, appPath, filename)).readAsLinesSync()); + + return ''' +\$ cat $filename +${contents.join('\n')}'''; + } + + context.expectNextSection(''' +${catFile('pubspec.yaml')} +${catFile('pubspec.lock')} +'''); +} + +late final String snapshot; + +extension on GoldenTestContext { + /// Returns the stdout. + Future<String> runDependencyServices(List<String> args, + {String? stdin}) async { + final buffer = StringBuffer(); + buffer.writeln('## Section ${args.join(' ')}'); + final process = await Process.start( + Platform.resolvedExecutable, + [ + snapshot, + ...args, + ], + environment: getPubTestEnvironment(), + workingDirectory: p.join(d.sandbox, appPath), + ); + if (stdin != null) { + process.stdin.write(stdin); + await process.stdin.flush(); + await process.stdin.close(); + } + final outLines = outputLines(process.stdout); + final errLines = outputLines(process.stderr); + final exitCode = await process.exitCode; + + final pipe = stdin == null ? 
'' : ' echo ${escapeShellArgument(stdin)} |'; + buffer.writeln([ + '\$$pipe dependency_services ${args.map(escapeShellArgument).join(' ')}', + ...await outLines, + ...(await errLines).map((e) => '[STDERR] $e'), + if (exitCode != 0) '[EXIT CODE] $exitCode', + ].join('\n')); + + expectNextSection(buffer.toString()); + return (await outLines).join('\n'); + } +} + +Future<Iterable<String>> outputLines(Stream<List<int>> stream) async { + final s = await utf8.decodeStream(stream); + if (s.isEmpty) return []; + return filterUnstableLines(s.split('\n')); +} + +Future<void> listReportApply( + GoldenTestContext context, + List<_PackageVersion> upgrades, { + void Function(Map)? reportAssertions, +}) async { + manifestAndLockfile(context); + await context.runDependencyServices(['list']); + final report = await context.runDependencyServices(['report']); + if (reportAssertions != null) { + reportAssertions(json.decode(report)); + } + final input = json.encode({ + 'dependencyChanges': upgrades, + }); + + await context.runDependencyServices(['apply'], stdin: input); + manifestAndLockfile(context); +} + +Future<void> main() async { + setUpAll(() async { + final tempDir = Directory.systemTemp.createTempSync(); + snapshot = p.join(tempDir.path, 'dependency_services.dart.snapshot'); + final r = Process.runSync(Platform.resolvedExecutable, [ + '--snapshot=$snapshot', + p.join('bin', 'dependency_services.dart'), + ]); + expect(r.exitCode, 0, reason: r.stderr); + }); + + tearDownAll(() { + File(snapshot).parent.deleteSync(recursive: true); + }); + + testWithGolden('Removing transitive', (context) async { + final server = (await servePackages()) + ..serve('foo', '1.2.3', deps: {'transitive': '^1.0.0'}) + ..serve('foo', '2.2.3') + ..serve('transitive', '1.0.0'); + + await d.dir(appPath, [ + d.pubspec({ + 'name': 'app', + 'dependencies': { + 'foo': '^1.0.0', + }, + }) + ]).create(); + await pubGet(); + server.dontAllowDownloads(); + await listReportApply(context, [ + _PackageVersion('foo', 
Version.parse('2.2.3')), + _PackageVersion('transitive', null) + ], reportAssertions: (report) { + expect( + findChangeVersion(report, 'singleBreaking', 'foo'), + '2.2.3', + ); + expect( + findChangeVersion(report, 'singleBreaking', 'transitive'), + null, + ); + }); + }); + + testWithGolden('Compatible', (context) async { + final server = (await servePackages()) + ..serve('foo', '1.2.3') + ..serve('foo', '2.2.3') + ..serve('bar', '1.2.3') + ..serve('bar', '2.2.3') + ..serve('boo', '1.2.3'); + + await d.dir(appPath, [ + d.pubspec({ + 'name': 'app', + 'dependencies': { + 'foo': '^1.0.0', + 'bar': '^1.0.0', + 'boo': '^1.0.0', + }, + }) + ]).create(); + await pubGet(); + server.serve('foo', '1.2.4'); + server.serve('boo', '1.2.4'); + + server.dontAllowDownloads(); + + await listReportApply(context, [ + _PackageVersion('foo', Version.parse('1.2.4')), + ], reportAssertions: (report) { + expect( + findChangeVersion(report, 'compatible', 'foo'), + '1.2.4', + ); + }); + }); + + testWithGolden('Adding transitive', (context) async { + final server = (await servePackages()) + ..serve('foo', '1.2.3') + ..serve('foo', '2.2.3', deps: {'transitive': '^1.0.0'}) + ..serve('transitive', '1.0.0'); + + await d.dir(appPath, [ + d.pubspec({ + 'name': 'app', + 'dependencies': { + 'foo': '^1.0.0', + }, + }) + ]).create(); + await pubGet(); + server.dontAllowDownloads(); + + await listReportApply(context, [ + _PackageVersion('foo', Version.parse('2.2.3')), + _PackageVersion('transitive', Version.parse('1.0.0')) + ], reportAssertions: (report) { + expect( + findChangeVersion(report, 'singleBreaking', 'foo'), + '2.2.3', + ); + expect( + findChangeVersion(report, 'singleBreaking', 'transitive'), + '1.0.0', + ); + }); + }); + + testWithGolden('multibreaking', (context) async { + final server = (await servePackages()) + ..serve('foo', '1.0.0') + ..serve('bar', '1.0.0') + ..serve('baz', '1.0.0'); + + await d.dir(appPath, [ + d.pubspec({ + 'name': 'app', + 'dependencies': { + 'foo': '^1.0.0', + 
'bar': '^1.0.0',
+        // Pinned version. See that the widened constraint is correct.
+        'baz': '1.0.0',
+      },
+    })
+  ]).create();
+  await pubGet();
+  server
+    ..serve('foo', '1.5.0') // compatible
+    ..serve('foo', '2.0.0') // single breaking
+    ..serve('foo', '3.0.0', deps: {'bar': '^2.0.0'}) // multi breaking
+    ..serve('foo', '3.0.1', deps: {'bar': '^2.0.0'})
+    ..serve('bar', '2.0.0', deps: {'foo': '^3.0.0'})
+    ..serve('baz', '1.1.0');
+
+  server.dontAllowDownloads();
+
+  await listReportApply(context, [
+    _PackageVersion('foo', Version.parse('3.0.1'),
+        constraint: VersionConstraint.parse('^3.0.0')),
+    _PackageVersion('bar', Version.parse('2.0.0'))
+  ], reportAssertions: (report) {
+    expect(
+      findChangeVersion(report, 'multiBreaking', 'foo'),
+      '3.0.1',
+    );
+    expect(
+      findChangeVersion(report, 'multiBreaking', 'bar'),
+      '2.0.0',
+    );
+  });
+  });
+}
+
+dynamic findChangeVersion(dynamic json, String updateType, String name) {
+  final dep = json['dependencies'].firstWhere((p) => p['name'] == 'foo');
+  return dep[updateType].firstWhere((p) => p['name'] == name)['version'];
+}
+
+class _PackageVersion {
+  String name;
+  Version? version;
+  VersionConstraint? constraint;
+  _PackageVersion(this.name, this.version, {this.constraint});
+
+  Map<String, Object?> toJson() => {
+        'name': name,
+        'version': version?.toString(),
+        if (constraint != null) 'constraint': constraint.toString()
+      };
+}
+
+extension on PackageServer {
+  /// Check that nothing is downloaded.
+  void dontAllowDownloads() {
+    // This testing logic is a bit fragile, if we change the pattern
+    // for the download URL then this will pass silently. There isn't much we
+    // can / should do about it. Just accept the limitations, and remove it if
+    // the test becomes useless.
+    handle(RegExp(r'/.+\.tar\.gz'), (request) {
+      return shelf.Response.notFound(
+          'This test should not download archives! Requested ${request.url}');
+    });
+  }
+}
diff --git a/test/descriptor.dart b/test/descriptor.dart index 7b140e2..2491df4 100644 --- a/test/descriptor.dart +++ b/test/descriptor.dart
@@ -107,6 +107,16 @@ return pubspec(map); } +/// Describes a file named `pubspec_overrides.yaml` by default, with the given +/// YAML-serialized [contents], which should be a serializable object. +/// +/// [contents] may contain [Future]s that resolve to serializable objects, +/// which may in turn contain [Future]s recursively. +Descriptor pubspecOverrides(Map<String, Object> contents) => YamlDescriptor( + 'pubspec_overrides.yaml', + yaml(contents), + ); + /// Describes a directory named `lib` containing a single dart file named /// `<name>.dart` that contains a line of Dart code. Descriptor libDir(String name, [String? code]) {
diff --git a/test/get/git/git_not_installed_test.dart b/test/get/git/git_not_installed_test.dart index f00d795..e590770 100644 --- a/test/get/git/git_not_installed_test.dart +++ b/test/get/git/git_not_installed_test.dart
@@ -5,50 +5,61 @@ @TestOn('linux') import 'dart:io'; -import 'package:path/path.dart' as p; -import 'package:pub/src/io.dart' show runProcess; import 'package:test/test.dart'; -import 'package:test_descriptor/test_descriptor.dart' show sandbox; import '../../descriptor.dart' as d; import '../../test_pub.dart'; void main() { test('reports failure if Git is not installed', () async { - // Create temporary folder 'bin/' containing a 'git' script in [sandbox] - // By adding the bin/ folder to the search `$PATH` we can prevent `pub` from - // detecting the installed 'git' binary and we can test that it prints - // a useful error message. - await d.dir('bin', [ - d.file('git', ''' + await setUpFakeGitScript(bash: ''' #!/bin/bash -e echo "not git" exit 1 -'''), - d.file('git.bat', ''' +''', batch: ''' echo "not git" -''') - ]).create(); - final binFolder = p.join(sandbox, 'bin'); - // chmod the git script - if (!Platform.isWindows) { - await runProcess('chmod', ['+x', p.join(sandbox, 'bin', 'git')]); - } +'''); + await d.appDir({ + 'foo': {'git': '../foo.git'} + }).create(); + + await pubGet( + environment: extendedPathEnv(), + error: contains('Cannot find a Git executable'), + exitCode: 1, + ); + }); + + test('warns if git version is too old', () async { + await setUpFakeGitScript(bash: ''' +#!/bin/bash -e +if [ "\$1" == "--version" ] +then + echo "git version 2.13.1.616" + exit 1 +else + PATH=${Platform.environment['PATH']} git \$* +fi +''', batch: ''' +if "%1"=="--version" ( + echo "git version 2.13.1.616" +) else ( + set path="${Platform.environment['PATH']}" + git %* +) +'''); + + await d.git('foo.git', [d.libPubspec('foo', '1.0.0')]).create(); await d.appDir({ 'foo': {'git': '../foo.git'} }).create(); - final separator = Platform.isWindows ? 
';' : ':'; - await pubGet( - environment: { - // Override 'PATH' to ensure that we can't detect a working "git" binary - 'PATH': '$binFolder$separator${Platform.environment['PATH']}', - }, - // We wish to verify that this error message is printed. - error: contains('Cannot find a Git executable'), - exitCode: 1, // exit code is non-zero. + environment: extendedPathEnv(), + warning: + contains('You have a very old version of git (version 2.13.1.616)'), + exitCode: 0, ); }); }
diff --git a/test/get/git/path_test.dart b/test/get/git/path_test.dart index 49607a1..d26d9cb 100644 --- a/test/get/git/path_test.dart +++ b/test/get/git/path_test.dart
@@ -3,9 +3,11 @@ // BSD-style license that can be found in the LICENSE file. import 'package:path/path.dart' as p; +import 'package:pub/src/exit_codes.dart' as exit_codes; import 'package:pub/src/io.dart'; import 'package:pub/src/lock_file.dart'; -import 'package:pub/src/source_registry.dart'; +import 'package:pub/src/source/git.dart'; +import 'package:pub/src/system_cache.dart'; import 'package:test/test.dart'; import '../../descriptor.dart' as d; @@ -76,12 +78,93 @@ }).validate(); final lockFile = LockFile.load( - p.join(d.sandbox, appPath, 'pubspec.lock'), SourceRegistry()); + p.join(d.sandbox, appPath, 'pubspec.lock'), SystemCache().sources); - expect(lockFile.packages['sub']!.description['path'], 'sub/dir%25', + expect( + (lockFile.packages['sub']!.description.description as GitDescription) + .path, + 'sub/dir%25', reason: 'use uris to specify the path relative to the repo'); }); + group('requires path to be absolute', () { + test('absolute path', () async { + await d.appDir({ + 'sub': { + 'git': {'url': '../foo.git', 'path': '/subdir'} + } + }).create(); + + await pubGet( + error: contains( + 'Invalid description in the "myapp" pubspec on the "sub" dependency: The \'path\' field of the description must be a relative path URL.', + ), + exitCode: exit_codes.DATA, + ); + }); + test('scheme', () async { + await d.appDir({ + 'sub': { + 'git': {'url': '../foo.git', 'path': 'https://subdir'} + } + }).create(); + + await pubGet( + error: contains( + 'Invalid description in the "myapp" pubspec on the "sub" dependency: The \'path\' field of the description must be a relative path URL.', + ), + exitCode: exit_codes.DATA, + ); + }); + test('fragment', () async { + await d.appDir({ + 'sub': { + 'git': {'url': '../foo.git', 'path': 'subdir/dir#fragment'} + } + }).create(); + + await pubGet( + error: contains( + 'Invalid description in the "myapp" pubspec on the "sub" dependency: The \'path\' field of the description must be a relative path URL.', + ), + exitCode: 
exit_codes.DATA, + ); + }); + + test('query', () async { + await d.appDir({ + 'sub': { + 'git': {'url': '../foo.git', 'path': 'subdir/dir?query'} + } + }).create(); + + await pubGet( + error: contains( + 'Invalid description in the "myapp" pubspec on the "sub" dependency: The \'path\' field of the description must be a relative path URL.', + ), + exitCode: exit_codes.DATA, + ); + }); + + test('authority', () async { + await d.appDir({ + 'sub': { + 'git': { + 'url': '../foo.git', + 'path': 'bob:pwd@somewhere.example.com/subdir' + } + } + }).create(); + + await pubGet( + error: contains( + 'Invalid description in the "myapp" pubspec on the "sub" dependency: The \'path\' field of the description must be a relative path URL.', + ), + exitCode: exit_codes.DATA, + ); + }); + }); + test('depends on a package in a deep subdirectory, non-relative uri', () async { ensureGit(); @@ -120,9 +203,12 @@ }).validate(); final lockFile = LockFile.load( - p.join(d.sandbox, appPath, 'pubspec.lock'), SourceRegistry()); + p.join(d.sandbox, appPath, 'pubspec.lock'), SystemCache().sources); - expect(lockFile.packages['sub']!.description['path'], 'sub/dir%25', + expect( + (lockFile.packages['sub']!.description.description as GitDescription) + .path, + 'sub/dir%25', reason: 'use uris to specify the path relative to the repo'); });
diff --git a/test/get/path/relative_path_test.dart b/test/get/path/relative_path_test.dart index 4255d49..c3a7e0b 100644 --- a/test/get/path/relative_path_test.dart +++ b/test/get/path/relative_path_test.dart
@@ -4,7 +4,8 @@ import 'package:path/path.dart' as path; import 'package:pub/src/lock_file.dart'; -import 'package:pub/src/source_registry.dart'; +import 'package:pub/src/source/path.dart'; +import 'package:pub/src/system_cache.dart'; import 'package:test/test.dart'; import '../../descriptor.dart' as d; @@ -90,10 +91,11 @@ await pubGet(); var lockfilePath = path.join(d.sandbox, appPath, 'pubspec.lock'); - var lockfile = LockFile.load(lockfilePath, SourceRegistry()); - var description = lockfile.packages['foo']!.description; + var lockfile = LockFile.load(lockfilePath, SystemCache().sources); + var description = + lockfile.packages['foo']!.description.description as PathDescription; - expect(description['relative'], isTrue); - expect(description['path'], path.join(d.sandbox, 'foo')); + expect(description.relative, isTrue); + expect(description.path, path.join(d.sandbox, 'foo')); }); }
diff --git a/test/global/activate/activate_git_after_hosted_test.dart b/test/global/activate/activate_git_after_hosted_test.dart index c553041..390aa87 100644 --- a/test/global/activate/activate_git_after_hosted_test.dart +++ b/test/global/activate/activate_git_after_hosted_test.dart
@@ -2,6 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +import 'package:path/path.dart'; import 'package:test/test.dart'; import '../../descriptor.dart' as d; @@ -28,11 +29,11 @@ output: allOf( startsWith('Package foo is currently active at version 1.0.0.\n' 'Resolving dependencies...\n' - '+ foo 1.0.0 from git ../foo.git at '), + '+ foo 1.0.0 from git ..${separator}foo.git at '), // Specific revision number goes here. endsWith('Building package executables...\n' 'Built foo:foo.\n' - 'Activated foo 1.0.0 from Git repository "../foo.git".'))); + 'Activated foo 1.0.0 from Git repository "..${separator}foo.git".'))); // Should now run the git one. var pub = await pubRun(global: true, args: ['foo']);
diff --git a/test/global/activate/activate_hosted_after_git_test.dart b/test/global/activate/activate_hosted_after_git_test.dart index 58c65fb..7ac174a 100644 --- a/test/global/activate/activate_hosted_after_git_test.dart +++ b/test/global/activate/activate_hosted_after_git_test.dart
@@ -2,6 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +import 'package:path/path.dart'; import 'package:test/test.dart'; import '../../descriptor.dart' as d; @@ -22,7 +23,7 @@ await runPub(args: ['global', 'activate', '-sgit', '../foo.git']); await runPub(args: ['global', 'activate', 'foo'], output: ''' - Package foo is currently active from Git repository "../foo.git". + Package foo is currently active from Git repository "..${separator}foo.git". Resolving dependencies... + foo 2.0.0 Downloading foo 2.0.0...
diff --git a/test/global/activate/activate_hosted_twice_test.dart b/test/global/activate/activate_hosted_twice_test.dart index 9baed5b..a4d1337 100644 --- a/test/global/activate/activate_hosted_twice_test.dart +++ b/test/global/activate/activate_hosted_twice_test.dart
@@ -24,15 +24,7 @@ d.dir('lib', [d.file('bar.dart', 'final version = "1.0.0";')]) ]); - await runPub(args: ['global', 'activate', 'foo'], output: ''' -Resolving dependencies... -+ bar 1.0.0 -+ foo 1.0.0 -Downloading foo 1.0.0... -Downloading bar 1.0.0... -Building package executables... -Built foo:foo. -Activated foo 1.0.0.'''); + await runPub(args: ['global', 'activate', 'foo'], output: anything); await runPub(args: ['global', 'activate', 'foo'], output: ''' Package foo is currently active at version 1.0.0.
diff --git a/test/global/activate/activate_path_after_hosted_test.dart b/test/global/activate/activate_path_after_hosted_test.dart index cfc28fd..01d493d 100644 --- a/test/global/activate/activate_path_after_hosted_test.dart +++ b/test/global/activate/activate_path_after_hosted_test.dart
@@ -10,7 +10,7 @@ import '../../test_pub.dart'; void main() { - test('activating a hosted package deactivates the path one', () async { + test('activating a path package deactivates the hosted one', () async { final server = await servePackages(); server.serve('foo', '1.0.0', contents: [ d.dir('bin', [d.file('foo.dart', "main(args) => print('hosted');")])
diff --git a/test/global/activate/git_package_test.dart b/test/global/activate/git_package_test.dart index ee88de7..02d06db 100644 --- a/test/global/activate/git_package_test.dart +++ b/test/global/activate/git_package_test.dart
@@ -2,6 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +import 'package:path/path.dart' as p; import 'package:test/test.dart'; import '../../descriptor.dart' as d; @@ -20,10 +21,72 @@ args: ['global', 'activate', '-sgit', '../foo.git'], output: allOf( startsWith('Resolving dependencies...\n' - '+ foo 1.0.0 from git ../foo.git at '), + '+ foo 1.0.0 from git ..${p.separator}foo.git at '), // Specific revision number goes here. endsWith('Building package executables...\n' 'Built foo:foo.\n' - 'Activated foo 1.0.0 from Git repository "../foo.git".'))); + 'Activated foo 1.0.0 from Git repository "..${p.separator}foo.git".'))); + }); + + test('activates a package from a Git repo with path and ref', () async { + ensureGit(); + + await d.git('foo.git', [ + d.libPubspec('foo', '0.0.0'), + d.dir('bin', [d.file('foo.dart', "main() => print('0');")]), + d.dir( + 'sub', + [ + d.libPubspec('foo', '1.0.0'), + d.dir('bin', [d.file('foo.dart', "main() => print('1');")]) + ], + ), + ]).create(); + await d.git('foo.git', [ + d.dir( + 'sub', + [ + d.libPubspec('foo', '2.0.0'), + d.dir('bin', [d.file('foo.dart', "main() => print('2');")]) + ], + ), + ]).commit(); + await d.git('foo.git', [ + d.dir( + 'sub', + [ + d.libPubspec('foo', '3.0.0'), + d.dir('bin', [d.file('foo.dart', "main() => print('3');")]) + ], + ), + ]).commit(); + + await runPub( + args: [ + 'global', + 'activate', + '-sgit', + '../foo.git', + '--git-ref=HEAD~', + '--git-path=sub/', + ], + output: allOf( + startsWith('Resolving dependencies...\n' + '+ foo 2.0.0 from git ..${p.separator}foo.git at'), + // Specific revision number goes here. + contains('in sub'), + endsWith('Building package executables...\n' + 'Built foo:foo.\n' + 'Activated foo 2.0.0 from Git repository "..${p.separator}foo.git".'), + ), + ); + await runPub( + args: [ + 'global', + 'run', + 'foo', + ], + output: contains('2'), + ); }); }
diff --git a/test/global/activate/reactivating_git_upgrades_test.dart b/test/global/activate/reactivating_git_upgrades_test.dart index 100aaea..5eb8d5f 100644 --- a/test/global/activate/reactivating_git_upgrades_test.dart +++ b/test/global/activate/reactivating_git_upgrades_test.dart
@@ -2,6 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +import 'package:path/path.dart'; import 'package:test/test.dart'; import '../../descriptor.dart' as d; @@ -20,11 +21,11 @@ args: ['global', 'activate', '-sgit', '../foo.git'], output: allOf( startsWith('Resolving dependencies...\n' - '+ foo 1.0.0 from git ../foo.git at '), + '+ foo 1.0.0 from git ..${separator}foo.git at '), // Specific revision number goes here. endsWith('Building package executables...\n' 'Built foo:foo.\n' - 'Activated foo 1.0.0 from Git repository "../foo.git".'))); + 'Activated foo 1.0.0 from Git repository "..${separator}foo.git".'))); await d.git('foo.git', [d.libPubspec('foo', '1.0.1')]).commit(); @@ -33,12 +34,12 @@ args: ['global', 'activate', '-sgit', '../foo.git'], output: allOf( startsWith('Package foo is currently active from Git repository ' - '"../foo.git".\n' + '"..${separator}foo.git".\n' 'Resolving dependencies...\n' - '+ foo 1.0.1 from git ../foo.git at '), + '+ foo 1.0.1 from git ..${separator}foo.git at '), // Specific revision number goes here. endsWith('Building package executables...\n' 'Built foo:foo.\n' - 'Activated foo 1.0.1 from Git repository "../foo.git".'))); + 'Activated foo 1.0.1 from Git repository "..${separator}foo.git".'))); }); }
diff --git a/test/global/activate/removes_old_lockfile_test.dart b/test/global/activate/removes_old_lockfile_test.dart deleted file mode 100644 index 3392aff..0000000 --- a/test/global/activate/removes_old_lockfile_test.dart +++ /dev/null
@@ -1,33 +0,0 @@ -// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file -// for details. All rights reserved. Use of this source code is governed by a -// BSD-style license that can be found in the LICENSE file. - -import 'package:test/test.dart'; - -import '../../descriptor.dart' as d; -import '../../test_pub.dart'; - -void main() { - test('removes the 1.6-style lockfile', () async { - final server = await servePackages(); - server.serve('foo', '1.0.0'); - - await d.dir(cachePath, [ - d.dir('global_packages', [ - d.file( - 'foo.lock', - 'packages: {foo: {description: foo, source: hosted, ' - 'version: "1.0.0"}}}') - ]) - ]).create(); - - await runPub(args: ['global', 'activate', 'foo']); - - await d.dir(cachePath, [ - d.dir('global_packages', [ - d.nothing('foo.lock'), - d.dir('foo', [d.file('pubspec.lock', contains('1.0.0'))]) - ]) - ]).validate(); - }); -}
diff --git a/test/global/deactivate/git_package_test.dart b/test/global/deactivate/git_package_test.dart index f06ce53..04b9267 100644 --- a/test/global/deactivate/git_package_test.dart +++ b/test/global/deactivate/git_package_test.dart
@@ -2,6 +2,7 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. +import 'package:path/path.dart'; import 'package:test/test.dart'; import '../../descriptor.dart' as d; @@ -21,6 +22,6 @@ await runPub( args: ['global', 'deactivate', 'foo'], output: - 'Deactivated package foo 1.0.0 from Git repository "../foo.git".'); + 'Deactivated package foo 1.0.0 from Git repository "..${separator}foo.git".'); }); }
diff --git a/test/global/list_test.dart b/test/global/list_test.dart index 8d4d02e..e1ba4fd 100644 --- a/test/global/list_test.dart +++ b/test/global/list_test.dart
@@ -32,7 +32,7 @@ await runPub( args: ['global', 'list'], - output: 'foo 1.0.0 from Git repository "../foo.git"'); + output: 'foo 1.0.0 from Git repository "..${p.separator}foo.git"'); }); test('lists an activated Path package', () async {
diff --git a/test/global/run/uses_old_lockfile_test.dart b/test/global/run/uses_old_lockfile_test.dart deleted file mode 100644 index 1afab81..0000000 --- a/test/global/run/uses_old_lockfile_test.dart +++ /dev/null
@@ -1,54 +0,0 @@ -// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file -// for details. All rights reserved. Use of this source code is governed by a -// BSD-style license that can be found in the LICENSE file. - -import 'package:test/test.dart'; - -import '../../descriptor.dart' as d; -import '../../test_pub.dart'; - -void main() { - test('uses the 1.6-style lockfile if necessary', () async { - await servePackages() - ..serve('bar', '1.0.0') - ..serve('foo', '1.0.0', deps: { - 'bar': 'any' - }, contents: [ - d.dir('bin', [ - d.file('script.dart', """ - import 'package:bar/bar.dart' as bar; - - main(args) => print(bar.main());""") - ]) - ]); - - await runPub(args: ['cache', 'add', 'foo']); - await runPub(args: ['cache', 'add', 'bar']); - - await d.dir(cachePath, [ - d.dir('global_packages', [ - d.file('foo.lock', ''' -packages: - foo: - description: foo - source: hosted - version: "1.0.0" - bar: - description: bar - source: hosted - version: "1.0.0"''') - ]) - ]).create(); - - var pub = await pubRun(global: true, args: ['foo:script']); - expect(pub.stdout, emitsThrough('bar 1.0.0')); - await pub.shouldExit(); - - await d.dir(cachePath, [ - d.dir('global_packages', [ - d.nothing('foo.lock'), - d.dir('foo', [d.file('pubspec.lock', contains('1.0.0'))]) - ]) - ]).validate(); - }); -}
diff --git a/test/golden_file.dart b/test/golden_file.dart index 7927776..6a88dd9 100644 --- a/test/golden_file.dart +++ b/test/golden_file.dart
@@ -151,6 +151,7 @@ List<String> args, { Map<String, String>? environment, String? workingDirectory, + String? stdin, }) async { // Create new section index number (before doing anything async) final sectionIndex = _nextSectionIndex++; @@ -161,6 +162,7 @@ s, environment: environment, workingDirectory: workingDirectory, + stdin: stdin, ); _expectSection(sectionIndex, s.toString());
diff --git a/test/io_test.dart b/test/io_test.dart index 2c529cd..0657dd6 100644 --- a/test/io_test.dart +++ b/test/io_test.dart
@@ -291,7 +291,7 @@ test( 'applies executable bits from tar file', () => withTempDir((tempDir) async { - final entry = Stream.value(TarEntry.data( + final entry = Stream<TarEntry>.value(TarEntry.data( TarHeader( name: 'weird_exe', typeFlag: TypeFlag.reg, @@ -309,7 +309,7 @@ test('extracts files and links', () { return withTempDir((tempDir) async { - final entries = Stream.fromIterable([ + final entries = Stream<TarEntry>.fromIterable([ TarEntry.data( TarHeader(name: 'lib/main.txt', typeFlag: TypeFlag.reg), utf8.encode('text content'), @@ -349,7 +349,7 @@ test('preserves empty directories', () { return withTempDir((tempDir) async { - final entry = Stream.value(TarEntry.data( + final entry = Stream<TarEntry>.value(TarEntry.data( TarHeader( name: 'bin/', typeFlag: TypeFlag.dir, @@ -368,7 +368,7 @@ test('throws for entries escaping the tar file', () { return withTempDir((tempDir) async { - final entry = Stream.value(TarEntry.data( + final entry = Stream<TarEntry>.value(TarEntry.data( TarHeader( name: '../other_package-1.2.3/lib/file.dart', typeFlag: TypeFlag.reg, @@ -386,7 +386,7 @@ test('skips symlinks escaping the tar file', () { return withTempDir((tempDir) async { - final entry = Stream.value(TarEntry.data( + final entry = Stream<TarEntry>.value(TarEntry.data( TarHeader( name: 'nested/bad_link', typeFlag: TypeFlag.symlink, @@ -403,7 +403,7 @@ test('skips hardlinks escaping the tar file', () { return withTempDir((tempDir) async { - final entry = Stream.value(TarEntry.data( + final entry = Stream<TarEntry>.value(TarEntry.data( TarHeader( name: 'nested/bad_link', typeFlag: TypeFlag.link,
diff --git a/test/lish/archives_and_uploads_a_package_test.dart b/test/lish/archives_and_uploads_a_package_test.dart index 88e4862..b033e28 100644 --- a/test/lish/archives_and_uploads_a_package_test.dart +++ b/test/lish/archives_and_uploads_a_package_test.dart
@@ -37,6 +37,31 @@ await pub.shouldExit(exit_codes.SUCCESS); }); + test('archives and uploads a package using token', () async { + await servePackages(); + await d.tokensFile({ + 'version': 1, + 'hosted': [ + {'url': globalServer.url, 'token': 'access token'}, + ] + }).create(); + var pub = await startPublish(globalServer); + + await confirmPublish(pub); + handleUploadForm(globalServer); + handleUpload(globalServer); + + globalServer.expect('GET', '/create', (request) { + return shelf.Response.ok(jsonEncode({ + 'success': {'message': 'Package test_pkg 1.0.0 uploaded!'} + })); + }); + + expect(pub.stdout, emits(startsWith('Uploading...'))); + expect(pub.stdout, emits('Package test_pkg 1.0.0 uploaded!')); + await pub.shouldExit(exit_codes.SUCCESS); + }); + test('publishes to hosted-url with path', () async { await servePackages(); await d.tokensFile({ @@ -48,7 +73,7 @@ var pub = await startPublish( globalServer, path: '/sub/folder', - authMethod: 'token', + overrideDefaultHostedServer: false, environment: {'TOKEN': 'access token'}, );
diff --git a/test/lock_file_test.dart b/test/lock_file_test.dart index 4360c7c..1bfac5e 100644 --- a/test/lock_file_test.dart +++ b/test/lock_file_test.dart
@@ -2,56 +2,17 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -import 'package:pub/src/language_version.dart'; import 'package:pub/src/lock_file.dart'; import 'package:pub/src/package_name.dart'; -import 'package:pub/src/source.dart'; -import 'package:pub/src/source_registry.dart'; +import 'package:pub/src/source/hosted.dart'; import 'package:pub/src/system_cache.dart'; import 'package:pub_semver/pub_semver.dart'; -import 'package:test/test.dart'; +import 'package:test/test.dart' hide Description; import 'package:yaml/yaml.dart'; -class FakeSource extends Source { - @override - final String name = 'fake'; - - @override - BoundSource bind(SystemCache cache) => - throw UnsupportedError('Cannot download fake packages.'); - - @override - PackageRef parseRef(String name, description, - {String? containingPath, LanguageVersion? languageVersion}) { - if (!description.endsWith(' desc')) throw FormatException('Bad'); - return PackageRef(name, this, description); - } - - @override - PackageId parseId(String name, Version version, description, - {String? containingPath}) { - if (!description.endsWith(' desc')) throw FormatException('Bad'); - return PackageId(name, this, version, description); - } - - @override - bool descriptionsEqual(description1, description2) => - description1 == description2; - - @override - int hashDescription(description) => description.hashCode; - - String packageName(String description) { - // Strip off ' desc'. 
- return description.substring(0, description.length - 5); - } -} - void main() { - var sources = SourceRegistry(); - var fakeSource = FakeSource(); - sources.register(fakeSource); - + final cache = SystemCache(); + final sources = cache.sources; group('LockFile', () { group('parse()', () { test('returns an empty lockfile if the contents are empty', () { @@ -69,27 +30,33 @@ packages: bar: version: 1.2.3 - source: fake - description: bar desc + source: hosted + description: + name: bar + url: https://bar.com foo: version: 2.3.4 - source: fake - description: foo desc -''', sources); + source: hosted + description: + name: foo + url: https://foo.com +''', cache.sources); expect(lockFile.packages.length, equals(2)); var bar = lockFile.packages['bar']!; expect(bar.name, equals('bar')); expect(bar.version, equals(Version(1, 2, 3))); - expect(bar.source, equals(fakeSource)); - expect(bar.description, equals('bar desc')); + expect(bar.source, equals(cache.hosted)); + expect((bar.description.description as HostedDescription).url, + equals('https://bar.com')); var foo = lockFile.packages['foo']!; expect(foo.name, equals('foo')); expect(foo.version, equals(Version(2, 3, 4))); - expect(foo.source, equals(fakeSource)); - expect(foo.description, equals('foo desc')); + expect(foo.source, equals(cache.hosted)); + expect((foo.description.description as HostedDescription).url, + equals('https://foo.com')); }); test('allows an unknown source', () { @@ -99,9 +66,9 @@ source: bad version: 1.2.3 description: foo desc -''', sources); +''', cache.sources); var foo = lockFile.packages['foo']!; - expect(foo.source, equals(sources['bad'])); + expect(foo.source, equals(sources('bad'))); }); test('allows an empty dependency map', () { @@ -201,8 +168,8 @@ packages: foo: version: 1.2.3 - source: fake - description: foo desc is bad + source: hosted + description: foam ''', sources); }, throwsFormatException); }); @@ -254,8 +221,16 @@ test('serialize() dumps the lockfile to YAML', () { var 
lockfile = LockFile([ - PackageId('foo', fakeSource, Version.parse('1.2.3'), 'foo desc'), - PackageId('bar', fakeSource, Version.parse('3.2.1'), 'bar desc') + PackageId( + 'foo', + Version.parse('1.2.3'), + ResolvedHostedDescription( + HostedDescription('foo', 'https://foo.com'))), + PackageId( + 'bar', + Version.parse('3.2.1'), + ResolvedHostedDescription( + HostedDescription('bar', 'https://bar.com'))), ], devDependencies: { 'bar' }); @@ -267,14 +242,14 @@ 'packages': { 'foo': { 'version': '1.2.3', - 'source': 'fake', - 'description': 'foo desc', + 'source': 'hosted', + 'description': {'name': 'foo', 'url': 'https://foo.com'}, 'dependency': 'transitive' }, 'bar': { 'version': '3.2.1', - 'source': 'fake', - 'description': 'bar desc', + 'source': 'hosted', + 'description': {'name': 'bar', 'url': 'https://bar.com'}, 'dependency': 'direct dev' } }
diff --git a/test/package_list_files_test.dart b/test/package_list_files_test.dart index 091001b..013f3a1 100644 --- a/test/package_list_files_test.dart +++ b/test/package_list_files_test.dart
@@ -423,6 +423,170 @@
       p.join(root, 'pubignoredir', 'b.txt'),
     });
   });
+
+  group('relative to current directory rules', () {
+    setUp(ensureGit);
+    group('delimiter in the beginning', () {
+      test('ignore directory in exact directory', () async {
+        final repo = d.git(appPath, [
+          d.dir('packages', [
+            d.dir('nested', [
+              d.file('.gitignore', '/bin/'),
+              d.appPubspec(),
+              d.dir('bin', [
+                d.file('run.dart'),
+              ]),
+            ]),
+          ]),
+        ]);
+        await repo.create();
+        createEntrypoint(p.join(appPath, 'packages', 'nested'));
+
+        expect(entrypoint!.root.listFiles(), {
+          p.join(root, 'pubspec.yaml'),
+        });
+      });
+
+      test('does not ignore file with same name as ignored directory', () async {
+        final repo = d.git(appPath, [
+          d.dir('packages', [
+            d.dir('nested', [
+              d.file('.gitignore', '/bin/'),
+              d.appPubspec(),
+              d.file('bin'),
+            ]),
+          ]),
+        ]);
+        await repo.create();
+        createEntrypoint(p.join(appPath, 'packages', 'nested'));
+
+        expect(entrypoint!.root.listFiles(), {
+          p.join(root, 'pubspec.yaml'),
+          p.join(root, 'bin'),
+        });
+      });
+
+      test('ignore file on exact directory', () async {
+        final repo = d.git(appPath, [
+          d.dir('packages', [
+            d.dir('nested', [
+              d.appPubspec(),
+              d.dir('bin', [
+                d.file('.gitignore', '/run.dart'),
+                d.file('run.dart'),
+              ]),
+            ]),
+          ]),
+        ]);
+        await repo.create();
+        createEntrypoint(p.join(appPath, 'packages', 'nested'));
+
+        expect(entrypoint!.root.listFiles(), {
+          p.join(root, 'pubspec.yaml'),
+        });
+      });
+
+      test('does not ignore files beneath exact directory', () async {
+        final repo = d.git(appPath, [
+          d.dir('packages', [
+            d.dir('nested', [
+              d.appPubspec(),
+              d.dir('bin', [
+                d.file('.gitignore', '/run.dart'),
+                d.file('run.dart'),
+                d.dir('nested_again', [
+                  d.file('run.dart'),
+                ]),
+              ]),
+            ]),
+          ]),
+        ]);
+        await repo.create();
+        createEntrypoint(p.join(appPath, 'packages', 'nested'));
+
+        expect(entrypoint!.root.listFiles(), {
+          p.join(root, 'pubspec.yaml'),
+          p.join(root, 'bin', 'nested_again', 'run.dart'),
+        });
+      });
+
+      test('disable ignore in exact directory', () async {
+        final repo = d.git(appPath, [
+          d.dir('packages', [
+            d.file('.gitignore', 'run.dart'),
+            d.dir('nested', [
+              d.appPubspec(),
+              d.dir('bin', [
+                d.file('.gitignore', '!/run.dart'),
+                d.file('run.dart'),
+                d.dir('nested_again', [
+                  d.file('run.dart'),
+                ]),
+              ]),
+            ]),
+          ]),
+        ]);
+        await repo.create();
+        createEntrypoint(p.join(appPath, 'packages', 'nested'));
+
+        expect(entrypoint!.root.listFiles(), {
+          p.join(root, 'pubspec.yaml'),
+          p.join(root, 'bin', 'run.dart'),
+        });
+      });
+    });
+  });
+
+  group('delimiter in the middle', () {
+    test('should work with path relative to current directory', () async {
+      final repo = d.git(appPath, [
+        d.dir('packages', [
+          d.file('.gitignore', 'nested/bin/run.dart'),
+          d.dir('nested', [
+            d.appPubspec(),
+            d.dir('bin', [
+              d.file('run.dart'),
+              d.dir('nested_again', [
+                d.file('run.dart'),
+              ]),
+            ]),
+          ]),
+        ]),
+      ]);
+      await repo.create();
+      createEntrypoint(p.join(appPath, 'packages', 'nested'));
+
+      expect(entrypoint!.root.listFiles(), {
+        p.join(root, 'pubspec.yaml'),
+        p.join(root, 'bin', 'nested_again', 'run.dart'),
+      });
+    });
+
+    test('should not have effect in nested folders', () async {
+      final repo = d.git(appPath, [
+        d.dir('packages', [
+          d.file('.gitignore', 'bin/run.dart'),
+          d.dir('nested', [
+            d.appPubspec(),
+            d.dir('bin', [
+              d.file('run.dart'),
+              d.dir('nested_again', [
+                d.file('run.dart'),
+              ]),
+            ]),
+          ]),
+        ]),
+      ]);
+      await repo.create();
+      createEntrypoint(p.join(appPath, 'packages', 'nested'));
+
+      expect(entrypoint!.root.listFiles(), {
+        p.join(root, 'pubspec.yaml'),
+        p.join(root, 'bin', 'run.dart'),
+        p.join(root, 'bin', 'nested_again', 'run.dart'),
+      });
+    });
+  });
 }
 
 void createEntrypoint([String? path]) {
diff --git a/test/pub_uploader_test.dart b/test/pub_uploader_test.dart index 47d50d3..643671a 100644 --- a/test/pub_uploader_test.dart +++ b/test/pub_uploader_test.dart
@@ -2,168 +2,34 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -import 'dart:async'; -import 'dart:convert'; - -import 'package:pub/src/exit_codes.dart' as exit_codes; -import 'package:shelf/shelf.dart' as shelf; import 'package:test/test.dart'; -import 'package:test_process/test_process.dart'; import 'descriptor.dart' as d; import 'test_pub.dart'; -Future<TestProcess> startPubUploader(PackageServer server, List<String> args) { - var tokenEndpoint = Uri.parse(server.url).resolve('/token').toString(); - var allArgs = ['uploader', ...args]; - return startPub( - args: allArgs, - tokenEndpoint: tokenEndpoint, - environment: {'PUB_HOSTED_URL': tokenEndpoint}); -} - void main() { - group('displays usage', () { - test('when run with no arguments', () { - return runPub(args: ['uploader'], exitCode: exit_codes.USAGE); - }); + test('displays deprecation notice', () async { + await runPub( + args: ['uploader', 'add'], + error: ''' +Package uploaders are no longer managed from the command line. +Manage uploaders from: - test('when run with only a command', () { - return runPub(args: ['uploader', 'add'], exitCode: exit_codes.USAGE); - }); +https://pub.dev/packages/<packageName>/admin +''', + exitCode: 1, + ); - test('when run with an invalid command', () { - return runPub( - args: ['uploader', 'foo', 'email'], exitCode: exit_codes.USAGE); - }); - }); + await d.appDir().create(); + await runPub( + args: ['uploader', 'add'], + error: ''' +Package uploaders are no longer managed from the command line. 
+Manage uploaders from: - test('adds an uploader', () async { - await servePackages(); - await d.credentialsFile(globalServer, 'access token').create(); - var pub = await startPubUploader( - globalServer, ['--package', 'pkg', 'add', 'email']); - - globalServer.expect('POST', '/api/packages/pkg/uploaders', (request) { - return request.readAsString().then((body) { - expect(body, equals('email=email')); - - return shelf.Response.ok( - jsonEncode({ - 'success': {'message': 'Good job!'} - }), - headers: {'content-type': 'application/json'}); - }); - }); - - expect(pub.stdout, emits('Good job!')); - await pub.shouldExit(exit_codes.SUCCESS); - }); - - test('removes an uploader', () async { - await servePackages(); - await d.credentialsFile(globalServer, 'access token').create(); - var pub = await startPubUploader( - globalServer, ['--package', 'pkg', 'remove', 'email']); - - globalServer.expect('DELETE', '/api/packages/pkg/uploaders/email', - (request) { - return shelf.Response.ok( - jsonEncode({ - 'success': {'message': 'Good job!'} - }), - headers: {'content-type': 'application/json'}); - }); - - expect(pub.stdout, emits('Good job!')); - await pub.shouldExit(exit_codes.SUCCESS); - }); - - test('defaults to the current package', () async { - await d.validPackage.create(); - - await servePackages(); - await d.credentialsFile(globalServer, 'access token').create(); - var pub = await startPubUploader(globalServer, ['add', 'email']); - - globalServer.expect('POST', '/api/packages/test_pkg/uploaders', (request) { - return shelf.Response.ok( - jsonEncode({ - 'success': {'message': 'Good job!'} - }), - headers: {'content-type': 'application/json'}); - }); - - expect(pub.stdout, emits('Good job!')); - await pub.shouldExit(exit_codes.SUCCESS); - }); - - test('add provides an error', () async { - await servePackages(); - await d.credentialsFile(globalServer, 'access token').create(); - var pub = await startPubUploader( - globalServer, ['--package', 'pkg', 'add', 'email']); - - 
globalServer.expect('POST', '/api/packages/pkg/uploaders', (request) { - return shelf.Response(400, - body: jsonEncode({ - 'error': {'message': 'Bad job!'} - }), - headers: {'content-type': 'application/json'}); - }); - - expect(pub.stderr, emits('Bad job!')); - await pub.shouldExit(1); - }); - - test('remove provides an error', () async { - await servePackages(); - await d.credentialsFile(globalServer, 'access token').create(); - var pub = await startPubUploader( - globalServer, ['--package', 'pkg', 'remove', 'e/mail']); - - globalServer.expect('DELETE', '/api/packages/pkg/uploaders/e%2Fmail', - (request) { - return shelf.Response(400, - body: jsonEncode({ - 'error': {'message': 'Bad job!'} - }), - headers: {'content-type': 'application/json'}); - }); - - expect(pub.stderr, emits('Bad job!')); - await pub.shouldExit(1); - }); - - test('add provides invalid JSON', () async { - await servePackages(); - await d.credentialsFile(globalServer, 'access token').create(); - var pub = await startPubUploader( - globalServer, ['--package', 'pkg', 'add', 'email']); - - globalServer.expect('POST', '/api/packages/pkg/uploaders', - (request) => shelf.Response.ok('{not json')); - - expect( - pub.stderr, - emitsLines('Invalid server response:\n' - '{not json')); - await pub.shouldExit(1); - }); - - test('remove provides invalid JSON', () async { - await servePackages(); - await d.credentialsFile(globalServer, 'access token').create(); - var pub = await startPubUploader( - globalServer, ['--package', 'pkg', 'remove', 'email']); - - globalServer.expect('DELETE', '/api/packages/pkg/uploaders/email', - (request) => shelf.Response.ok('{not json')); - - expect( - pub.stderr, - emitsLines('Invalid server response:\n' - '{not json')); - await pub.shouldExit(1); +https://pub.dev/packages/myapp/admin +''', + exitCode: 1, + ); }); }
diff --git a/test/pubspec_overrides_test.dart b/test/pubspec_overrides_test.dart new file mode 100644 index 0000000..2e9234f --- /dev/null +++ b/test/pubspec_overrides_test.dart
@@ -0,0 +1,49 @@ +// Copyright (c) 2022, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +import 'package:test/test.dart'; + +import 'descriptor.dart' as d; +import 'test_pub.dart'; + +void main() { + forBothPubGetAndUpgrade((command) { + test('pubspec overrides', () async { + await servePackages() + ..serve('lib', '1.0.0') + ..serve('lib', '2.0.0'); + + await d.dir(appPath, [ + d.appPubspec({'lib': '1.0.0'}), + d.dir('lib'), + d.pubspecOverrides({ + 'dependency_overrides': {'lib': '2.0.0'} + }), + ]).create(); + + await pubCommand( + command, + warning: + 'Warning: pubspec.yaml has overrides from pubspec_overrides.yaml\n' + 'Warning: You are using these overridden dependencies:\n' + '! lib 2.0.0', + ); + + await d.dir(appPath, [ + d.packageConfigFile([ + d.packageConfigEntry( + name: 'lib', + version: '2.0.0', + languageVersion: '2.7', + ), + d.packageConfigEntry( + name: 'myapp', + path: '.', + languageVersion: '0.1', + ), + ]) + ]).validate(); + }); + }); +}
diff --git a/test/pubspec_test.dart b/test/pubspec_test.dart index 62f77c4..1e9c8bd 100644 --- a/test/pubspec_test.dart +++ b/test/pubspec_test.dart
@@ -2,50 +2,18 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -import 'package:pub/src/language_version.dart'; -import 'package:pub/src/package_name.dart'; +import 'dart:io'; + import 'package:pub/src/pubspec.dart'; import 'package:pub/src/sdk.dart'; -import 'package:pub/src/source.dart'; -import 'package:pub/src/source_registry.dart'; +import 'package:pub/src/source/hosted.dart'; import 'package:pub/src/system_cache.dart'; import 'package:pub_semver/pub_semver.dart'; import 'package:test/test.dart'; -class FakeSource extends Source { - @override - final String name = 'fake'; - - @override - BoundSource bind(SystemCache cache) => - throw UnsupportedError('Cannot download fake packages.'); - - @override - PackageRef parseRef(String name, description, - {String? containingPath, LanguageVersion? languageVersion}) { - if (description != 'ok') throw FormatException('Bad'); - return PackageRef(name, this, description); - } - - @override - PackageId parseId(String name, Version version, description, - {String? 
containingPath}) => - PackageId(name, this, version, description); - - @override - bool descriptionsEqual(description1, description2) => - description1 == description2; - - @override - int hashDescription(description) => description.hashCode; - - String packageName(description) => 'foo'; -} - void main() { group('parse()', () { - var sources = SourceRegistry(); - sources.register(FakeSource()); + final sources = SystemCache().sources; var throwsPubspecException = throwsA(const TypeMatcher<PubspecException>()); @@ -84,7 +52,9 @@ var pubspec = Pubspec.parse(''' dependencies: foo: - fake: ok + hosted: + name: foo + url: https://foo.com version: ">=1.2.3 <3.4.5" ''', sources); @@ -99,7 +69,9 @@ var pubspec = Pubspec.parse(''' dependencies: foo: - fake: ok + hosted: + name: foo + url: https://foo.com version: ">=1.2.3 <0.0.0" ''', sources); @@ -120,7 +92,9 @@ var pubspec = Pubspec.parse(''' dev_dependencies: foo: - fake: ok + hosted: + name: foo + url: https://foo.com version: ">=1.2.3 <3.4.5" ''', sources); @@ -143,7 +117,9 @@ var pubspec = Pubspec.parse(''' dependency_overrides: foo: - fake: ok + hosted: + name: foo + url: https://foo.com version: ">=1.2.3 <3.4.5" ''', sources); @@ -171,7 +147,7 @@ var foo = pubspec.dependencies['foo']!; expect(foo.name, equals('foo')); - expect(foo.source, equals(sources['unknown'])); + expect(foo.source, equals(sources('unknown'))); }); test('allows a default source', () { @@ -183,10 +159,10 @@ var foo = pubspec.dependencies['foo']!; expect(foo.name, equals('foo')); - expect(foo.source, equals(sources['hosted'])); + expect(foo.source, equals(sources('hosted'))); }); - test('throws if it dependes on itself', () { + test('throws if it depends on itself', () { expectPubspecException(''' name: myapp dependencies: @@ -215,9 +191,12 @@ test("throws if the description isn't valid", () { expectPubspecException(''' +name: myapp dependencies: foo: - fake: bad + hosted: + name: foo + url: '::' ''', (pubspec) => pubspec.dependencies); }); @@ 
-310,11 +289,14 @@ var foo = pubspec.dependencies['foo']!; expect(foo.name, equals('foo')); - expect(foo.source!.name, 'hosted'); - expect(foo.source!.serializeDescription('', foo.description), { - 'url': 'https://example.org/pub/', - 'name': 'bar', - }); + expect(foo.source.name, 'hosted'); + expect( + ResolvedHostedDescription(foo.description as HostedDescription) + .serializeForLockfile(containingDir: null), + { + 'url': 'https://example.org/pub/', + 'name': 'bar', + }); }); test('with url only', () { @@ -333,11 +315,14 @@ var foo = pubspec.dependencies['foo']!; expect(foo.name, equals('foo')); - expect(foo.source!.name, 'hosted'); - expect(foo.source!.serializeDescription('', foo.description), { - 'url': 'https://example.org/pub/', - 'name': 'foo', - }); + expect(foo.source.name, 'hosted'); + expect( + ResolvedHostedDescription(foo.description as HostedDescription) + .serializeForLockfile(containingDir: null), + { + 'url': 'https://example.org/pub/', + 'name': 'foo', + }); }); test('with url as string', () { @@ -355,11 +340,14 @@ var foo = pubspec.dependencies['foo']!; expect(foo.name, equals('foo')); - expect(foo.source!.name, 'hosted'); - expect(foo.source!.serializeDescription('', foo.description), { - 'url': 'https://example.org/pub/', - 'name': 'foo', - }); + expect(foo.source.name, 'hosted'); + expect( + ResolvedHostedDescription(foo.description as HostedDescription) + .serializeForLockfile(containingDir: null), + { + 'url': 'https://example.org/pub/', + 'name': 'foo', + }); }); test('interprets string description as name for older versions', () { @@ -377,11 +365,14 @@ var foo = pubspec.dependencies['foo']!; expect(foo.name, equals('foo')); - expect(foo.source!.name, 'hosted'); - expect(foo.source!.serializeDescription('', foo.description), { - 'url': 'https://pub.dartlang.org', - 'name': 'bar', - }); + expect(foo.source.name, 'hosted'); + expect( + ResolvedHostedDescription(foo.description as HostedDescription) + .serializeForLockfile(containingDir: 
null), + { + 'url': 'https://pub.dartlang.org', + 'name': 'bar', + }); }); test( @@ -418,11 +409,14 @@ var foo = pubspec.dependencies['foo']!; expect(foo.name, equals('foo')); - expect(foo.source!.name, 'hosted'); - expect(foo.source!.serializeDescription('', foo.description), { - 'url': 'https://pub.dartlang.org', - 'name': 'foo', - }); + expect(foo.source.name, 'hosted'); + expect( + ResolvedHostedDescription(foo.description as HostedDescription) + .serializeForLockfile(containingDir: null), + { + 'url': 'https://pub.dartlang.org', + 'name': 'foo', + }); }); group('throws without a min SDK constraint', () { @@ -685,136 +679,92 @@ }); }); - group('features', () { - test('can be null', () { - var pubspec = Pubspec.parse('features:', sources); - expect(pubspec.features, isEmpty); + group('pubspec overrides', () { + Pubspec parsePubspecOverrides(String overridesContents) { + return Pubspec.parse( + ''' +name: app +environment: + sdk: '>=2.7.0 <3.0.0' +dependency_overrides: + bar: 2.1.0 +''', + sources, + overridesFileContents: overridesContents, + overridesLocation: Uri.parse('file:///pubspec_overrides.yaml'), + ); + } + + void expectPubspecOverridesException( + String contents, + void Function(Pubspec) fn, [ + String? 
expectedContains, + ]) { + var expectation = isA<PubspecException>(); + if (expectedContains != null) { + expectation = expectation.having((error) => error.toString(), + 'toString()', contains(expectedContains)); + } + + var pubspec = parsePubspecOverrides(contents); + expect(() => fn(pubspec), throwsA(expectation)); + } + + test('allows empty overrides file', () { + var pubspec = parsePubspecOverrides(''); + expect(pubspec.dependencyOverrides['foo'], isNull); + final bar = pubspec.dependencyOverrides['bar']!; + expect(bar.name, equals('bar')); + expect(bar.source, equals(sources('hosted'))); + expect(bar.constraint, VersionConstraint.parse('2.1.0')); }); - test("throws if it's not a map", () { - expectPubspecException('features: 12', (pubspec) => pubspec.features); + test('allows empty dependency_overrides section', () { + final pubspec = parsePubspecOverrides(''' +dependency_overrides: +'''); + expect(pubspec.dependencyOverrides, isEmpty); }); - test('throws if it has non-string keys', () { - expectPubspecException( - 'features: {1: {}}', (pubspec) => pubspec.features); + test('parses dependencies in dependency_overrides section', () { + final pubspec = parsePubspecOverrides(''' +dependency_overrides: + foo: + version: 1.0.0 +'''); + + expect(pubspec.dependencyOverrides['bar'], isNull); + + final foo = pubspec.dependencyOverrides['foo']!; + expect(foo.name, equals('foo')); + expect(foo.source, equals(sources('hosted'))); + expect(foo.constraint, VersionConstraint.parse('1.0.0')); }); - test("throws if a key isn't a Dart identifier", () { - expectPubspecException( - 'features: {foo-bar: {}}', (pubspec) => pubspec.features); + test('throws exception with correct source references', () { + expectPubspecOverridesException( + ''' +dependency_overrides: + foo: + hosted: + name: foo + url: '::' +''', + (pubspecOverrides) => pubspecOverrides.dependencyOverrides, + 'Error on line 4, column 7 of ${Platform.pathSeparator}pubspec_overrides.yaml', + ); }); - test('allows null 
values', () { - var pubspec = Pubspec.parse(''' -features: - foobar: -''', sources); - expect(pubspec.features, contains('foobar')); - - var feature = pubspec.features['foobar']!; - expect(feature.name, equals('foobar')); - expect(feature.onByDefault, isTrue); - expect(feature.dependencies, isEmpty); + test('throws if overrides contain invalid dependency section', () { + expectPubspecOverridesException(''' +dependency_overrides: false +''', (pubspecOverrides) => pubspecOverrides.dependencyOverrides); }); - test("throws if the value isn't a map", () { - expectPubspecException( - 'features: {foobar: 1}', (pubspec) => pubspec.features); - }); - - test("throws if the value's dependencies aren't valid", () { - expectPubspecException(''' -features: - foobar: - dependencies: - baz: not a version range -''', (pubspec) => pubspec.features); - }); - - test("throws if the environment value isn't a map", () { - expectPubspecException( - 'features: {foobar: 1}', (pubspec) => pubspec.features); - }); - - test('allows a valid environment', () { - var pubspec = Pubspec.parse(''' -features: - foobar: - environment: - sdk: ^1.0.0 - flutter: ^2.0.0 - fuchsia: ^3.0.0 -''', sources); - - expect(pubspec.features, contains('foobar')); - - var feature = pubspec.features['foobar']!; - expect(feature.sdkConstraints, - containsPair('dart', VersionConstraint.parse('^1.0.0'))); - expect(feature.sdkConstraints, - containsPair('flutter', VersionConstraint.parse('>=2.0.0'))); - expect(feature.sdkConstraints, - containsPair('fuchsia', VersionConstraint.parse('^3.0.0'))); - }); - - test("throws if the default value isn't a boolean", () { - expectPubspecException( - 'features: {foobar: {default: 12}}', (pubspec) => pubspec.features); - }); - - test('allows a default boolean', () { - var pubspec = - Pubspec.parse('features: {foobar: {default: false}}', sources); - - expect(pubspec.features, contains('foobar')); - expect(pubspec.features['foobar']!.onByDefault, isFalse); - }); - - test('parses valid 
dependency specifications', () { - var pubspec = Pubspec.parse(''' -features: - foobar: - dependencies: - baz: 1.0.0 - qux: ^2.0.0 -''', sources); - - expect(pubspec.features, contains('foobar')); - - var feature = pubspec.features['foobar']!; - expect(feature.name, equals('foobar')); - expect(feature.onByDefault, isTrue); - expect(feature.dependencies, hasLength(2)); - - expect(feature.dependencies.first.name, equals(equals('baz'))); - expect(feature.dependencies.first.constraint, equals(Version(1, 0, 0))); - expect(feature.dependencies.last.name, equals('qux')); - expect(feature.dependencies.last.constraint, - equals(VersionConstraint.parse('^2.0.0'))); - }); - - group('requires', () { - test('can be null', () { - var pubspec = - Pubspec.parse('features: {foobar: {requires: null}}', sources); - expect(pubspec.features['foobar']!.requires, isEmpty); - }); - - test('must be a list', () { - expectPubspecException('features: {foobar: {requires: baz}, baz: {}}', - (pubspec) => pubspec.features); - }); - - test('must be a string list', () { - expectPubspecException('features: {foobar: {requires: [12]}}', - (pubspec) => pubspec.features); - }); - - test('must refer to features that exist in the pubspec', () { - expectPubspecException('features: {foobar: {requires: [baz]}}', - (pubspec) => pubspec.features); - }); + test('throws if overrides contain an unknown field', () { + expectPubspecOverridesException(''' +name: 'foo' +''', (pubspecOverrides) => pubspecOverrides.dependencyOverrides); }); }); });
diff --git a/test/real_version_test.dart b/test/real_version_test.dart deleted file mode 100644 index 80afab7..0000000 --- a/test/real_version_test.dart +++ /dev/null
@@ -1,33 +0,0 @@ -// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file -// for details. All rights reserved. Use of this source code is governed by a -// BSD-style license that can be found in the LICENSE file. - -import 'dart:io'; - -import 'package:path/path.dart' as path; -import 'package:pub/src/exit_codes.dart' as exit_codes; -import 'package:pub/src/sdk.dart'; -import 'package:test/test.dart'; -import 'package:test_process/test_process.dart'; - -void main() { - // This test is a bit funny. - // - // Pub parses the "version" file that gets generated and shipped with the SDK. - // We want to make sure that the actual version file that gets created is - // also one pub can parse. If this test fails, it means the version file's - // format has changed in a way pub didn't expect. - // - // Note that this test expects to be invoked from a Dart executable that is - // in the built SDK's "bin" directory. Note also that this invokes pub from - // the built SDK directory, and not the live pub code directly in the repo. - test('parse the real SDK "version" file', () async { - // Get the path to the pub binary in the SDK. - var pubPath = path.join( - sdk.rootDirectory, 'bin', Platform.isWindows ? 'pub.bat' : 'pub'); - - var pub = await TestProcess.start(pubPath, ['version']); - expect(pub.stdout, emits(startsWith('Pub'))); - await pub.shouldExit(exit_codes.SUCCESS); - }); -}
diff --git a/test/reformat_ranges_test.dart b/test/reformat_ranges_test.dart index 35b085f..9a88df6 100644 --- a/test/reformat_ranges_test.dart +++ b/test/reformat_ranges_test.dart
@@ -4,15 +4,19 @@ import 'package:pub/src/package_name.dart'; import 'package:pub/src/solver/reformat_ranges.dart'; +import 'package:pub/src/source/hosted.dart'; import 'package:pub/src/utils.dart'; import 'package:pub_semver/pub_semver.dart'; import 'package:test/test.dart'; void main() { + final description = ResolvedHostedDescription( + HostedDescription('foo', 'https://pub.dartlang.org'), + ); test('reformatMax when max has a build identifier', () { expect( reformatMax( - [PackageId('abc', null, Version.parse('1.2.3'), null)], + [PackageId('abc', Version.parse('1.2.3'), description)], VersionRange( min: Version.parse('0.2.4'), max: Version.parse('1.2.4'), @@ -28,7 +32,13 @@ ); expect( reformatMax( - [PackageId('abc', null, Version.parse('1.2.4-3'), null)], + [ + PackageId( + 'abc', + Version.parse('1.2.4-3'), + description, + ), + ], VersionRange( min: Version.parse('0.2.4'), max: Version.parse('1.2.4'),
diff --git a/test/run/forwards_signal_posix_test.dart b/test/run/forwards_signal_posix_test.dart index 5205145..4300fc1 100644 --- a/test/run/forwards_signal_posix_test.dart +++ b/test/run/forwards_signal_posix_test.dart
@@ -27,11 +27,11 @@ import 'dart:io'; main() { - ProcessSignal.SIGHUP.watch().first.then(print); - ProcessSignal.SIGTERM.watch().first.then(print); - ProcessSignal.SIGUSR1.watch().first.then(print); - ProcessSignal.SIGUSR2.watch().first.then(print); - ProcessSignal.SIGWINCH.watch().first.then(print); + ProcessSignal.sighup.watch().first.then(print); + ProcessSignal.sigterm.watch().first.then(print); + ProcessSignal.sigusr1.watch().first.then(print); + ProcessSignal.sigusr2.watch().first.then(print); + ProcessSignal.sigwinch.watch().first.then(print); print("ready"); }
diff --git a/test/test_pub.dart b/test/test_pub.dart index fbf63de..10b2a76 100644 --- a/test/test_pub.dart +++ b/test/test_pub.dart
@@ -26,7 +26,7 @@ import 'package:pub/src/io.dart'; import 'package:pub/src/lock_file.dart'; import 'package:pub/src/log.dart' as log; -import 'package:pub/src/source_registry.dart'; +import 'package:pub/src/package_name.dart'; import 'package:pub/src/system_cache.dart'; import 'package:pub/src/utils.dart'; import 'package:pub/src/validator.dart'; @@ -371,15 +371,17 @@ Future<PubProcess> startPublish( PackageServer server, { List<String>? args, - String authMethod = 'oauth2', + bool overrideDefaultHostedServer = true, Map<String, String>? environment, String path = '', }) async { var tokenEndpoint = Uri.parse(server.url).resolve('/token').toString(); args = ['lish', ...?args]; return await startPub(args: args, tokenEndpoint: tokenEndpoint, environment: { - 'PUB_HOSTED_URL': server.url + path, - '_PUB_TEST_AUTH_METHOD': authMethod, + if (overrideDefaultHostedServer) + '_PUB_TEST_DEFAULT_HOSTED_URL': server.url + path + else + 'PUB_HOSTED_URL': server.url + path, if (environment != null) ...environment, }); } @@ -607,8 +609,8 @@ Map<String, String>? hosted}) async { var cache = SystemCache(rootDir: _pathInSandbox(cachePath)); - var lockFile = _createLockFile(cache.sources, - sandbox: dependenciesInSandBox, hosted: hosted); + var lockFile = + _createLockFile(cache, sandbox: dependenciesInSandBox, hosted: hosted); await d.dir(package, [ d.file('pubspec.lock', lockFile.serialize(p.join(d.sandbox, package))), @@ -629,8 +631,8 @@ {Iterable<String>? dependenciesInSandBox, Map<String, String>? hosted}) async { var cache = SystemCache(rootDir: _pathInSandbox(cachePath)); - var lockFile = _createLockFile(cache.sources, - sandbox: dependenciesInSandBox, hosted: hosted); + var lockFile = + _createLockFile(cache, sandbox: dependenciesInSandBox, hosted: hosted); await d.dir(package, [ d.file( @@ -651,7 +653,7 @@ /// /// [hosted] is a list of package names to version strings for dependencies on /// hosted packages. 
-LockFile _createLockFile(SourceRegistry sources, +LockFile _createLockFile(SystemCache cache, {Iterable<String>? sandbox, Map<String, String>? hosted}) { var dependencies = {}; @@ -661,19 +663,14 @@ } } - var packages = dependencies.keys.map((name) { - var dependencyPath = dependencies[name]; - return sources.path.parseId( - name, Version(0, 0, 0), {'path': dependencyPath, 'relative': true}, - containingPath: p.join(d.sandbox, appPath)); - }).toList(); - - if (hosted != null) { - hosted.forEach((name, version) { - var id = sources.hosted.idFor(name, Version.parse(version)); - packages.add(id); - }); - } + final packages = <PackageId>[ + ...dependencies.entries.map((entry) => cache.path.parseId( + entry.key, Version(0, 0, 0), {'path': entry.value, 'relative': true}, + containingDir: p.join(d.sandbox, appPath))), + if (hosted != null) + ...hosted.entries.map( + (entry) => cache.hosted.idFor(entry.key, Version.parse(entry.value))) + ]; return LockFile(packages); } @@ -842,14 +839,23 @@ } /// A function that creates a [Validator] subclass. -typedef ValidatorCreator = Validator Function(Entrypoint entrypoint); +typedef ValidatorCreator = Validator Function(); /// Schedules a single [Validator] to run on the [appPath]. /// /// Returns a scheduled Future that contains the validator after validation. -Future<Validator> validatePackage(ValidatorCreator fn) async { +Future<Validator> validatePackage(ValidatorCreator fn, int? size) async { var cache = SystemCache(rootDir: _pathInSandbox(cachePath)); - var validator = fn(Entrypoint(_pathInSandbox(appPath), cache)); + final entrypoint = Entrypoint(_pathInSandbox(appPath), cache); + var validator = fn(); + validator.context = ValidationContext( + entrypoint, + await Future.value(size ?? 100), + _globalServer == null + ? 
Uri.parse('https://pub.dev') + : Uri.parse(globalServer.url), + entrypoint.root.listFiles(), + ); await validator.validate(); return validator; } @@ -891,7 +897,8 @@ /// A [StreamMatcher] that matches multiple lines of output. StreamMatcher emitsLines(String output) => emitsInOrder(output.split('\n')); -Iterable<String> _filter(List<String> input) { +/// Removes output from pub known to be unstable. +Iterable<String> filterUnstableLines(List<String> input) { return input // Downloading order is not deterministic, so to avoid flakiness we filter // out these lines. @@ -916,12 +923,18 @@ StringBuffer buffer, { Map<String, String>? environment, String? workingDirectory, + String? stdin, }) async { final process = await startPub( args: args, environment: environment, workingDirectory: workingDirectory, ); + if (stdin != null) { + process.stdin.write(stdin); + await process.stdin.flush(); + await process.stdin.close(); + } final exitCode = await process.exitCode; // TODO(jonasfj): Clean out temporary directory names from env vars... @@ -933,11 +946,12 @@ // .map((e) => '\$ export ${e.key}=${e.value}') // .join('\n')); // } - buffer.writeln(_filter([ - '\$ pub ${args.join(' ')}', + final pipe = stdin == null ? '' : ' echo ${escapeShellArgument(stdin)} |'; + buffer.writeln(filterUnstableLines([ + '\$$pipe pub ${args.map(escapeShellArgument).join(' ')}', ...await process.stdout.rest.toList(), ]).join('\n')); - for (final line in _filter(await process.stderr.rest.toList())) { + for (final line in filterUnstableLines(await process.stderr.rest.toList())) { buffer.writeln('[STDERR] $line'); } if (exitCode != 0) { @@ -970,3 +984,30 @@ }); return server; } + +/// Create temporary folder 'bin/' containing a 'git' script in [sandbox] +/// By adding the bin/ folder to the search `$PATH` we can prevent `pub` from +/// detecting the installed 'git' binary and we can test that it prints +/// a useful error message. 
+Future<void> setUpFakeGitScript( + {required String bash, required String batch}) async { + await d.dir('bin', [ + if (!Platform.isWindows) d.file('git', bash), + if (Platform.isWindows) d.file('git.bat', batch), + ]).create(); + if (!Platform.isWindows) { + // Make the script executable. + await runProcess('chmod', ['+x', p.join(d.sandbox, 'bin', 'git')]); + } +} + +/// Returns an environment where PATH is extended with `$sandbox/bin`. +Map<String, String> extendedPathEnv() { + final separator = Platform.isWindows ? ';' : ':'; + final binFolder = p.join(d.sandbox, 'bin'); + + return { + // Override 'PATH' to ensure that we can't detect a working "git" binary + 'PATH': '$binFolder$separator${Platform.environment['PATH']}', + }; +}
diff --git a/test/testdata/goldens/dependency_services/dependency_services_test/Adding transitive.txt b/test/testdata/goldens/dependency_services/dependency_services_test/Adding transitive.txt new file mode 100644 index 0000000..7b8290e --- /dev/null +++ b/test/testdata/goldens/dependency_services/dependency_services_test/Adding transitive.txt
@@ -0,0 +1,123 @@ +# GENERATED BY: test/dependency_services/dependency_services_test.dart + +$ cat pubspec.yaml +{"name":"app","dependencies":{"foo":"^1.0.0"},"environment":{"sdk":">=0.1.2 <1.0.0"}} +$ cat pubspec.lock +# Generated by pub +# See https://dart.dev/tools/pub/glossary#lockfile +packages: + foo: + dependency: "direct main" + description: + name: foo + url: "http://localhost:$PORT" + source: hosted + version: "1.2.3" +sdks: + dart: ">=0.1.2 <1.0.0" +-------------------------------- END OF OUTPUT --------------------------------- + +## Section list +$ dependency_services list +{ + "dependencies": [ + { + "name": "foo", + "version": "1.2.3", + "kind": "direct", + "constraint": "^1.0.0" + } + ] +} + +-------------------------------- END OF OUTPUT --------------------------------- + +## Section report +$ dependency_services report +{ + "dependencies": [ + { + "name": "foo", + "version": "1.2.3", + "kind": "direct", + "latest": "2.2.3", + "constraint": "^1.0.0", + "compatible": [], + "singleBreaking": [ + { + "name": "foo", + "version": "2.2.3", + "kind": "direct", + "constraintBumped": "^2.2.3", + "constraintWidened": ">=1.0.0 <3.0.0", + "constraintBumpedIfNeeded": "^2.2.3", + "previousVersion": "1.2.3", + "previousConstraint": "^1.0.0" + }, + { + "name": "transitive", + "version": "1.0.0", + "kind": "transitive", + "constraintBumped": null, + "constraintWidened": null, + "constraintBumpedIfNeeded": null, + "previousVersion": null, + "previousConstraint": null + } + ], + "multiBreaking": [ + { + "name": "foo", + "version": "2.2.3", + "kind": "direct", + "constraintBumped": "^2.2.3", + "constraintWidened": ">=1.0.0 <3.0.0", + "constraintBumpedIfNeeded": "^2.2.3", + "previousVersion": "1.2.3", + "previousConstraint": "^1.0.0" + }, + { + "name": "transitive", + "version": "1.0.0", + "kind": "transitive", + "constraintBumped": null, + "constraintWidened": null, + "constraintBumpedIfNeeded": null, + "previousVersion": null, + "previousConstraint": null + } + ] + 
} + ] +} + +-------------------------------- END OF OUTPUT --------------------------------- + +## Section apply +$ echo '{"dependencyChanges":[{"name":"foo","version":"2.2.3"},{"name":"transitive","version":"1.0.0"}]}' | dependency_services apply +{"dependencies":[]} + +-------------------------------- END OF OUTPUT --------------------------------- + +$ cat pubspec.yaml +{"name":"app","dependencies":{"foo":^2.2.3},"environment":{"sdk":">=0.1.2 <1.0.0"}} +$ cat pubspec.lock +# Generated by pub +# See https://dart.dev/tools/pub/glossary#lockfile +packages: + foo: + dependency: "direct main" + description: + name: foo + url: "http://localhost:$PORT" + source: hosted + version: "2.2.3" + transitive: + dependency: transitive + description: + name: transitive + url: "http://localhost:$PORT" + source: hosted + version: "1.0.0" +sdks: + dart: ">=0.1.2 <1.0.0"
diff --git a/test/testdata/goldens/dependency_services/dependency_services_test/Compatible.txt b/test/testdata/goldens/dependency_services/dependency_services_test/Compatible.txt new file mode 100644 index 0000000..c133458 --- /dev/null +++ b/test/testdata/goldens/dependency_services/dependency_services_test/Compatible.txt
@@ -0,0 +1,222 @@ +# GENERATED BY: test/dependency_services/dependency_services_test.dart + +$ cat pubspec.yaml +{"name":"app","dependencies":{"foo":"^1.0.0","bar":"^1.0.0","boo":"^1.0.0"},"environment":{"sdk":">=0.1.2 <1.0.0"}} +$ cat pubspec.lock +# Generated by pub +# See https://dart.dev/tools/pub/glossary#lockfile +packages: + bar: + dependency: "direct main" + description: + name: bar + url: "http://localhost:$PORT" + source: hosted + version: "1.2.3" + boo: + dependency: "direct main" + description: + name: boo + url: "http://localhost:$PORT" + source: hosted + version: "1.2.3" + foo: + dependency: "direct main" + description: + name: foo + url: "http://localhost:$PORT" + source: hosted + version: "1.2.3" +sdks: + dart: ">=0.1.2 <1.0.0" +-------------------------------- END OF OUTPUT --------------------------------- + +## Section list +$ dependency_services list +{ + "dependencies": [ + { + "name": "bar", + "version": "1.2.3", + "kind": "direct", + "constraint": "^1.0.0" + }, + { + "name": "boo", + "version": "1.2.3", + "kind": "direct", + "constraint": "^1.0.0" + }, + { + "name": "foo", + "version": "1.2.3", + "kind": "direct", + "constraint": "^1.0.0" + } + ] +} + +-------------------------------- END OF OUTPUT --------------------------------- + +## Section report +$ dependency_services report +{ + "dependencies": [ + { + "name": "bar", + "version": "1.2.3", + "kind": "direct", + "latest": "2.2.3", + "constraint": "^1.0.0", + "compatible": [], + "singleBreaking": [ + { + "name": "bar", + "version": "2.2.3", + "kind": "direct", + "constraintBumped": "^2.2.3", + "constraintWidened": ">=1.0.0 <3.0.0", + "constraintBumpedIfNeeded": "^2.2.3", + "previousVersion": "1.2.3", + "previousConstraint": "^1.0.0" + } + ], + "multiBreaking": [ + { + "name": "bar", + "version": "2.2.3", + "kind": "direct", + "constraintBumped": "^2.2.3", + "constraintWidened": ">=1.0.0 <3.0.0", + "constraintBumpedIfNeeded": "^2.2.3", + "previousVersion": "1.2.3", + "previousConstraint": 
"^1.0.0" + } + ] + }, + { + "name": "boo", + "version": "1.2.3", + "kind": "direct", + "latest": "1.2.4", + "constraint": "^1.0.0", + "compatible": [ + { + "name": "boo", + "version": "1.2.4", + "kind": "direct", + "constraintBumped": "^1.0.0", + "constraintWidened": "^1.0.0", + "constraintBumpedIfNeeded": "^1.0.0", + "previousVersion": "1.2.3", + "previousConstraint": "^1.0.0" + } + ], + "singleBreaking": [ + { + "name": "boo", + "version": "1.2.4", + "kind": "direct", + "constraintBumped": "^1.2.4", + "constraintWidened": "^1.0.0", + "constraintBumpedIfNeeded": "^1.0.0", + "previousVersion": "1.2.3", + "previousConstraint": "^1.0.0" + } + ], + "multiBreaking": [ + { + "name": "boo", + "version": "1.2.4", + "kind": "direct", + "constraintBumped": "^1.2.4", + "constraintWidened": "^1.0.0", + "constraintBumpedIfNeeded": "^1.0.0", + "previousVersion": "1.2.3", + "previousConstraint": "^1.0.0" + } + ] + }, + { + "name": "foo", + "version": "1.2.3", + "kind": "direct", + "latest": "2.2.3", + "constraint": "^1.0.0", + "compatible": [ + { + "name": "foo", + "version": "1.2.4", + "kind": "direct", + "constraintBumped": "^1.0.0", + "constraintWidened": "^1.0.0", + "constraintBumpedIfNeeded": "^1.0.0", + "previousVersion": "1.2.3", + "previousConstraint": "^1.0.0" + } + ], + "singleBreaking": [ + { + "name": "foo", + "version": "2.2.3", + "kind": "direct", + "constraintBumped": "^2.2.3", + "constraintWidened": ">=1.0.0 <3.0.0", + "constraintBumpedIfNeeded": "^2.2.3", + "previousVersion": "1.2.3", + "previousConstraint": "^1.0.0" + } + ], + "multiBreaking": [ + { + "name": "foo", + "version": "2.2.3", + "kind": "direct", + "constraintBumped": "^2.2.3", + "constraintWidened": ">=1.0.0 <3.0.0", + "constraintBumpedIfNeeded": "^2.2.3", + "previousVersion": "1.2.3", + "previousConstraint": "^1.0.0" + } + ] + } + ] +} + +-------------------------------- END OF OUTPUT --------------------------------- + +## Section apply +$ echo 
'{"dependencyChanges":[{"name":"foo","version":"1.2.4"}]}' | dependency_services apply +{"dependencies":[]} + +-------------------------------- END OF OUTPUT --------------------------------- + +$ cat pubspec.yaml +{"name":"app","dependencies":{"foo":"^1.0.0","bar":"^1.0.0","boo":"^1.0.0"},"environment":{"sdk":">=0.1.2 <1.0.0"}} +$ cat pubspec.lock +# Generated by pub +# See https://dart.dev/tools/pub/glossary#lockfile +packages: + bar: + dependency: "direct main" + description: + name: bar + url: "http://localhost:$PORT" + source: hosted + version: "1.2.3" + boo: + dependency: "direct main" + description: + name: boo + url: "http://localhost:$PORT" + source: hosted + version: "1.2.3" + foo: + dependency: "direct main" + description: + name: foo + url: "http://localhost:$PORT" + source: hosted + version: "1.2.4" +sdks: + dart: ">=0.1.2 <1.0.0"
diff --git a/test/testdata/goldens/dependency_services/dependency_services_test/Removing transitive.txt b/test/testdata/goldens/dependency_services/dependency_services_test/Removing transitive.txt new file mode 100644 index 0000000..bfc5a78 --- /dev/null +++ b/test/testdata/goldens/dependency_services/dependency_services_test/Removing transitive.txt
@@ -0,0 +1,139 @@ +# GENERATED BY: test/dependency_services/dependency_services_test.dart + +$ cat pubspec.yaml +{"name":"app","dependencies":{"foo":"^1.0.0"},"environment":{"sdk":">=0.1.2 <1.0.0"}} +$ cat pubspec.lock +# Generated by pub +# See https://dart.dev/tools/pub/glossary#lockfile +packages: + foo: + dependency: "direct main" + description: + name: foo + url: "http://localhost:$PORT" + source: hosted + version: "1.2.3" + transitive: + dependency: transitive + description: + name: transitive + url: "http://localhost:$PORT" + source: hosted + version: "1.0.0" +sdks: + dart: ">=0.1.2 <1.0.0" +-------------------------------- END OF OUTPUT --------------------------------- + +## Section list +$ dependency_services list +{ + "dependencies": [ + { + "name": "foo", + "version": "1.2.3", + "kind": "direct", + "constraint": "^1.0.0" + }, + { + "name": "transitive", + "version": "1.0.0", + "kind": "transitive", + "constraint": "null" + } + ] +} + +-------------------------------- END OF OUTPUT --------------------------------- + +## Section report +$ dependency_services report +{ + "dependencies": [ + { + "name": "foo", + "version": "1.2.3", + "kind": "direct", + "latest": "2.2.3", + "constraint": "^1.0.0", + "compatible": [], + "singleBreaking": [ + { + "name": "foo", + "version": "2.2.3", + "kind": "direct", + "constraintBumped": "^2.2.3", + "constraintWidened": ">=1.0.0 <3.0.0", + "constraintBumpedIfNeeded": "^2.2.3", + "previousVersion": "1.2.3", + "previousConstraint": "^1.0.0" + }, + { + "name": "transitive", + "version": null, + "kind": "transitive", + "constraintBumped": null, + "constraintWidened": null, + "constraintBumpedIfNeeded": null, + "previousVersion": "1.0.0", + "previousConstraint": null + } + ], + "multiBreaking": [ + { + "name": "foo", + "version": "2.2.3", + "kind": "direct", + "constraintBumped": "^2.2.3", + "constraintWidened": ">=1.0.0 <3.0.0", + "constraintBumpedIfNeeded": "^2.2.3", + "previousVersion": "1.2.3", + "previousConstraint": 
"^1.0.0" + }, + { + "name": "transitive", + "version": null, + "kind": "transitive", + "constraintBumped": null, + "constraintWidened": null, + "constraintBumpedIfNeeded": null, + "previousVersion": "1.0.0", + "previousConstraint": null + } + ] + }, + { + "name": "transitive", + "version": "1.0.0", + "kind": "transitive", + "latest": "1.0.0", + "constraint": null, + "compatible": [], + "singleBreaking": [], + "multiBreaking": [] + } + ] +} + +-------------------------------- END OF OUTPUT --------------------------------- + +## Section apply +$ echo '{"dependencyChanges":[{"name":"foo","version":"2.2.3"},{"name":"transitive","version":null}]}' | dependency_services apply +{"dependencies":[]} + +-------------------------------- END OF OUTPUT --------------------------------- + +$ cat pubspec.yaml +{"name":"app","dependencies":{"foo":^2.2.3},"environment":{"sdk":">=0.1.2 <1.0.0"}} +$ cat pubspec.lock +# Generated by pub +# See https://dart.dev/tools/pub/glossary#lockfile +packages: + foo: + dependency: "direct main" + description: + name: foo + url: "http://localhost:$PORT" + source: hosted + version: "2.2.3" +sdks: + dart: ">=0.1.2 <1.0.0"
diff --git a/test/testdata/goldens/dependency_services/dependency_services_test/multibreaking.txt b/test/testdata/goldens/dependency_services/dependency_services_test/multibreaking.txt new file mode 100644 index 0000000..32bc04e --- /dev/null +++ b/test/testdata/goldens/dependency_services/dependency_services_test/multibreaking.txt
@@ -0,0 +1,220 @@ +# GENERATED BY: test/dependency_services/dependency_services_test.dart + +$ cat pubspec.yaml +{"name":"app","dependencies":{"foo":"^1.0.0","bar":"^1.0.0","baz":"1.0.0"},"environment":{"sdk":">=0.1.2 <1.0.0"}} +$ cat pubspec.lock +# Generated by pub +# See https://dart.dev/tools/pub/glossary#lockfile +packages: + bar: + dependency: "direct main" + description: + name: bar + url: "http://localhost:$PORT" + source: hosted + version: "1.0.0" + baz: + dependency: "direct main" + description: + name: baz + url: "http://localhost:$PORT" + source: hosted + version: "1.0.0" + foo: + dependency: "direct main" + description: + name: foo + url: "http://localhost:$PORT" + source: hosted + version: "1.0.0" +sdks: + dart: ">=0.1.2 <1.0.0" +-------------------------------- END OF OUTPUT --------------------------------- + +## Section list +$ dependency_services list +{ + "dependencies": [ + { + "name": "bar", + "version": "1.0.0", + "kind": "direct", + "constraint": "^1.0.0" + }, + { + "name": "baz", + "version": "1.0.0", + "kind": "direct", + "constraint": "1.0.0" + }, + { + "name": "foo", + "version": "1.0.0", + "kind": "direct", + "constraint": "^1.0.0" + } + ] +} + +-------------------------------- END OF OUTPUT --------------------------------- + +## Section report +$ dependency_services report +{ + "dependencies": [ + { + "name": "bar", + "version": "1.0.0", + "kind": "direct", + "latest": "2.0.0", + "constraint": "^1.0.0", + "compatible": [], + "singleBreaking": [], + "multiBreaking": [ + { + "name": "bar", + "version": "2.0.0", + "kind": "direct", + "constraintBumped": "^2.0.0", + "constraintWidened": ">=1.0.0 <3.0.0", + "constraintBumpedIfNeeded": "^2.0.0", + "previousVersion": "1.0.0", + "previousConstraint": "^1.0.0" + }, + { + "name": "foo", + "version": "3.0.1", + "kind": "direct", + "constraintBumped": "^3.0.1", + "constraintWidened": ">=1.0.0 <4.0.0", + "constraintBumpedIfNeeded": "^3.0.1", + "previousVersion": "1.0.0", + "previousConstraint": 
"^1.0.0" + } + ] + }, + { + "name": "baz", + "version": "1.0.0", + "kind": "direct", + "latest": "1.1.0", + "constraint": "1.0.0", + "compatible": [], + "singleBreaking": [ + { + "name": "baz", + "version": "1.1.0", + "kind": "direct", + "constraintBumped": "^1.1.0", + "constraintWidened": "^1.0.0", + "constraintBumpedIfNeeded": "^1.1.0", + "previousVersion": "1.0.0", + "previousConstraint": "1.0.0" + } + ], + "multiBreaking": [ + { + "name": "baz", + "version": "1.1.0", + "kind": "direct", + "constraintBumped": "^1.1.0", + "constraintWidened": "^1.0.0", + "constraintBumpedIfNeeded": "^1.1.0", + "previousVersion": "1.0.0", + "previousConstraint": "1.0.0" + } + ] + }, + { + "name": "foo", + "version": "1.0.0", + "kind": "direct", + "latest": "3.0.1", + "constraint": "^1.0.0", + "compatible": [ + { + "name": "foo", + "version": "1.5.0", + "kind": "direct", + "constraintBumped": "^1.0.0", + "constraintWidened": "^1.0.0", + "constraintBumpedIfNeeded": "^1.0.0", + "previousVersion": "1.0.0", + "previousConstraint": "^1.0.0" + } + ], + "singleBreaking": [ + { + "name": "foo", + "version": "2.0.0", + "kind": "direct", + "constraintBumped": "^2.0.0", + "constraintWidened": ">=1.0.0 <3.0.0", + "constraintBumpedIfNeeded": "^2.0.0", + "previousVersion": "1.0.0", + "previousConstraint": "^1.0.0" + } + ], + "multiBreaking": [ + { + "name": "foo", + "version": "3.0.1", + "kind": "direct", + "constraintBumped": "^3.0.1", + "constraintWidened": ">=1.0.0 <4.0.0", + "constraintBumpedIfNeeded": "^3.0.1", + "previousVersion": "1.0.0", + "previousConstraint": "^1.0.0" + }, + { + "name": "bar", + "version": "2.0.0", + "kind": "direct", + "constraintBumped": "^2.0.0", + "constraintWidened": ">=1.0.0 <3.0.0", + "constraintBumpedIfNeeded": "^2.0.0", + "previousVersion": "1.0.0", + "previousConstraint": "^1.0.0" + } + ] + } + ] +} + +-------------------------------- END OF OUTPUT --------------------------------- + +## Section apply +$ echo 
'{"dependencyChanges":[{"name":"foo","version":"3.0.1","constraint":"^3.0.0"},{"name":"bar","version":"2.0.0"}]}' | dependency_services apply +{"dependencies":[]} + +-------------------------------- END OF OUTPUT --------------------------------- + +$ cat pubspec.yaml +{"name":"app","dependencies":{"foo":^3.0.0,"bar":^2.0.0,"baz":"1.0.0"},"environment":{"sdk":">=0.1.2 <1.0.0"}} +$ cat pubspec.lock +# Generated by pub +# See https://dart.dev/tools/pub/glossary#lockfile +packages: + bar: + dependency: "direct main" + description: + name: bar + url: "http://localhost:$PORT" + source: hosted + version: "2.0.0" + baz: + dependency: "direct main" + description: + name: baz + url: "http://localhost:$PORT" + source: hosted + version: "1.0.0" + foo: + dependency: "direct main" + description: + name: foo + url: "http://localhost:$PORT" + source: hosted + version: "3.0.1" +sdks: + dart: ">=0.1.2 <1.0.0"
diff --git a/test/testdata/goldens/directory_option_test/commands taking a --directory~-C parameter work.txt b/test/testdata/goldens/directory_option_test/commands taking a --directory~-C parameter work.txt index 237b3b5..9585341 100644 --- a/test/testdata/goldens/directory_option_test/commands taking a --directory~-C parameter work.txt +++ b/test/testdata/goldens/directory_option_test/commands taking a --directory~-C parameter work.txt
@@ -17,7 +17,7 @@ -------------------------------- END OF OUTPUT --------------------------------- ## Section 2 -$ pub -C myapp/example get --directory=myapp bar +$ pub -C 'myapp/example' get --directory=myapp bar Resolving dependencies in myapp... Got dependencies in myapp! @@ -40,7 +40,7 @@ -------------------------------- END OF OUTPUT --------------------------------- ## Section 5 -$ pub get bar -C myapp/example +$ pub get bar -C 'myapp/example' Resolving dependencies in myapp/example... + foo 1.0.0 + test_pkg 1.0.0 from path myapp @@ -49,7 +49,7 @@ -------------------------------- END OF OUTPUT --------------------------------- ## Section 6 -$ pub get bar -C myapp/example2 +$ pub get bar -C 'myapp/example2' Resolving dependencies in myapp/example2... [STDERR] Error on line 1, column 9 of myapp/pubspec.yaml: "name" field doesn't match expected name "myapp". [STDERR] â•· @@ -61,7 +61,7 @@ -------------------------------- END OF OUTPUT --------------------------------- ## Section 7 -$ pub get bar -C myapp/broken_dir +$ pub get bar -C 'myapp/broken_dir' [STDERR] Could not find a file named "pubspec.yaml" in "$SANDBOX/myapp/broken_dir". [EXIT CODE] 66 @@ -84,7 +84,7 @@ -------------------------------- END OF OUTPUT --------------------------------- ## Section 10 -$ pub run -C myapp bin/app.dart +$ pub run -C myapp 'bin/app.dart' Building package executable... Built test_pkg:app. Hi @@ -114,7 +114,11 @@ ## Section 12 $ pub uploader -C myapp add sigurdm@google.com -Good job! +[STDERR] Package uploaders are no longer managed from the command line. +[STDERR] Manage uploaders from: +[STDERR] +[STDERR] https://pub.dev/packages/test_pkg/admin +[EXIT CODE] 1 -------------------------------- END OF OUTPUT ---------------------------------
diff --git a/test/testdata/goldens/help_test/pub global activate --help.txt b/test/testdata/goldens/help_test/pub global activate --help.txt index 5341266..29aab49 100644 --- a/test/testdata/goldens/help_test/pub global activate --help.txt +++ b/test/testdata/goldens/help_test/pub global activate --help.txt
@@ -8,6 +8,8 @@ -h, --help Print this usage information. -s, --source The source used to find the package. [git, hosted (default), path] + --git-path Path of git package in repository + --git-ref Git branch or commit to be retrieved --no-executables Do not put executables on PATH. -x, --executable Executable(s) to place on PATH. --overwrite Overwrite executables from other packages with the same
diff --git a/test/testdata/goldens/help_test/pub uploader --help.txt b/test/testdata/goldens/help_test/pub uploader --help.txt deleted file mode 100644 index 10c9d9d..0000000 --- a/test/testdata/goldens/help_test/pub uploader --help.txt +++ /dev/null
@@ -1,15 +0,0 @@ -# GENERATED BY: test/help_test.dart - -## Section 0 -$ pub uploader --help -Manage uploaders for a package on pub.dartlang.org. - -Usage: pub uploader [options] {add/remove} <email> --h, --help Print this usage information. - --package The package whose uploaders will be modified. - (defaults to the current package) --C, --directory=<dir> Run this in the directory<dir>. - -Run "pub help" to see global options. -See https://dart.dev/tools/pub/cmd/pub-uploader for detailed documentation. -
diff --git a/test/testdata/goldens/upgrade/example_warns_about_major_versions_test/pub upgrade --major-versions does not update major versions in example~.txt b/test/testdata/goldens/upgrade/example_warns_about_major_versions_test/pub upgrade --major-versions does not update major versions in example~.txt index 2cc96c0..ae304ea 100644 --- a/test/testdata/goldens/upgrade/example_warns_about_major_versions_test/pub upgrade --major-versions does not update major versions in example~.txt +++ b/test/testdata/goldens/upgrade/example_warns_about_major_versions_test/pub upgrade --major-versions does not update major versions in example~.txt
@@ -17,9 +17,7 @@ ## Section 1 $ pub upgrade --major-versions --directory example Resolving dependencies in example... - bar 2.0.0 > foo 2.0.0 (was 1.0.0) - myapp 0.0.0 from path . Changed 1 dependency in example! Changed 1 constraint in pubspec.yaml:
diff --git a/test/token/add_token_test.dart b/test/token/add_token_test.dart index ca68c41..65f7d56 100644 --- a/test/token/add_token_test.dart +++ b/test/token/add_token_test.dart
@@ -150,4 +150,20 @@ includeParentEnvironment: false, ); }); + + test('with https://pub.dev rewrites to https://pub.dartlang.org', () async { + await runPub( + args: ['token', 'add', 'https://pub.dev'], + input: ['auth-token'], + silent: contains( + 'Using https://pub.dartlang.org instead of https://pub.dev.'), + ); + + await d.tokensFile({ + 'version': 1, + 'hosted': [ + {'url': 'https://pub.dartlang.org', 'token': 'auth-token'} + ] + }).validate(); + }); }
diff --git a/test/token/token_authentication_test.dart b/test/token/token_authentication_test.dart index c020ac0..350bca4 100644 --- a/test/token/token_authentication_test.dart +++ b/test/token/token_authentication_test.dart
@@ -19,8 +19,11 @@ {'url': globalServer.url, 'env': 'TOKEN'}, ] }).create(); - var pub = await startPublish(globalServer, - authMethod: 'token', environment: {'TOKEN': 'access token'}); + var pub = await startPublish( + globalServer, + overrideDefaultHostedServer: false, + environment: {'TOKEN': 'access token'}, + ); await confirmPublish(pub); handleUploadForm(globalServer); @@ -36,7 +39,10 @@ {'url': globalServer.url, 'token': 'access token'}, ] }).create(); - var pub = await startPublish(globalServer, authMethod: 'token'); + var pub = await startPublish( + globalServer, + overrideDefaultHostedServer: false, + ); await confirmPublish(pub); handleUploadForm(globalServer);
diff --git a/test/token/when_receives_401_removes_token_test.dart b/test/token/when_receives_401_removes_token_test.dart index 86c0559..add619b 100644 --- a/test/token/when_receives_401_removes_token_test.dart +++ b/test/token/when_receives_401_removes_token_test.dart
@@ -19,7 +19,7 @@ {'url': server.url, 'token': 'access token'}, ] }).create(); - var pub = await startPublish(server, authMethod: 'token'); + var pub = await startPublish(server, overrideDefaultHostedServer: false); await confirmPublish(pub); server.expect('GET', '/api/packages/versions/new', (request) {
diff --git a/test/token/when_receives_403_persists_saved_token_test.dart b/test/token/when_receives_403_persists_saved_token_test.dart index 45fc7a4..6db5538 100644 --- a/test/token/when_receives_403_persists_saved_token_test.dart +++ b/test/token/when_receives_403_persists_saved_token_test.dart
@@ -19,7 +19,7 @@ {'url': server.url, 'token': 'access token'}, ] }).create(); - var pub = await startPublish(server, authMethod: 'token'); + var pub = await startPublish(server, overrideDefaultHostedServer: false); await confirmPublish(pub); server.expect('GET', '/api/packages/versions/new', (request) {
diff --git a/test/upgrade/upgrade_major_versions_test.dart b/test/upgrade/upgrade_major_versions_test.dart index 8c3f40a..932ca17 100644 --- a/test/upgrade/upgrade_major_versions_test.dart +++ b/test/upgrade/upgrade_major_versions_test.dart
@@ -100,11 +100,10 @@ }); test('upgrades only the selected package', () async { - await servePackages() + final server = await servePackages() ..serve('foo', '1.0.0') ..serve('foo', '2.0.0') - ..serve('bar', '0.1.0') - ..serve('bar', '0.2.0'); + ..serve('bar', '0.1.0'); await d.appDir({ 'foo': '^1.0.0', @@ -113,6 +112,8 @@ await pubGet(); + server.serve('bar', '0.1.1'); + // 1 constraint should be updated await pubUpgrade( args: ['--major-versions', 'foo'],
diff --git a/test/validator/changelog_test.dart b/test/validator/changelog_test.dart index 5bef662..397723d 100644 --- a/test/validator/changelog_test.dart +++ b/test/validator/changelog_test.dart
@@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -import 'package:pub/src/entrypoint.dart'; import 'package:pub/src/validator.dart'; import 'package:pub/src/validator/changelog.dart'; import 'package:test/test.dart'; @@ -11,7 +10,7 @@ import '../test_pub.dart'; import 'utils.dart'; -Validator changelog(Entrypoint entrypoint) => ChangelogValidator(entrypoint); +Validator changelog() => ChangelogValidator(); void main() { group('should consider a package valid if it', () {
diff --git a/test/validator/compiled_dartdoc_test.dart b/test/validator/compiled_dartdoc_test.dart index f1d3d6d..da02005 100644 --- a/test/validator/compiled_dartdoc_test.dart +++ b/test/validator/compiled_dartdoc_test.dart
@@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -import 'package:pub/src/entrypoint.dart'; import 'package:pub/src/validator.dart'; import 'package:pub/src/validator/compiled_dartdoc.dart'; import 'package:test/test.dart'; @@ -11,8 +10,7 @@ import '../test_pub.dart'; import 'utils.dart'; -Validator compiledDartdoc(Entrypoint entrypoint) => - CompiledDartdocValidator(entrypoint); +Validator compiledDartdoc() => CompiledDartdocValidator(); void main() { setUp(d.validPackage.create);
diff --git a/test/validator/dependency_override_test.dart b/test/validator/dependency_override_test.dart index 21a437d..b801ad9 100644 --- a/test/validator/dependency_override_test.dart +++ b/test/validator/dependency_override_test.dart
@@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -import 'package:pub/src/entrypoint.dart'; import 'package:pub/src/validator.dart'; import 'package:pub/src/validator/dependency_override.dart'; import 'package:test/test.dart'; @@ -11,8 +10,7 @@ import '../test_pub.dart'; import 'utils.dart'; -Validator dependencyOverride(Entrypoint entrypoint) => - DependencyOverrideValidator(entrypoint); +Validator dependencyOverride() => DependencyOverrideValidator(); void main() { test(
diff --git a/test/validator/dependency_test.dart b/test/validator/dependency_test.dart index db9db15..f90ed1d 100644 --- a/test/validator/dependency_test.dart +++ b/test/validator/dependency_test.dart
@@ -5,162 +5,134 @@ import 'dart:async'; import 'dart:convert'; -import 'package:http/http.dart' as http; -import 'package:http/testing.dart'; import 'package:path/path.dart' as path; -import 'package:pub/src/entrypoint.dart'; -import 'package:pub/src/validator.dart'; -import 'package:pub/src/validator/dependency.dart'; + +import 'package:pub/src/exit_codes.dart'; import 'package:test/test.dart'; import '../descriptor.dart' as d; import '../test_pub.dart'; -import 'utils.dart'; -Validator dependency(Entrypoint entrypoint) => DependencyValidator(entrypoint); - -Future<void> expectDependencyValidationError(String substring) => - expectValidation(dependency, errors: anyElement(contains(substring))); - -Future<void> expectDependencyValidationWarning(String substring) => - expectValidation(dependency, warnings: anyElement(contains(substring))); - -/// Sets up a test package with dependency [dep] and mocks a server with -/// [hostedVersions] of the package available. -Future setUpDependency(Map dep, {List<String>? hostedVersions}) { - useMockClient(MockClient((request) { - expect(request.method, equals('GET')); - expect(request.url.path, equals('/api/packages/foo')); - - if (hostedVersions == null) { - return Future.value(http.Response('not found', 404)); - } else { - return Future.value(http.Response( - jsonEncode({ - 'name': 'foo', - 'uploaders': ['nweiz@google.com'], - 'versions': hostedVersions - .map((version) => packageVersionApiMap( - 'https://pub.dartlang.org', packageMap('foo', version))) - .toList() - }), - 200)); - } - })); - +d.DirectoryDescriptor package( + {String version = '1.0.0', Map? deps, String? sdk}) { return d.dir(appPath, [ - d.libPubspec('test_pkg', '1.0.0', deps: {'foo': dep}) - ]).create(); + d.libPubspec('test_pkg', version, + sdk: sdk ?? 
'>=1.8.0 <=2.0.0', deps: deps), + d.file('LICENSE', 'Eh, do what you want.'), + d.file('README.md', "This package isn't real."), + d.file('CHANGELOG.md', '# $version\nFirst version\n'), + d.dir('lib', [d.file('test_pkg.dart', 'int i = 1;')]) + ]); +} + +Future<void> expectValidation({error, int exitCode = 0}) async { + await runPub( + error: error ?? contains('Package has 0 warnings.'), + args: ['publish', '--dry-run'], + // workingDirectory: d.path(appPath), + exitCode: exitCode, + ); +} + +Future<void> expectValidationWarning(error) async { + if (error is String) error = contains(error); + await expectValidation( + error: allOf([error, contains('Package has 1 warning.')]), + exitCode: DATA); +} + +Future<void> expectValidationError(String text) async { + await expectValidation( + error: allOf([ + contains(text), + contains('Package validation found the following error:') + ]), + exitCode: DATA); +} + +Future<void> setUpDependency(dep, + {List<String> hostedVersions = const []}) async { + final server = await servePackages(); + for (final version in hostedVersions) { + server.serve('foo', version); + } + await package(deps: {'foo': dep}).create(); } void main() { group('should consider a package valid if it', () { test('looks normal', () async { - await d.validPackage.create(); - await expectValidation(dependency); + await package().create(); + await expectValidation(); }); test('has a ^ constraint with an appropriate SDK constraint', () async { - await d.dir(appPath, [ - d.libPubspec('test_pkg', '1.0.0', - deps: {'foo': '^1.2.3'}, sdk: '>=1.8.0 <2.0.0') - ]).create(); - await expectValidation(dependency); + await package(deps: {'foo': '^1.2.3'}).create(); + await expectValidation(); }); test('with a dependency on a pre-release while being one', () async { - await d.dir(appPath, [ - d.libPubspec( - 'test_pkg', - '1.0.0-dev', - deps: {'foo': '^1.2.3-dev'}, - sdk: '>=1.19.0 <2.0.0', - ) - ]).create(); + await package(version: '1.0.0-dev', deps: {'foo': 
'^1.2.3-dev'}).create(); - await expectValidation(dependency); + await expectValidation(); }); test('has a git path dependency with an appropriate SDK constraint', () async { - // Ensure we don't report anything from the real pub.dev. - await setUpDependency({}); - await d.dir(appPath, [ - d.libPubspec('test_pkg', '1.0.0', - deps: { - 'foo': { - 'git': { - 'url': 'git://github.com/dart-lang/foo', - 'path': 'subdir' - } - } - }, - sdk: '>=2.0.0 <3.0.0') - ]).create(); + await servePackages(); + await package(deps: { + 'foo': { + 'git': {'url': 'git://github.com/dart-lang/foo', 'path': 'subdir'} + } + }, sdk: '>=2.0.0 <3.0.0') + .create(); // We should get a warning for using a git dependency, but not an error. - await expectDependencyValidationWarning(' foo: any'); + await expectValidationWarning(' foo: any'); }); test('depends on Flutter from an SDK source', () async { - await d.dir(appPath, [ - d.pubspec({ - 'name': 'test_pkg', - 'version': '1.0.0', - 'environment': {'sdk': '>=1.19.0 <2.0.0'}, - 'dependencies': { - 'flutter': {'sdk': 'flutter'} - } - }) - ]).create(); + await package(deps: { + 'flutter': {'sdk': 'flutter'} + }, sdk: '>=1.19.0 <2.0.0') + .create(); - await expectValidation(dependency); + await expectValidation(); }); test( 'depends on a package from Flutter with an appropriate Dart SDK ' 'constraint', () async { - await d.dir(appPath, [ - d.pubspec({ - 'name': 'test_pkg', - 'version': '1.0.0', - 'environment': {'sdk': '>=1.19.0 <2.0.0'}, - 'dependencies': { - 'foo': {'sdk': 'flutter', 'version': '>=1.2.3 <2.0.0'} - } - }) - ]).create(); + await package( + deps: { + 'foo': {'sdk': 'flutter', 'version': '>=1.2.3 <2.0.0'}, + }, + sdk: '>=1.19.0 <2.0.0', + ).create(); - await expectValidation(dependency); + await expectValidation(); }); test( 'depends on a package from Fuchsia with an appropriate Dart SDK ' 'constraint', () async { - await d.dir(appPath, [ - d.pubspec({ - 'name': 'test_pkg', - 'version': '1.0.0', - 'environment': {'sdk': 
'>=2.0.0-dev.51.0 <2.0.0'}, - 'dependencies': { - 'foo': {'sdk': 'fuchsia', 'version': '>=1.2.3 <2.0.0'} - } - }) - ]).create(); + await package(sdk: '>=2.0.0-dev.51.0 <2.0.0', deps: { + 'foo': {'sdk': 'fuchsia', 'version': '>=1.2.3 <2.0.0'} + }).create(); + await d.validPackage.create(); - await expectValidation(dependency); + await expectValidation(); }); }); group('should consider a package invalid if it', () { - setUp(d.validPackage.create); - + setUp(package().create); group('has a git dependency', () { group('where a hosted version exists', () { test('and should suggest the hosted primary version', () async { await setUpDependency({'git': 'git://github.com/dart-lang/foo'}, hostedVersions: ['3.0.0-pre', '2.0.0', '1.0.0']); - await expectDependencyValidationWarning(' foo: ^2.0.0'); + await expectValidationWarning(' foo: ^2.0.0'); }); test( @@ -168,7 +140,7 @@ "it's the only version available", () async { await setUpDependency({'git': 'git://github.com/dart-lang/foo'}, hostedVersions: ['3.0.0-pre', '2.0.0-pre']); - await expectDependencyValidationWarning(' foo: ^3.0.0-pre'); + await expectValidationWarning(' foo: ^3.0.0-pre'); }); test( @@ -176,7 +148,7 @@ 'pre-1.0.0', () async { await setUpDependency({'git': 'git://github.com/dart-lang/foo'}, hostedVersions: ['0.0.1', '0.0.2']); - await expectDependencyValidationWarning(' foo: ^0.0.2'); + await expectValidationWarning(' foo: ^0.0.2'); }); }); @@ -186,7 +158,7 @@ 'git': 'git://github.com/dart-lang/foo', 'version': '>=1.0.0 <2.0.0' }); - await expectDependencyValidationWarning(' foo: ">=1.0.0 <2.0.0"'); + await expectValidationWarning(' foo: ">=1.0.0 <2.0.0"'); }); test( @@ -194,7 +166,7 @@ 'concrete', () async { await setUpDependency( {'git': 'git://github.com/dart-lang/foo', 'version': '0.2.3'}); - await expectDependencyValidationWarning(' foo: 0.2.3'); + await expectValidationWarning(' foo: 0.2.3'); }); }); }); @@ -204,7 +176,7 @@ test('and should suggest the hosted primary version', () async { await 
setUpDependency({'path': path.join(d.sandbox, 'foo')}, hostedVersions: ['3.0.0-pre', '2.0.0', '1.0.0']); - await expectDependencyValidationError(' foo: ^2.0.0'); + await expectValidationError(' foo: ^2.0.0'); }); test( @@ -212,7 +184,7 @@ "it's the only version available", () async { await setUpDependency({'path': path.join(d.sandbox, 'foo')}, hostedVersions: ['3.0.0-pre', '2.0.0-pre']); - await expectDependencyValidationError(' foo: ^3.0.0-pre'); + await expectValidationError(' foo: ^3.0.0-pre'); }); test( @@ -220,7 +192,7 @@ 'pre-1.0.0', () async { await setUpDependency({'path': path.join(d.sandbox, 'foo')}, hostedVersions: ['0.0.1', '0.0.2']); - await expectDependencyValidationError(' foo: ^0.0.2'); + await expectValidationError(' foo: ^0.0.2'); }); }); @@ -230,7 +202,7 @@ 'path': path.join(d.sandbox, 'foo'), 'version': '>=1.0.0 <2.0.0' }); - await expectDependencyValidationError(' foo: ">=1.0.0 <2.0.0"'); + await expectValidationError(' foo: ">=1.0.0 <2.0.0"'); }); test( @@ -238,7 +210,7 @@ 'concrete', () async { await setUpDependency( {'path': path.join(d.sandbox, 'foo'), 'version': '0.2.3'}); - await expectDependencyValidationError(' foo: 0.2.3'); + await expectValidationError(' foo: 0.2.3'); }); }); }); @@ -250,13 +222,11 @@ d.libPubspec('test_pkg', '1.0.0', deps: {'foo': 'any'}) ]).create(); - await expectValidation(dependency, - warnings: everyElement(isNot(contains('\n foo:')))); + await expectValidationWarning(isNot(contains('\n foo:'))); }); - test( - "if the lockfile doesn't have an entry for the " - 'dependency', () async { + test("if the lockfile doesn't have an entry for the dependency", + () async { await d.dir(appPath, [ d.libPubspec('test_pkg', '1.0.0', deps: {'foo': 'any'}), d.file( @@ -275,8 +245,7 @@ })) ]).create(); - await expectValidation(dependency, - warnings: everyElement(isNot(contains('\n foo:')))); + await expectValidationWarning(isNot(contains('\n foo:'))); }); }); @@ -302,7 +271,7 @@ })) ]).create(); - await 
expectDependencyValidationWarning(' foo: ^1.2.3'); + await expectValidationWarning(' foo: ^1.2.3'); }); test( @@ -326,7 +295,7 @@ })) ]).create(); - await expectDependencyValidationWarning(' foo: ^0.1.2'); + await expectValidationWarning(' foo: ^0.1.2'); }); }); }); @@ -341,8 +310,7 @@ ) ]).create(); - await expectDependencyValidationWarning( - 'Packages dependent on a pre-release'); + await expectValidationWarning('Packages dependent on a pre-release'); }); test( 'with a single-version dependency and it should suggest a ' @@ -351,7 +319,7 @@ d.libPubspec('test_pkg', '1.0.0', deps: {'foo': '1.2.3'}) ]).create(); - await expectDependencyValidationWarning(' foo: ^1.2.3'); + await expectValidationWarning(' foo: ^1.2.3'); }); group('has a dependency without a lower bound', () { @@ -361,8 +329,7 @@ d.libPubspec('test_pkg', '1.0.0', deps: {'foo': '<3.0.0'}) ]).create(); - await expectValidation(dependency, - warnings: everyElement(isNot(contains('\n foo:')))); + await expectValidationWarning(isNot(contains('\n foo:'))); }); test( @@ -386,8 +353,7 @@ })) ]).create(); - await expectValidation(dependency, - warnings: everyElement(isNot(contains('\n foo:')))); + await expectValidationWarning(isNot(contains('\n foo:'))); }); }); @@ -413,7 +379,7 @@ })) ]).create(); - await expectDependencyValidationWarning(' foo: ">=1.2.3 <3.0.0"'); + await expectValidationWarning(' foo: ">=1.2.3 <3.0.0"'); }); test('and it should preserve the upper-bound operator', () async { @@ -435,7 +401,7 @@ })) ]).create(); - await expectDependencyValidationWarning(' foo: ">=1.2.3 <=3.0.0"'); + await expectValidationWarning(' foo: ">=1.2.3 <=3.0.0"'); }); test( @@ -459,7 +425,7 @@ })) ]).create(); - await expectDependencyValidationWarning(' foo: ^1.2.3'); + await expectValidationWarning(' foo: ^1.2.3'); }); }); }); @@ -471,7 +437,7 @@ d.libPubspec('test_pkg', '1.0.0', deps: {'foo': '>=1.2.3'}) ]).create(); - await expectDependencyValidationWarning(' foo: ^1.2.3'); + await expectValidationWarning(' foo: 
^1.2.3'); }); test('and it should preserve the lower-bound operator', () async { @@ -479,7 +445,7 @@ d.libPubspec('test_pkg', '1.0.0', deps: {'foo': '>1.2.3'}) ]).create(); - await expectDependencyValidationWarning(' foo: ">1.2.3 <2.0.0"'); + await expectValidationWarning(' foo: ">1.2.3 <2.0.0"'); }); }); @@ -489,7 +455,7 @@ d.libPubspec('integration_pkg', '1.0.0', deps: {'foo': '^1.2.3'}) ]).create(); - await expectDependencyValidationError(' sdk: ">=1.8.0 <2.0.0"'); + await expectValidationError(' sdk: ">=1.8.0 <2.0.0"'); }); test('with a too-broad SDK constraint', () async { @@ -498,7 +464,7 @@ deps: {'foo': '^1.2.3'}, sdk: '>=1.5.0 <2.0.0') ]).create(); - await expectDependencyValidationError(' sdk: ">=1.8.0 <2.0.0"'); + await expectValidationError(' sdk: ">=1.8.0 <2.0.0"'); }); }); @@ -514,9 +480,11 @@ }) ]).create(); - await expectValidation(dependency, - errors: anyElement(contains(' sdk: ">=2.0.0 <3.0.0"')), - warnings: anyElement(contains(' foo: any'))); + await expectValidation( + error: allOf( + contains(' sdk: ">=2.0.0 <3.0.0"'), contains(' foo: any')), + exitCode: DATA, + ); }); test('with a too-broad SDK constraint', () async { @@ -535,124 +503,70 @@ sdk: '>=1.24.0 <3.0.0') ]).create(); - await expectValidation(dependency, - errors: anyElement(contains(' sdk: ">=2.0.0 <3.0.0"')), - warnings: anyElement(contains(' foo: any'))); + await expectValidation( + error: allOf([ + contains(' sdk: ">=2.0.0 <3.0.0"'), + contains(' foo: any'), + ]), + exitCode: DATA, + ); }); }); - test('has a feature dependency', () async { - await d.dir(appPath, [ - d.libPubspec('test_pkg', '1.0.0', deps: { - 'foo': { - 'version': '^1.2.3', - 'features': {'stuff': true} - } - }) - ]).create(); - - await expectDependencyValidationError( - 'Packages with package features may not be published yet.'); - }); - - test('declares a feature', () async { - await d.dir(appPath, [ - d.pubspec({ - 'name': 'test_pkg', - 'version': '1.0.0', - 'features': { - 'stuff': { - 'dependencies': 
{'foo': '^1.0.0'} - } - } - }) - ]).create(); - - await expectDependencyValidationError( - 'Packages with package features may not be published yet.'); - }); - test('depends on Flutter from a non-SDK source', () async { await d.dir(appPath, [ d.libPubspec('test_pkg', '1.0.0', deps: {'flutter': '>=1.2.3 <2.0.0'}) ]).create(); - await expectDependencyValidationError('sdk: >=1.2.3 <2.0.0'); + await expectValidationError('sdk: >=1.2.3 <2.0.0'); }); test('depends on a Flutter package from an unknown SDK', () async { - await d.dir(appPath, [ - d.pubspec({ - 'name': 'test_pkg', - 'version': '1.0.0', - 'dependencies': { - 'foo': {'sdk': 'fblthp', 'version': '>=1.2.3 <2.0.0'} - } - }) - ]).create(); + await package(deps: { + 'foo': {'sdk': 'fblthp', 'version': '>=1.2.3 <2.0.0'} + }).create(); - await expectDependencyValidationError( - 'Unknown SDK "fblthp" for dependency "foo".'); + await expectValidationError('Unknown SDK "fblthp" for dependency "foo".'); }); test('depends on a Flutter package with a too-broad SDK constraint', () async { - await d.dir(appPath, [ - d.pubspec({ - 'name': 'test_pkg', - 'version': '1.0.0', - 'environment': {'sdk': '>=1.18.0 <2.0.0'}, - 'dependencies': { - 'foo': {'sdk': 'flutter', 'version': '>=1.2.3 <2.0.0'} - } - }) - ]).create(); + await package( + deps: { + 'foo': {'sdk': 'flutter', 'version': '>=1.2.3 <2.0.0'} + }, + sdk: '>=1.18.0 <2.0.0', + ).create(); - await expectDependencyValidationError('sdk: ">=1.19.0 <2.0.0"'); + await expectValidationError('sdk: ">=1.19.0 <2.0.0"'); }); test('depends on a Flutter package with no SDK constraint', () async { - await d.dir(appPath, [ - d.pubspec({ - 'name': 'test_pkg', - 'version': '1.0.0', - 'dependencies': { - 'foo': {'sdk': 'flutter', 'version': '>=1.2.3 <2.0.0'} - } - }) - ]).create(); + await package(sdk: '>=0.0.0 <=0.0.1', deps: { + 'foo': {'sdk': 'flutter', 'version': '>=1.2.3 <2.0.0'} + }).create(); - await expectDependencyValidationError('sdk: ">=1.19.0 <2.0.0"'); + await 
expectValidationError('sdk: ">=1.19.0 <2.0.0"'); }); test('depends on a Fuchsia package with a too-broad SDK constraint', () async { - await d.dir(appPath, [ - d.pubspec({ - 'name': 'test_pkg', - 'version': '1.0.0', - 'environment': {'sdk': '>=2.0.0-dev.50.0 <2.0.0'}, - 'dependencies': { - 'foo': {'sdk': 'fuchsia', 'version': '>=1.2.3 <2.0.0'} - } - }) - ]).create(); + await package( + sdk: '>=2.0.0-dev.50.0 <2.0.0', + deps: { + 'foo': {'sdk': 'fuchsia', 'version': '>=1.2.3 <2.0.0'} + }, + ).create(); - await expectDependencyValidationError('sdk: ">=2.0.0 <3.0.0"'); + await expectValidationError('sdk: ">=2.0.0 <3.0.0"'); }); test('depends on a Fuchsia package with no SDK constraint', () async { - await d.dir(appPath, [ - d.pubspec({ - 'name': 'test_pkg', - 'version': '1.0.0', - 'dependencies': { - 'foo': {'sdk': 'fuchsia', 'version': '>=1.2.3 <2.0.0'} - } - }) - ]).create(); + await package(sdk: '>=0.0.0 <1.0.0', deps: { + 'foo': {'sdk': 'fuchsia', 'version': '>=1.2.3 <2.0.0'} + }).create(); - await expectDependencyValidationError('sdk: ">=2.0.0 <3.0.0"'); + await expectValidationError('sdk: ">=2.0.0 <3.0.0"'); }); }); }
diff --git a/test/validator/deprecated_fields_test.dart b/test/validator/deprecated_fields_test.dart index ee4551d..65f3ca0 100644 --- a/test/validator/deprecated_fields_test.dart +++ b/test/validator/deprecated_fields_test.dart
@@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -import 'package:pub/src/entrypoint.dart'; import 'package:pub/src/validator.dart'; import 'package:pub/src/validator/deprecated_fields.dart'; import 'package:test/test.dart'; @@ -11,8 +10,7 @@ import '../test_pub.dart'; import 'utils.dart'; -Validator deprecatedFields(Entrypoint entrypoint) => - DeprecatedFieldsValidator(entrypoint); +Validator deprecatedFields() => DeprecatedFieldsValidator(); void main() { setUp(d.validPackage.create);
diff --git a/test/validator/directory_test.dart b/test/validator/directory_test.dart index 7cc53b6..1ccebde 100644 --- a/test/validator/directory_test.dart +++ b/test/validator/directory_test.dart
@@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -import 'package:pub/src/entrypoint.dart'; import 'package:pub/src/validator.dart'; import 'package:pub/src/validator/directory.dart'; import 'package:test/test.dart'; @@ -11,7 +10,7 @@ import '../test_pub.dart'; import 'utils.dart'; -Validator directory(Entrypoint entrypoint) => DirectoryValidator(entrypoint); +Validator directory() => DirectoryValidator(); void main() { group('should consider a package valid if it', () { @@ -21,7 +20,19 @@ test('has a nested directory named "tools"', () async { await d.dir(appPath, [ - d.dir('foo', [d.dir('tools')]) + d.dir('foo', [ + d.dir('tools', [d.file('empty')]) + ]) + ]).create(); + await expectValidation(directory); + }); + + test('is pubignoring the folder', () async { + await d.dir(appPath, [ + d.file('.pubignore', 'tools/\n'), + d.dir('foo', [ + d.dir('tools', [d.file('empty')]) + ]) ]).create(); await expectValidation(directory); }); @@ -44,7 +55,9 @@ for (var name in names) { test('"$name"', () async { - await d.dir(appPath, [d.dir(name)]).create(); + await d.dir(appPath, [ + d.dir(name, [d.file('empty')]) + ]).create(); await expectValidation(directory, warnings: isNotEmpty); }); }
diff --git a/test/validator/executable_test.dart b/test/validator/executable_test.dart index d9973e7..c1f2a50 100644 --- a/test/validator/executable_test.dart +++ b/test/validator/executable_test.dart
@@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -import 'package:pub/src/entrypoint.dart'; import 'package:pub/src/validator.dart'; import 'package:pub/src/validator/executable.dart'; import 'package:test/test.dart'; @@ -11,7 +10,7 @@ import '../test_pub.dart'; import 'utils.dart'; -Validator executable(Entrypoint entrypoint) => ExecutableValidator(entrypoint); +Validator executable() => ExecutableValidator(); void main() { setUp(d.validPackage.create);
diff --git a/test/validator/flutter_plugin_format_test.dart b/test/validator/flutter_plugin_format_test.dart index 0c24bda..0a5b28c 100644 --- a/test/validator/flutter_plugin_format_test.dart +++ b/test/validator/flutter_plugin_format_test.dart
@@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -import 'package:pub/src/entrypoint.dart'; import 'package:pub/src/validator.dart'; import 'package:pub/src/validator/flutter_plugin_format.dart'; import 'package:test/test.dart'; @@ -11,8 +10,7 @@ import '../test_pub.dart'; import 'utils.dart'; -Validator flutterPluginFormat(Entrypoint entrypoint) => - FlutterPluginFormatValidator(entrypoint); +Validator flutterPluginFormat() => FlutterPluginFormatValidator(); void main() { group('should consider a package valid if it', () {
diff --git a/test/validator/gitignore_test.dart b/test/validator/gitignore_test.dart index f7b02fe..3cd4a11 100644 --- a/test/validator/gitignore_test.dart +++ b/test/validator/gitignore_test.dart
@@ -14,12 +14,13 @@ Future<void> expectValidation( error, int exitCode, { + Map<String, String> environment = const {}, String? workingDirectory, }) async { await runPub( error: error, args: ['publish', '--dry-run'], - environment: {'_PUB_TEST_SDK_VERSION': '2.12.0'}, + environment: {'_PUB_TEST_SDK_VERSION': '2.12.0', ...environment}, workingDirectory: workingDirectory ?? d.path(appPath), exitCode: exitCode, ); @@ -52,6 +53,20 @@ exit_codes.DATA); }); + test('should not fail on missing git', () async { + await d.git('myapp', [ + ...d.validPackage.contents, + d.file('.gitignore', '*.txt'), + d.file('foo.txt'), + ]).create(); + + await pubGet(environment: {'_PUB_TEST_SDK_VERSION': '1.12.0'}); + await setUpFakeGitScript(bash: 'echo "Not git"', batch: 'echo "Not git"'); + await expectValidation( + allOf([contains('Package has 0 warnings.')]), exit_codes.SUCCESS, + environment: extendedPathEnv()); + }); + test('Should also consider gitignores from above the package root', () async { await d.git('reporoot', [ d.dir(
diff --git a/test/validator/language_version_test.dart b/test/validator/language_version_test.dart index 39c9465..fc2351e 100644 --- a/test/validator/language_version_test.dart +++ b/test/validator/language_version_test.dart
@@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -import 'package:pub/src/entrypoint.dart'; import 'package:pub/src/validator.dart'; import 'package:pub/src/validator/language_version.dart'; import 'package:test/test.dart'; @@ -11,8 +10,7 @@ import '../test_pub.dart'; import 'utils.dart'; -Validator validator(Entrypoint entrypoint) => - LanguageVersionValidator(entrypoint); +Validator validator() => LanguageVersionValidator(); Future<void> setup( {required String sdkConstraint, String? libraryLanguageVersion}) async {
diff --git a/test/validator/leak_detection_test.dart b/test/validator/leak_detection_test.dart index c055e59..fc64c68 100644 --- a/test/validator/leak_detection_test.dart +++ b/test/validator/leak_detection_test.dart
@@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -import 'package:pub/src/entrypoint.dart'; import 'package:pub/src/validator.dart'; import 'package:pub/src/validator/leak_detection.dart'; import 'package:test/test.dart'; @@ -11,8 +10,7 @@ import '../test_pub.dart'; import 'utils.dart'; -Validator leakDetection(Entrypoint entrypoint) => - LeakDetectionValidator(entrypoint); +Validator leakDetection() => LeakDetectionValidator(); void main() { group('should consider a package valid if it', () {
diff --git a/test/validator/license_test.dart b/test/validator/license_test.dart index 8717c75..17aeec5 100644 --- a/test/validator/license_test.dart +++ b/test/validator/license_test.dart
@@ -3,7 +3,6 @@ // BSD-style license that can be found in the LICENSE file. import 'package:path/path.dart' as path; -import 'package:pub/src/entrypoint.dart'; import 'package:pub/src/io.dart'; import 'package:pub/src/validator.dart'; import 'package:pub/src/validator/license.dart'; @@ -13,7 +12,7 @@ import '../test_pub.dart'; import 'utils.dart'; -Validator license(Entrypoint entrypoint) => LicenseValidator(entrypoint); +Validator license() => LicenseValidator(); void main() { group('should consider a package valid if it', () {
diff --git a/test/validator/name_test.dart b/test/validator/name_test.dart index 7cbb51c..2e8f8af 100644 --- a/test/validator/name_test.dart +++ b/test/validator/name_test.dart
@@ -3,7 +3,6 @@ // BSD-style license that can be found in the LICENSE file. import 'package:path/path.dart' as path; -import 'package:pub/src/entrypoint.dart'; import 'package:pub/src/io.dart'; import 'package:pub/src/validator.dart'; import 'package:pub/src/validator/name.dart'; @@ -13,7 +12,7 @@ import '../test_pub.dart'; import 'utils.dart'; -Validator name(Entrypoint entrypoint) => NameValidator(entrypoint); +Validator name() => NameValidator(); void main() { group('should consider a package valid if it', () {
diff --git a/test/validator/pubspec_field_test.dart b/test/validator/pubspec_field_test.dart index 8e6fefc..43db483 100644 --- a/test/validator/pubspec_field_test.dart +++ b/test/validator/pubspec_field_test.dart
@@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -import 'package:pub/src/entrypoint.dart'; import 'package:pub/src/validator.dart'; import 'package:pub/src/validator/pubspec_field.dart'; import 'package:test/test.dart'; @@ -11,8 +10,7 @@ import '../test_pub.dart'; import 'utils.dart'; -Validator pubspecField(Entrypoint entrypoint) => - PubspecFieldValidator(entrypoint); +Validator pubspecField() => PubspecFieldValidator(); void main() { group('should consider a package valid if it', () {
diff --git a/test/validator/pubspec_test.dart b/test/validator/pubspec_test.dart index 9942220..ea0f5af 100644 --- a/test/validator/pubspec_test.dart +++ b/test/validator/pubspec_test.dart
@@ -13,7 +13,7 @@ test('should consider a package valid if it has a pubspec', () async { await d.validPackage.create(); - await expectValidation((entrypoint) => PubspecValidator(entrypoint)); + await expectValidation(() => PubspecValidator()); }); test('should consider a package invalid if it has a .gitignored pubspec', @@ -22,7 +22,6 @@ await d.validPackage.create(); await repo.create(); - await expectValidation((entrypoint) => PubspecValidator(entrypoint), - errors: isNotEmpty); + await expectValidation(() => PubspecValidator(), errors: isNotEmpty); }); }
diff --git a/test/validator/pubspec_typo_test.dart b/test/validator/pubspec_typo_test.dart index ab2a547..bbe7db3 100644 --- a/test/validator/pubspec_typo_test.dart +++ b/test/validator/pubspec_typo_test.dart
@@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -import 'package:pub/src/entrypoint.dart'; import 'package:pub/src/validator.dart'; import 'package:pub/src/validator/pubspec_typo.dart'; import 'package:test/test.dart'; @@ -11,8 +10,7 @@ import '../test_pub.dart'; import 'utils.dart'; -Validator pubspecTypo(Entrypoint entrypoint) => - PubspecTypoValidator(entrypoint); +Validator pubspecTypo() => PubspecTypoValidator(); void main() { group('should consider a package valid if it', () {
diff --git a/test/validator/readme_test.dart b/test/validator/readme_test.dart index 6519c6f..17b0602 100644 --- a/test/validator/readme_test.dart +++ b/test/validator/readme_test.dart
@@ -3,7 +3,6 @@ // BSD-style license that can be found in the LICENSE file. import 'package:path/path.dart' as p; -import 'package:pub/src/entrypoint.dart'; import 'package:pub/src/io.dart'; import 'package:pub/src/validator.dart'; import 'package:pub/src/validator/readme.dart'; @@ -13,7 +12,7 @@ import '../test_pub.dart'; import 'utils.dart'; -Validator readme(Entrypoint entrypoint) => ReadmeValidator(entrypoint); +Validator readme() => ReadmeValidator(); void main() { setUp(d.validPackage.create);
diff --git a/test/validator/relative_version_numbering_test.dart b/test/validator/relative_version_numbering_test.dart index 2f3da5c..9a8bca0 100644 --- a/test/validator/relative_version_numbering_test.dart +++ b/test/validator/relative_version_numbering_test.dart
@@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -import 'package:pub/src/entrypoint.dart'; import 'package:pub/src/validator.dart'; import 'package:pub/src/validator/relative_version_numbering.dart'; import 'package:test/test.dart'; @@ -11,10 +10,7 @@ import '../test_pub.dart'; import 'utils.dart'; -Validator validator(Entrypoint entrypoint) => RelativeVersionNumberingValidator( - entrypoint, - Uri.parse(globalServer.url), - ); +Validator validator() => RelativeVersionNumberingValidator(); Future<void> setup({required String sdkConstraint}) async { await d.validPackage.create();
diff --git a/test/validator/sdk_constraint_test.dart b/test/validator/sdk_constraint_test.dart index c24adf2..7e82aef 100644 --- a/test/validator/sdk_constraint_test.dart +++ b/test/validator/sdk_constraint_test.dart
@@ -2,7 +2,6 @@ // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. -import 'package:pub/src/entrypoint.dart'; import 'package:pub/src/validator.dart'; import 'package:pub/src/validator/sdk_constraint.dart'; import 'package:test/test.dart'; @@ -11,8 +10,7 @@ import '../test_pub.dart'; import 'utils.dart'; -Validator sdkConstraint(Entrypoint entrypoint) => - SdkConstraintValidator(entrypoint); +Validator sdkConstraint() => SdkConstraintValidator(); void main() { group('should consider a package valid if it', () {
diff --git a/test/validator/size_test.dart b/test/validator/size_test.dart index 3a22e60..ba5151d 100644 --- a/test/validator/size_test.dart +++ b/test/validator/size_test.dart
@@ -11,13 +11,10 @@ import '../test_pub.dart'; import 'utils.dart'; -ValidatorCreator size(int size) { - return (entrypoint) => SizeValidator(entrypoint, Future.value(size)); -} - Future<void> expectSizeValidationError(Matcher matcher) async { await expectValidation( - size(100 * 1048577 /*2^20 +1*/), + () => SizeValidator(), + size: 100 * (1 << 20) + 1, errors: contains(matcher), ); } @@ -26,8 +23,8 @@ test('considers a package valid if it is <= 100 MB', () async { await d.validPackage.create(); - await expectValidation(size(100)); - await expectValidation(size(100 * 1048576 /*2^20*/)); + await expectValidation(() => SizeValidator(), size: 100); + await expectValidation(() => SizeValidator(), size: 100 * (1 << 20)); }); group('considers a package invalid if it is more than 100 MB', () {
diff --git a/test/validator/strict_dependencies_test.dart b/test/validator/strict_dependencies_test.dart index 7b7962a..a6b0833 100644 --- a/test/validator/strict_dependencies_test.dart +++ b/test/validator/strict_dependencies_test.dart
@@ -3,7 +3,6 @@ // BSD-style license that can be found in the LICENSE file. import 'package:path/path.dart' as path; -import 'package:pub/src/entrypoint.dart'; import 'package:pub/src/validator.dart'; import 'package:pub/src/validator/strict_dependencies.dart'; import 'package:test/test.dart'; @@ -12,8 +11,7 @@ import '../test_pub.dart'; import 'utils.dart'; -Validator strictDeps(Entrypoint entrypoint) => - StrictDependenciesValidator(entrypoint); +Validator strictDeps() => StrictDependenciesValidator(); void main() { group('should consider a package valid if it', () {
diff --git a/test/validator/utils.dart b/test/validator/utils.dart index 5123683..8a1c0e2 100644 --- a/test/validator/utils.dart +++ b/test/validator/utils.dart
@@ -10,8 +10,8 @@ // That would make them more robust, and test actual end2end behaviour. Future<void> expectValidation(ValidatorCreator fn, - {hints, warnings, errors}) async { - final validator = await validatePackage(fn); + {hints, warnings, errors, int? size}) async { + final validator = await validatePackage(fn, size); expect(validator.errors, errors ?? isEmpty); expect(validator.warnings, warnings ?? isEmpty); expect(validator.hints, hints ?? isEmpty);
diff --git a/test/version_solver_test.dart b/test/version_solver_test.dart index d36a16f..25212a7 100644 --- a/test/version_solver_test.dart +++ b/test/version_solver_test.dart
@@ -9,7 +9,7 @@ import 'package:pub/src/lock_file.dart'; import 'package:pub/src/pubspec.dart'; import 'package:pub/src/source/hosted.dart'; -import 'package:pub/src/source_registry.dart'; +import 'package:pub/src/system_cache.dart'; import 'package:test/test.dart'; import 'descriptor.dart' as d; @@ -1926,6 +1926,44 @@ await expectResolves(result: {'foo': '1.2.3', 'bar': '0.0.1'}); }); + + test('overrides in pubspec_overrides.yaml', () async { + await servePackages() + ..serve('a', '1.0.0') + ..serve('a', '2.0.0'); + + await d.dir(appPath, [ + d.pubspec({ + 'name': 'myapp', + 'dependencies': {'a': '1.0.0'}, + }), + d.pubspecOverrides({ + 'dependency_overrides': {'a': '2.0.0'} + }), + ]).create(); + + await expectResolves(result: {'a': '2.0.0'}); + }); + + test('pubspec_overrides.yaml takes precedence over pubspec.yaml', () async { + await servePackages() + ..serve('a', '1.0.0') + ..serve('a', '2.0.0') + ..serve('a', '3.0.0'); + + await d.dir(appPath, [ + d.pubspec({ + 'name': 'myapp', + 'dependencies': {'a': '1.0.0'}, + 'dependency_overrides': {'a': '2.0.0'} + }), + d.pubspecOverrides({ + 'dependency_overrides': {'a': '3.0.0'} + }), + ]).create(); + + await expectResolves(result: {'a': '3.0.0'}); + }); } void downgrade() { @@ -2859,7 +2897,8 @@ if (result == null) return; - var registry = SourceRegistry(); + var cache = SystemCache(); + var registry = cache.sources; var lockFile = LockFile.load(p.join(d.sandbox, appPath, 'pubspec.lock'), registry); var resultPubspec = Pubspec.fromMap({'dependencies': result}, registry); @@ -2868,13 +2907,13 @@ for (var dep in resultPubspec.dependencies.values) { expect(ids, contains(dep.name)); var id = ids.remove(dep.name); - final source = dep.source; - - if (source is HostedSource && (dep.description.uri == source.defaultUrl)) { + final description = dep.description; + if (description is HostedDescription && + (description.url == SystemCache().hosted.defaultUrl)) { // If the dep uses the default hosted source, grab it from the 
test // package server rather than pub.dartlang.org. - dep = registry.hosted - .refFor(dep.name, url: Uri.parse(globalServer.url)) + dep = cache.hosted + .refFor(dep.name, url: globalServer.url) .withConstraint(dep.constraint); } expect(dep.allows(id), isTrue, reason: 'Expected $id to match $dep.');
diff --git a/tool/test.dart b/tool/test.dart index e7877a2..7d98156 100755 --- a/tool/test.dart +++ b/tool/test.dart
@@ -29,7 +29,7 @@ await precompile( executablePath: path.join('bin', 'pub.dart'), outputPath: pubSnapshotFilename, - incrementalDillOutputPath: pubSnapshotIncrementalFilename, + incrementalDillPath: pubSnapshotIncrementalFilename, name: 'bin/pub.dart', packageConfigPath: path.join('.dart_tool', 'package_config.json')); testProcess = await Process.start(