Migrate to language version 3.7, reformat, upgrade (#4525)
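This change bumps the package to Dart language version 3.7 and reformats the whole codebase with the new formatter style, in which the formatter itself decides where argument lists split and where trailing commas go. Consequently the `require_trailing_commas` lint is dropped from `analysis_options.yaml`, since the formatter now manages trailing commas. The rest of the diff is almost entirely mechanical reformatting. As a minimal sketch of what the new style looks like, here is a before/after adapted from `lib/src/authentication/credential.dart` below; `dataError` is stubbed purely so the snippet compiles on its own (in pub it reports a user-facing data error), and exact line breaks depend on the configured line length:

```dart
// Stub for illustration only; stands in for pub's real dataError helper.
Never dataError(String message) => throw Exception(message);

void oldStyle(Uri url) {
  // Old formatter output: the string argument hugs the call and wraps with
  // manual continuation indentation; trailing commas were enforced by the
  // require_trailing_commas lint.
  dataError('Credential token for $url is not a valid Bearer token. '
      'It should match `^[a-zA-Z0-9._~+/=-]+\$`');
}

void newStyle(Uri url) {
  // Dart 3.7 "tall" style: the formatter splits the argument list, puts the
  // argument on its own indented line, and adds the trailing comma itself,
  // which is why the lint is removed in this change.
  dataError(
    'Credential token for $url is not a valid Bearer token. '
    'It should match `^[a-zA-Z0-9._~+/=-]+\$`',
  );
}
```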
diff --git a/analysis_options.yaml b/analysis_options.yaml
index 5e2f708..108b5ff 100644
--- a/analysis_options.yaml
+++ b/analysis_options.yaml
@@ -24,7 +24,6 @@
- no_runtimeType_toString
- prefer_const_declarations
- prefer_final_locals
- - require_trailing_commas
- unawaited_futures
- unreachable_from_main
- use_enums
diff --git a/bin/dependency_services.dart b/bin/dependency_services.dart
index a39b5ff..34f6eb4 100644
--- a/bin/dependency_services.dart
+++ b/bin/dependency_services.dart
@@ -44,11 +44,11 @@
}
_DependencyServicesCommandRunner()
- : super(
- 'dependency_services',
- 'Support for automatic upgrades',
- usageLineLength: lineLength,
- ) {
+ : super(
+ 'dependency_services',
+ 'Support for automatic upgrades',
+ usageLineLength: lineLength,
+ ) {
argParser.addFlag(
'verbose',
abbr: 'v',
diff --git a/lib/src/ascii_tree.dart b/lib/src/ascii_tree.dart
index 7927244..0b26869 100644
--- a/lib/src/ascii_tree.dart
+++ b/lib/src/ascii_tree.dart
@@ -82,8 +82,9 @@
}
var directory = root;
for (var part in parts) {
- directory = directory.putIfAbsent(part, () => <String, Map>{})
- as Map<String, Map>;
+ directory =
+ directory.putIfAbsent(part, () => <String, Map>{})
+ as Map<String, Map>;
}
}
diff --git a/lib/src/authentication/client.dart b/lib/src/authentication/client.dart
index 5d8fdaa..1a8823b 100644
--- a/lib/src/authentication/client.dart
+++ b/lib/src/authentication/client.dart
@@ -69,8 +69,9 @@
if (response.headers.containsKey(HttpHeaders.wwwAuthenticateHeader)) {
try {
final header = response.headers[HttpHeaders.wwwAuthenticateHeader]!;
- final challenge =
- AuthenticationChallenge.parseHeader(header).firstWhereOrNull(
+ final challenge = AuthenticationChallenge.parseHeader(
+ header,
+ ).firstWhereOrNull(
(challenge) =>
challenge.scheme == 'bearer' &&
challenge.parameters['realm'] == 'pub' &&
diff --git a/lib/src/authentication/credential.dart b/lib/src/authentication/credential.dart
index a69beef..9526ea6 100644
--- a/lib/src/authentication/credential.dart
+++ b/lib/src/authentication/credential.dart
@@ -46,14 +46,14 @@
/// Create credential that stores clear text token.
Credential.token(this.url, this.token)
- : env = null,
- unknownFields = const <String, dynamic>{};
+ : env = null,
+ unknownFields = const <String, dynamic>{};
/// Create credential that stores environment variable name that stores token
/// value.
Credential.env(this.url, this.env)
- : token = null,
- unknownFields = const <String, dynamic>{};
+ : token = null,
+ unknownFields = const <String, dynamic>{};
/// Deserialize [json] into [Credential] type.
///
@@ -146,8 +146,10 @@
tokenValue = token!;
}
if (!isValidBearerToken(tokenValue)) {
- dataError('Credential token for $url is not a valid Bearer token. '
- 'It should match `^[a-zA-Z0-9._~+/=-]+\$`');
+ dataError(
+ 'Credential token for $url is not a valid Bearer token. '
+ 'It should match `^[a-zA-Z0-9._~+/=-]+\$`',
+ );
}
return Future.value('Bearer $tokenValue');
diff --git a/lib/src/command.dart b/lib/src/command.dart
index a703522..976ce53 100644
--- a/lib/src/command.dart
+++ b/lib/src/command.dart
@@ -160,9 +160,10 @@
/// Short description of how the arguments should be provided in `invocation`.
///
/// Override for giving a more detailed description.
- String get argumentsDescription => subcommands.isEmpty
- ? '<subcommand> [arguments...]'
- : (takesArguments ? '[arguments...]' : '');
+ String get argumentsDescription =>
+ subcommands.isEmpty
+ ? '<subcommand> [arguments...]'
+ : (takesArguments ? '[arguments...]' : '');
/// If not `null` this overrides the default exit-code [exit_codes.SUCCESS]
/// when exiting successfully.
@@ -316,17 +317,20 @@
if (!argResults.wasParsed('color')) {
forceColors = ForceColorOption.auto;
} else {
- forceColors = argResults.flag('color')
- ? ForceColorOption.always
- : ForceColorOption.never;
+ forceColors =
+ argResults.flag('color')
+ ? ForceColorOption.always
+ : ForceColorOption.never;
}
}
static void _computeCommand(ArgResults argResults) {
final list = <String?>[];
- for (var command = argResults.command;
- command != null;
- command = command.command) {
+ for (
+ var command = argResults.command;
+ command != null;
+ command = command.command
+ ) {
var commandName = command.name;
if (list.isEmpty) {
@@ -356,7 +360,8 @@
static void addColorFlag(ArgParser argParser) {
argParser.addFlag(
'color',
- help: 'Use colors in terminal output.\n'
+ help:
+ 'Use colors in terminal output.\n'
'Defaults to color when connected to a '
'terminal, and no-color otherwise.',
);
diff --git a/lib/src/command/add.dart b/lib/src/command/add.dart
index d080f42..31d4a11 100644
--- a/lib/src/command/add.dart
+++ b/lib/src/command/add.dart
@@ -106,11 +106,7 @@
// To avoid breaking changes we keep supporting them, but hide them from
// --help to discourage further use. Combining these with new syntax will
// fail.
- argParser.addOption(
- 'git-url',
- help: 'Git URL of the package',
- hide: true,
- );
+ argParser.addOption('git-url', help: 'Git URL of the package', hide: true);
argParser.addOption(
'git-ref',
help: 'Git branch or commit to be retrieved',
@@ -218,8 +214,10 @@
);
} on GitException {
final name = updates.first.ref.name;
- dataError('Unable to resolve package "$name" with the given '
- 'git parameters.');
+ dataError(
+ 'Unable to resolve package "$name" with the given '
+ 'git parameters.',
+ );
} on SolveFailure catch (e) {
dataError(e.message);
} on WrappedException catch (e) {
@@ -231,8 +229,9 @@
for (final update in updates) {
final ref = update.ref;
final name = ref.name;
- final resultPackage = solveResult.packages
- .firstWhere((packageId) => packageId.name == name);
+ final resultPackage = solveResult.packages.firstWhere(
+ (packageId) => packageId.name == name,
+ );
/// Assert that [resultPackage] is within the original user's
/// expectations.
@@ -240,9 +239,11 @@
if (constraint != null && !constraint.allows(resultPackage.version)) {
final dependencyOverrides = resolutionPubspec.dependencyOverrides;
if (dependencyOverrides.isNotEmpty) {
- dataError('"$name" resolved to "${resultPackage.version}" which '
- 'does not satisfy constraint "$constraint". This could be '
- 'caused by "dependency_overrides".');
+ dataError(
+ '"$name" resolved to "${resultPackage.version}" which '
+ 'does not satisfy constraint "$constraint". This could be '
+ 'caused by "dependency_overrides".',
+ );
}
}
}
@@ -294,8 +295,10 @@
}
if (isOffline) {
- log.warning('Warning: Packages added when offline may not resolve to '
- 'the latest compatible version available.');
+ log.warning(
+ 'Warning: Packages added when offline may not resolve to '
+ 'the latest compatible version available.',
+ );
}
}
@@ -311,17 +314,21 @@
final dependencyOverrides = [...original.dependencyOverrides.values];
final dependencyNames = dependencies.map((dependency) => dependency.name);
- final devDependencyNames =
- devDependencies.map((devDependency) => devDependency.name);
- final range =
- package.ref.withConstraint(package.constraint ?? VersionConstraint.any);
+ final devDependencyNames = devDependencies.map(
+ (devDependency) => devDependency.name,
+ );
+ final range = package.ref.withConstraint(
+ package.constraint ?? VersionConstraint.any,
+ );
if (package.isOverride) {
dependencyOverrides.add(range);
} else if (package.isDev) {
if (devDependencyNames.contains(name)) {
- log.message('"$name" is already in "dev_dependencies". '
- 'Will try to update the constraint.');
+ log.message(
+ '"$name" is already in "dev_dependencies". '
+ 'Will try to update the constraint.',
+ );
devDependencies.removeWhere((element) => element.name == name);
}
@@ -330,9 +337,11 @@
/// remove the package from dependencies, since it might cause the user's
/// code to break.
if (dependencyNames.contains(name)) {
- dataError('"$name" is already in "dependencies". '
- 'Use "pub remove $name" to remove it before adding it '
- 'to "dev_dependencies"');
+ dataError(
+ '"$name" is already in "dependencies". '
+ 'Use "pub remove $name" to remove it before adding it '
+ 'to "dev_dependencies"',
+ );
}
devDependencies.add(range);
@@ -349,8 +358,10 @@
/// dependencies, we remove the package from dev_dependencies, since it is
/// now redundant.
if (devDependencyNames.contains(name)) {
- log.message('"$name" was found in dev_dependencies. '
- 'Removing "$name" and adding it to dependencies instead.');
+ log.message(
+ '"$name" was found in dev_dependencies. '
+ 'Removing "$name" and adding it to dependencies instead.',
+ );
devDependencies.removeWhere((element) => element.name == name);
}
@@ -394,9 +405,10 @@
'The only allowed prefixes are "dev:" and "override:"',
);
} else {
- final packageName = match2.namedGroup('descriptor') == null
- ? match2.namedGroup('prefix')
- : match2.namedGroup('name');
+ final packageName =
+ match2.namedGroup('descriptor') == null
+ ? match2.namedGroup('prefix')
+ : match2.namedGroup('name');
usageException('Not a valid package name: "$packageName"');
}
}
@@ -481,16 +493,18 @@
['sdk'],
];
- for (final flag
- in conflictingFlagSets.expand((s) => s).where(argResults.wasParsed)) {
+ for (final flag in conflictingFlagSets
+ .expand((s) => s)
+ .where(argResults.wasParsed)) {
final conflictingFlag = conflictingFlagSets
.where((s) => !s.contains(flag))
.expand((s) => s)
.firstWhereOrNull(argResults.wasParsed);
if (conflictingFlag != null) {
usageException(
- 'Packages can only have one source, "pub add" flags "--$flag" and '
- '"--$conflictingFlag" are conflicting.');
+ 'Packages can only have one source, "pub add" flags "--$flag" and '
+ '"--$conflictingFlag" are conflicting.',
+ );
}
}
@@ -632,12 +646,8 @@
try {
dummyPubspec = Pubspec.fromMap(
{
- 'dependencies': {
- packageName: parsedDescriptor,
- },
- 'environment': {
- 'sdk': sdk.version.toString(),
- },
+ 'dependencies': {packageName: parsedDescriptor},
+ 'environment': {'sdk': sdk.version.toString()},
},
cache.sources,
// Resolve relative paths relative to current, not where the
@@ -654,7 +664,8 @@
} else {
ref = range.toRef();
}
- final hasExplicitConstraint = parsedDescriptor is String ||
+ final hasExplicitConstraint =
+ parsedDescriptor is String ||
(parsedDescriptor is Map &&
parsedDescriptor.containsKey('version'));
// If the descriptor has an explicit constraint, use that. Otherwise we
@@ -682,15 +693,17 @@
List<PackageId> resultPackages,
List<_ParseResult> updates,
) {
- final yamlEditor =
- YamlEditor(readTextFile(entrypoint.workPackage.pubspecPath));
+ final yamlEditor = YamlEditor(
+ readTextFile(entrypoint.workPackage.pubspecPath),
+ );
log.io('Reading ${entrypoint.workPackage.pubspecPath}.');
log.fine('Contents:\n$yamlEditor');
for (final update in updates) {
- final dependencyKey = update.isDev
- ? 'dev_dependencies'
- : (update.isOverride ? 'dependency_overrides' : 'dependencies');
+ final dependencyKey =
+ update.isDev
+ ? 'dev_dependencies'
+ : (update.isOverride ? 'dependency_overrides' : 'dependencies');
final constraint = update.constraint;
final ref = update.ref;
final name = ref.name;
@@ -707,19 +720,17 @@
entrypoint.workPackage,
);
- if (yamlEditor.parseAt(
- [dependencyKey],
- orElse: () => YamlScalar.wrap(null),
- ).value ==
+ if (yamlEditor.parseAt([
+ dependencyKey,
+ ], orElse: () => YamlScalar.wrap(null)).value ==
null) {
// Handle the case where [dependencyKey] does not already exist.
// We ensure it is in Block-style by default.
yamlEditor.update(
[dependencyKey],
- wrapAsYamlNode(
- {name: description},
- collectionStyle: CollectionStyle.BLOCK,
- ),
+ wrapAsYamlNode({
+ name: description,
+ }, collectionStyle: CollectionStyle.BLOCK),
);
} else {
final packagePath = [dependencyKey, name];
@@ -731,8 +742,9 @@
/// dependencies. Refer to [_addPackageToPubspec] for additional
/// discussion.
if (!update.isDev && !update.isOverride) {
- final devDependenciesNode = yamlEditor
- .parseAt(['dev_dependencies'], orElse: () => YamlScalar.wrap(null));
+ final devDependenciesNode = yamlEditor.parseAt([
+ 'dev_dependencies',
+ ], orElse: () => YamlScalar.wrap(null));
if (devDependenciesNode is YamlMap &&
devDependenciesNode.containsKey(name)) {
diff --git a/lib/src/command/bump.dart b/lib/src/command/bump.dart
index 906ca2b..9f55427 100644
--- a/lib/src/command/bump.dart
+++ b/lib/src/command/bump.dart
@@ -30,8 +30,9 @@
}
String? _versionLines(YamlMap map, String text, String prefix) {
- final entry = map.nodes.entries
- .firstWhereOrNull((e) => (e.key as YamlNode).value == 'version');
+ final entry = map.nodes.entries.firstWhereOrNull(
+ (e) => (e.key as YamlNode).value == 'version',
+ );
if (entry == null) return null;
final firstLine = (entry.key as YamlNode).span.start.line;
@@ -50,8 +51,9 @@
final newVersion = updateVersion(currentVersion);
- final originalPubspecText =
- readTextFile(entrypoint.workPackage.pubspecPath);
+ final originalPubspecText = readTextFile(
+ entrypoint.workPackage.pubspecPath,
+ );
final yamlEditor = YamlEditor(originalPubspecText);
yamlEditor.update(['version'], newVersion.toString());
final updatedPubspecText = yamlEditor.toString();
@@ -81,10 +83,7 @@
log.message(afterText);
log.message('\nRemember to update `CHANGELOG.md` before publishing.');
}
- writeTextFile(
- entrypoint.workPackage.pubspecPath,
- yamlEditor.toString(),
- );
+ writeTextFile(entrypoint.workPackage.pubspecPath, yamlEditor.toString());
}
}
}
diff --git a/lib/src/command/cache.dart b/lib/src/command/cache.dart
index 43e8c5a..4a370f7 100644
--- a/lib/src/command/cache.dart
+++ b/lib/src/command/cache.dart
@@ -23,8 +23,6 @@
addSubcommand(CacheListCommand());
addSubcommand(CacheCleanCommand());
addSubcommand(CacheRepairCommand());
- addSubcommand(
- CachePreloadCommand(),
- );
+ addSubcommand(CachePreloadCommand());
}
}
diff --git a/lib/src/command/cache_add.dart b/lib/src/command/cache_add.dart
index 693631f..635be80 100644
--- a/lib/src/command/cache_add.dart
+++ b/lib/src/command/cache_add.dart
@@ -64,9 +64,10 @@
final source = cache.hosted;
// TODO(rnystrom): Allow specifying the server.
- final ids = (await cache.getVersions(source.refFor(package)))
- .where((id) => constraint.allows(id.version))
- .toList();
+ final ids =
+ (await cache.getVersions(
+ source.refFor(package),
+ )).where((id) => constraint.allows(id.version)).toList();
if (ids.isEmpty) {
// TODO(rnystrom): Show most recent unmatching version?
diff --git a/lib/src/command/cache_clean.dart b/lib/src/command/cache_clean.dart
index 3d1b9d2..8dbd0cc 100644
--- a/lib/src/command/cache_clean.dart
+++ b/lib/src/command/cache_clean.dart
@@ -27,7 +27,8 @@
@override
Future<void> runProtected() async {
if (dirExists(cache.rootDir)) {
- if (argResults.flag('force') || await confirm('''
+ if (argResults.flag('force') ||
+ await confirm('''
This will remove everything inside ${cache.rootDir}.
You will have to run `$topLevelProgram pub get` again in each project.
Are you sure?''')) {
diff --git a/lib/src/command/cache_repair.dart b/lib/src/command/cache_repair.dart
index 35d0840..e5ac233 100644
--- a/lib/src/command/cache_repair.dart
+++ b/lib/src/command/cache_repair.dart
@@ -27,11 +27,11 @@
cache.deleteTempDir();
// Repair every cached source.
final repairResults = (await Future.wait(
- <CachedSource>[cache.hosted, cache.git].map(
- (source) => source.repairCachedPackages(cache),
- ),
- ))
- .expand((x) => x);
+ <CachedSource>[
+ cache.hosted,
+ cache.git,
+ ].map((source) => source.repairCachedPackages(cache)),
+ )).expand((x) => x);
final successes = repairResults.where((result) => result.success);
final failures = repairResults.where((result) => !result.success);
diff --git a/lib/src/command/dependency_services.dart b/lib/src/command/dependency_services.dart
index 9ae3aee..fddbb1c 100644
--- a/lib/src/command/dependency_services.dart
+++ b/lib/src/command/dependency_services.dart
@@ -38,7 +38,8 @@
@override
String get name => 'report';
@override
- String get description => 'Output a machine-digestible '
+ String get description =>
+ 'Output a machine-digestible '
'report of the upgrade options for each dependency.';
@override
String get argumentsDescription => '[options]';
@@ -59,16 +60,18 @@
Future<void> runProtected() async {
_checkAtRoot(entrypoint);
final stdinString = await utf8.decodeStream(stdin);
- final input = json.decode(stdinString.isEmpty ? '{}' : stdinString)
- as Map<String, Object?>;
+ final input =
+ json.decode(stdinString.isEmpty ? '{}' : stdinString)
+ as Map<String, Object?>;
final additionalConstraints = _parseDisallowed(input, cache);
final targetPackageName = input['target'];
if (targetPackageName is! String?) {
throw const FormatException('"target" should be a String.');
}
- final compatibleWorkspace = entrypoint.workspaceRoot
- .transformWorkspace((p) => stripDependencyOverrides(p.pubspec));
+ final compatibleWorkspace = entrypoint.workspaceRoot.transformWorkspace(
+ (p) => stripDependencyOverrides(p.pubspec),
+ );
final breakingWorkspace = compatibleWorkspace.transformWorkspace(
(p) => stripVersionBounds(p.pubspec),
@@ -94,30 +97,34 @@
final targetPackage =
targetPackageName == null ? null : currentPackages[targetPackageName];
- for (final package in targetPackage == null
- ? currentPackages.values
- : <PackageId>[targetPackage]) {
- final compatibleVersion = compatiblePackagesResult
- ?.firstWhereOrNull((element) => element.name == package.name);
- final multiBreakingVersion = breakingPackagesResult
- ?.firstWhereOrNull((element) => element.name == package.name);
+ for (final package
+ in targetPackage == null
+ ? currentPackages.values
+ : <PackageId>[targetPackage]) {
+ final compatibleVersion = compatiblePackagesResult?.firstWhereOrNull(
+ (element) => element.name == package.name,
+ );
+ final multiBreakingVersion = breakingPackagesResult?.firstWhereOrNull(
+ (element) => element.name == package.name,
+ );
final kind = _kindString(compatibleWorkspace, package.name);
PackageId? singleBreakingVersion;
if (kind != 'transitive') {
- final singleBreakingWorkspace = compatibleWorkspace.transformWorkspace(
- (p) {
- final r = stripVersionBounds(p.pubspec, stripOnly: [package.name]);
- return r;
- },
- );
+ final singleBreakingWorkspace = compatibleWorkspace.transformWorkspace((
+ p,
+ ) {
+ final r = stripVersionBounds(p.pubspec, stripOnly: [package.name]);
+ return r;
+ });
final singleBreakingPackagesResult = await _tryResolve(
singleBreakingWorkspace,
cache,
);
- singleBreakingVersion = singleBreakingPackagesResult
- ?.firstWhereOrNull((element) => element.name == package.name);
+ singleBreakingVersion = singleBreakingPackagesResult?.firstWhereOrNull(
+ (element) => element.name == package.name,
+ );
}
PackageId? smallestUpgrade;
if (additionalConstraints.any(
@@ -138,8 +145,9 @@
additionalConstraints: additionalConstraints,
);
- smallestUpgrade = smallestUpgradeResult
- ?.firstWhereOrNull((element) => element.name == package.name);
+ smallestUpgrade = smallestUpgradeResult?.firstWhereOrNull(
+ (element) => element.name == package.name,
+ );
}
Future<List<Object>> computeUpgradeSet(
@@ -163,26 +171,33 @@
'kind': kind,
'source': _source(package, containingDir: directory),
'latest':
- (await cache.getLatest(package.toRef(), version: package.version))
- ?.versionOrHash(),
- 'constraint': _constraintIntersection(compatibleWorkspace, package.name)
- ?.toString(),
+ (await cache.getLatest(
+ package.toRef(),
+ version: package.version,
+ ))?.versionOrHash(),
+ 'constraint':
+ _constraintIntersection(
+ compatibleWorkspace,
+ package.name,
+ )?.toString(),
'compatible': await computeUpgradeSet(
compatibleVersion,
_UpgradeType.compatible,
),
- 'singleBreaking': kind != 'transitive' && singleBreakingVersion == null
- ? <Object>[]
- : await computeUpgradeSet(
- singleBreakingVersion,
- _UpgradeType.singleBreaking,
- ),
- 'multiBreaking': kind != 'transitive' && multiBreakingVersion != null
- ? await computeUpgradeSet(
- multiBreakingVersion,
- _UpgradeType.multiBreaking,
- )
- : <Object>[],
+ 'singleBreaking':
+ kind != 'transitive' && singleBreakingVersion == null
+ ? <Object>[]
+ : await computeUpgradeSet(
+ singleBreakingVersion,
+ _UpgradeType.singleBreaking,
+ ),
+ 'multiBreaking':
+ kind != 'transitive' && multiBreakingVersion != null
+ ? await computeUpgradeSet(
+ multiBreakingVersion,
+ _UpgradeType.multiBreaking,
+ )
+ : <Object>[],
if (smallestUpgrade != null)
'smallestUpdate': await computeUpgradeSet(
smallestUpgrade,
@@ -217,13 +232,11 @@
@override
Future<void> runProtected() async {
_checkAtRoot(entrypoint);
- final currentPackages = fileExists(entrypoint.lockFilePath)
- ? entrypoint.lockFile.packages.values.toList()
- : (await _tryResolve(
- entrypoint.workspaceRoot,
- cache,
- ) ??
- <PackageId>[]);
+ final currentPackages =
+ fileExists(entrypoint.lockFilePath)
+ ? entrypoint.lockFile.packages.values.toList()
+ : (await _tryResolve(entrypoint.workspaceRoot, cache) ??
+ <PackageId>[]);
final dependencies = <Object>[];
final result = <String, Object>{'dependencies': dependencies};
@@ -234,8 +247,10 @@
'version': package.versionOrHash(),
'kind': _kindString(entrypoint.workspaceRoot, package.name),
'constraint':
- _constraintIntersection(entrypoint.workspaceRoot, package.name)
- ?.toString(),
+ _constraintIntersection(
+ entrypoint.workspaceRoot,
+ package.name,
+ )?.toString(),
'source': _source(package, containingDir: directory),
});
}
@@ -323,9 +338,10 @@
final targetConstraint = p.constraint;
final targetPackage = p.name;
final targetVersion = p.version;
- late final section = pubspec.dependencies[targetPackage] != null
- ? 'dependencies'
- : pubspec.devDependencies[targetPackage] != null
+ late final section =
+ pubspec.dependencies[targetPackage] != null
+ ? 'dependencies'
+ : pubspec.devDependencies[targetPackage] != null
? 'dev_dependencies'
: null;
if (section != null) {
@@ -333,15 +349,16 @@
final packageConfig =
pubspecEditor.parseAt([section, targetPackage]).value;
if (packageConfig == null || packageConfig is String) {
- pubspecEditor.update(
- [section, targetPackage],
- targetConstraint.toString(),
- );
+ pubspecEditor.update([
+ section,
+ targetPackage,
+ ], targetConstraint.toString());
} else if (packageConfig is Map) {
- pubspecEditor.update(
- [section, targetPackage, 'version'],
- targetConstraint.toString(),
- );
+ pubspecEditor.update([
+ section,
+ targetPackage,
+ 'version',
+ ], targetConstraint.toString());
} else {
fail(
'The dependency $targetPackage does not have a '
@@ -351,19 +368,20 @@
} else if (targetVersion != null) {
final constraint = _constraintOf(pubspec, targetPackage);
if (constraint != null && !constraint.allows(targetVersion)) {
- pubspecEditor.update(
- [section, targetPackage],
- VersionConstraint.compatibleWith(targetVersion).toString(),
- );
+ pubspecEditor.update([
+ section,
+ targetPackage,
+ ], VersionConstraint.compatibleWith(targetVersion).toString());
}
}
}
updatedPubspecs[package.dir] = pubspecEditor;
}
}
- final lockFile = fileExists(entrypoint.lockFilePath)
- ? readTextFile(entrypoint.lockFilePath)
- : null;
+ final lockFile =
+ fileExists(entrypoint.lockFilePath)
+ ? readTextFile(entrypoint.lockFilePath)
+ : null;
final lockFileYaml = lockFile == null ? null : loadYaml(lockFile);
final lockFileEditor = lockFile == null ? null : YamlEditor(lockFile);
@@ -380,19 +398,28 @@
}
if (targetVersion != null &&
(lockFileYaml['packages'] as Map).containsKey(targetPackage)) {
- lockFileEditor.update(
- ['packages', targetPackage, 'version'],
- targetVersion.toString(),
- );
+ lockFileEditor.update([
+ 'packages',
+ targetPackage,
+ 'version',
+ ], targetVersion.toString());
// Remove the now outdated content-hash - it will be restored below
// after resolution.
- final packageMap = lockFileEditor
- .parseAt(['packages', targetPackage, 'description']).value as Map;
+ final packageMap =
+ lockFileEditor.parseAt([
+ 'packages',
+ targetPackage,
+ 'description',
+ ]).value
+ as Map;
final hasSha = packageMap.containsKey('sha256');
if (hasSha) {
- lockFileEditor.remove(
- ['packages', targetPackage, 'description', 'sha256'],
- );
+ lockFileEditor.remove([
+ 'packages',
+ targetPackage,
+ 'description',
+ 'sha256',
+ ]);
}
} else if (targetRevision != null &&
(lockFileYaml['packages'] as Map).containsKey(targetPackage)) {
@@ -417,14 +444,17 @@
// GitSource can only return a single version.
assert(versions.length == 1);
- lockFileEditor.update(
- ['packages', targetPackage, 'version'],
- versions.single.version.toString(),
- );
- lockFileEditor.update(
- ['packages', targetPackage, 'description', 'resolved-ref'],
- targetRevision,
- );
+ lockFileEditor.update([
+ 'packages',
+ targetPackage,
+ 'version',
+ ], versions.single.version.toString());
+ lockFileEditor.update([
+ 'packages',
+ targetPackage,
+ 'description',
+ 'resolved-ref',
+ ], targetRevision);
} else if (targetVersion == null &&
targetRevision == null &&
!(lockFileYaml['packages'] as Map).containsKey(targetPackage)) {
@@ -436,118 +466,114 @@
}
}
- final updatedLockfile = lockFileEditor == null
- ? null
- : LockFile.parse(
- lockFileEditor.toString(),
- cache.sources,
- filePath: entrypoint.lockFilePath,
- );
- await log.errorsOnlyUnlessTerminal(
- () async {
- final updatedWorkspace = entrypoint.workspaceRoot.transformWorkspace(
- (package) => Pubspec.parse(
- updatedPubspecs[package.dir].toString(),
- cache.sources,
- location: toUri(package.pubspecPath),
- containingDescription: RootDescription(package.dir),
- ),
- );
- // Resolve versions, this will update transitive dependencies that were
- // not passed in the input. And also counts as a validation of the input
- // by ensuring the resolution is valid.
- //
- // We don't use `acquireDependencies` as that downloads all the archives
- // to cache.
- // TODO: Handle HTTP exceptions gracefully!
- final solveResult = await resolveVersions(
- SolveType.get,
- cache,
- updatedWorkspace,
- lockFile: updatedLockfile,
- );
- for (final package in entrypoint.workspaceRoot.transitiveWorkspace) {
- final updatedPubspec = updatedPubspecs[package.dir]!;
- if (updatedPubspec.edits.isNotEmpty) {
- writeTextFile(
- package.pubspecPath,
- updatedPubspec.toString(),
+ final updatedLockfile =
+ lockFileEditor == null
+ ? null
+ : LockFile.parse(
+ lockFileEditor.toString(),
+ cache.sources,
+ filePath: entrypoint.lockFilePath,
);
- }
+ await log.errorsOnlyUnlessTerminal(() async {
+ final updatedWorkspace = entrypoint.workspaceRoot.transformWorkspace(
+ (package) => Pubspec.parse(
+ updatedPubspecs[package.dir].toString(),
+ cache.sources,
+ location: toUri(package.pubspecPath),
+ containingDescription: RootDescription(package.dir),
+ ),
+ );
+ // Resolve versions, this will update transitive dependencies that were
+ // not passed in the input. And also counts as a validation of the input
+ // by ensuring the resolution is valid.
+ //
+ // We don't use `acquireDependencies` as that downloads all the archives
+ // to cache.
+ // TODO: Handle HTTP exceptions gracefully!
+ final solveResult = await resolveVersions(
+ SolveType.get,
+ cache,
+ updatedWorkspace,
+ lockFile: updatedLockfile,
+ );
+ for (final package in entrypoint.workspaceRoot.transitiveWorkspace) {
+ final updatedPubspec = updatedPubspecs[package.dir]!;
+ if (updatedPubspec.edits.isNotEmpty) {
+ writeTextFile(package.pubspecPath, updatedPubspec.toString());
}
- // Only if we originally had a lock-file we write the resulting lockfile
- // back.
- if (updatedLockfile != null) {
- final updatedPackages = <PackageId>[];
- for (var package in solveResult.packages) {
- if (package.isRoot) continue;
- final description = package.description;
- // Handle content-hashes of hosted dependencies.
- if (description is ResolvedHostedDescription) {
- // Ensure we get content-hashes if the original lock-file had
- // them.
- if (hasContentHashes) {
- if (description.sha256 == null) {
- // We removed the hash above before resolution - as we get the
- // locked id back we need to find the content-hash from the
- // version listing.
- //
- // `pub get` gets this version-listing from the downloaded
- // archive but we don't want to download all archives - so we
- // copy it from the version listing.
- package = (await cache.getVersions(package.toRef()))
- .firstWhere((id) => id == package, orElse: () => package);
- if ((package.description as ResolvedHostedDescription)
- .sha256 ==
- null) {
- // This happens when we resolved a package from a legacy
- // server not providing archive_sha256. As a side-effect of
- // downloading the package we compute and store the sha256.
- package = (await cache.downloadPackage(package)).packageId;
- }
+ }
+ // Only if we originally had a lock-file we write the resulting lockfile
+ // back.
+ if (updatedLockfile != null) {
+ final updatedPackages = <PackageId>[];
+ for (var package in solveResult.packages) {
+ if (package.isRoot) continue;
+ final description = package.description;
+ // Handle content-hashes of hosted dependencies.
+ if (description is ResolvedHostedDescription) {
+ // Ensure we get content-hashes if the original lock-file had
+ // them.
+ if (hasContentHashes) {
+ if (description.sha256 == null) {
+ // We removed the hash above before resolution - as we get the
+ // locked id back we need to find the content-hash from the
+ // version listing.
+ //
+ // `pub get` gets this version-listing from the downloaded
+ // archive but we don't want to download all archives - so we
+ // copy it from the version listing.
+ package = (await cache.getVersions(
+ package.toRef(),
+ )).firstWhere((id) => id == package, orElse: () => package);
+ if ((package.description as ResolvedHostedDescription).sha256 ==
+ null) {
+ // This happens when we resolved a package from a legacy
+ // server not providing archive_sha256. As a side-effect of
+ // downloading the package we compute and store the sha256.
+ package = (await cache.downloadPackage(package)).packageId;
}
- } else {
- // The original pubspec.lock did not have content-hashes. Remove
- // any content hash, so we don't start adding them.
- package = PackageId(
- package.name,
- package.version,
- description.withSha256(null),
- );
}
- // Keep using https://pub.dartlang.org if the original lockfile
- // used it. This is to support lockfiles from old sdks.
- if (!usesPubDev &&
- HostedSource.isPubDevUrl(description.description.url)) {
- package = PackageId(
- package.name,
- package.version,
- ResolvedHostedDescription(
- HostedDescription.raw(
- package.name,
- HostedSource.pubDartlangUrl,
- ),
- sha256: (package.description as ResolvedHostedDescription)
- .sha256,
- ),
- );
- }
+ } else {
+ // The original pubspec.lock did not have content-hashes. Remove
+ // any content hash, so we don't start adding them.
+ package = PackageId(
+ package.name,
+ package.version,
+ description.withSha256(null),
+ );
}
- updatedPackages.add(package);
+ // Keep using https://pub.dartlang.org if the original lockfile
+ // used it. This is to support lockfiles from old sdks.
+ if (!usesPubDev &&
+ HostedSource.isPubDevUrl(description.description.url)) {
+ package = PackageId(
+ package.name,
+ package.version,
+ ResolvedHostedDescription(
+ HostedDescription.raw(
+ package.name,
+ HostedSource.pubDartlangUrl,
+ ),
+ sha256:
+ (package.description as ResolvedHostedDescription).sha256,
+ ),
+ );
+ }
}
-
- final newLockFile = LockFile(
- updatedPackages,
- sdkConstraints: updatedLockfile.sdkConstraints,
- mainDependencies: entrypoint.lockFile.mainDependencies,
- devDependencies: entrypoint.lockFile.devDependencies,
- overriddenDependencies: entrypoint.lockFile.overriddenDependencies,
- );
-
- newLockFile.writeToFile(entrypoint.lockFilePath, cache);
+ updatedPackages.add(package);
}
- },
- );
+
+ final newLockFile = LockFile(
+ updatedPackages,
+ sdkConstraints: updatedLockfile.sdkConstraints,
+ mainDependencies: entrypoint.lockFile.mainDependencies,
+ devDependencies: entrypoint.lockFile.devDependencies,
+ overriddenDependencies: entrypoint.lockFile.overriddenDependencies,
+ );
+
+ newLockFile.writeToFile(entrypoint.lockFilePath, cache);
+ }
+ });
// Dummy message.
log.message(json.encode({'dependencies': <Object>[]}));
}
@@ -565,10 +591,8 @@
String? gitRevision;
VersionConstraint? constraint;
_PackageVersion(this.name, String? versionOrHash, this.constraint)
- : version =
- versionOrHash == null ? null : _tryParseVersion(versionOrHash),
- gitRevision =
- versionOrHash == null ? null : _tryParseHash(versionOrHash);
+ : version = versionOrHash == null ? null : _tryParseVersion(versionOrHash),
+ gitRevision = versionOrHash == null ? null : _tryParseHash(versionOrHash);
}
Version? _tryParseVersion(String v) {
@@ -593,8 +617,8 @@
return pubspec.dependencies.containsKey(package.name)
? pubspec.dependencies
: pubspec.devDependencies.containsKey(package.name)
- ? pubspec.devDependencies
- : null;
+ ? pubspec.devDependencies
+ : null;
}
/// Return a constraint compatible with [newVersion].
@@ -712,9 +736,10 @@
Package workspace,
String packageName,
) {
- final constraints = workspace.transitiveWorkspace
- .map((p) => _constraintOf(p.pubspec, packageName))
- .nonNulls;
+ final constraints =
+ workspace.transitiveWorkspace
+ .map((p) => _constraintOf(p.pubspec, packageName))
+ .nonNulls;
if (constraints.isEmpty) {
return null;
}
@@ -730,20 +755,23 @@
}
String _kindString(Package workspace, String packageName) {
- return workspace.transitiveWorkspace
- .any((p) => p.dependencies.containsKey(packageName))
+ return workspace.transitiveWorkspace.any(
+ (p) => p.dependencies.containsKey(packageName),
+ )
? 'direct'
- : workspace.transitiveWorkspace
- .any((p) => p.devDependencies.containsKey(packageName))
- ? 'dev'
- : 'transitive';
+ : workspace.transitiveWorkspace.any(
+ (p) => p.devDependencies.containsKey(packageName),
+ )
+ ? 'dev'
+ : 'transitive';
}
Map<String, Object?> _source(PackageId id, {required String containingDir}) {
return {
'type': id.source.name,
- 'description':
- id.description.serializeForLockfile(containingDir: containingDir),
+ 'description': id.description.serializeForLockfile(
+ containingDir: containingDir,
+ ),
};
}
@@ -758,7 +786,8 @@
if (fileExists(entrypoint.lockFilePath)) {
currentPackages = Map<String, PackageId>.from(entrypoint.lockFile.packages);
} else {
- final resolution = await _tryResolve(entrypoint.workspaceRoot, cache) ??
+ final resolution =
+ await _tryResolve(entrypoint.workspaceRoot, cache) ??
(throw DataException('Failed to resolve pubspec'));
currentPackages = Map<String, PackageId>.fromIterable(
resolution,
@@ -782,17 +811,19 @@
}) async {
if (package == null) return [];
final lockFile = entrypoint.lockFile;
- final upgradedWorkspace = (upgradeType == _UpgradeType.multiBreaking ||
- upgradeType == _UpgradeType.smallestUpdate)
- ? workspace.transformWorkspace((p) => stripVersionBounds(p.pubspec))
- : workspace.transformWorkspace((p) => p.pubspec.copyWith());
+ final upgradedWorkspace =
+ (upgradeType == _UpgradeType.multiBreaking ||
+ upgradeType == _UpgradeType.smallestUpdate)
+ ? workspace.transformWorkspace((p) => stripVersionBounds(p.pubspec))
+ : workspace.transformWorkspace((p) => p.pubspec.copyWith());
for (final p in upgradedWorkspace.transitiveWorkspace) {
final dependencySet = _dependencySetOfPackage(p.pubspec, package);
if (dependencySet != null) {
// Force the version to be the new version.
- dependencySet[package.name] =
- package.toRef().withConstraint(package.toRange().constraint);
+ dependencySet[package.name] = package.toRef().withConstraint(
+ package.toRange().constraint,
+ );
}
}
@@ -810,55 +841,70 @@
if (resolution == null) {
return [];
}
- final workspaceNames = {
- ...workspace.transitiveWorkspace.map((p) => p.name),
- };
+ final workspaceNames = {...workspace.transitiveWorkspace.map((p) => p.name)};
return [
- ...resolution.packages.where((r) {
- if (workspaceNames.contains(r.name)) return false;
- final originalVersion = currentPackages[r.name];
- return originalVersion == null || r != originalVersion;
- }).map((p) {
- final constraintIntersection = _constraintIntersection(workspace, p.name);
- final currentPackage = currentPackages[p.name];
- return {
- 'name': p.name,
- 'version': p.versionOrHash(),
- 'kind': _kindString(workspace, p.name),
- 'source': _source(p, containingDir: entrypoint.workspaceRoot.dir),
- 'constraintBumped': constraintIntersection == null
- ? null
- : upgradeType == _UpgradeType.compatible
- ? constraintIntersection.toString()
- : _bumpConstraint(constraintIntersection, p.version).toString(),
- 'constraintWidened': constraintIntersection == null
- ? null
- : upgradeType == _UpgradeType.compatible
- ? constraintIntersection.toString()
- : _widenConstraint(constraintIntersection, p.version)
- .toString(),
- 'constraintBumpedIfNeeded': constraintIntersection == null
- ? null
- : upgradeType == _UpgradeType.compatible
- ? constraintIntersection.toString()
- : constraintIntersection.allows(p.version)
+ ...resolution.packages
+ .where((r) {
+ if (workspaceNames.contains(r.name)) return false;
+ final originalVersion = currentPackages[r.name];
+ return originalVersion == null || r != originalVersion;
+ })
+ .map((p) {
+ final constraintIntersection = _constraintIntersection(
+ workspace,
+ p.name,
+ );
+ final currentPackage = currentPackages[p.name];
+ return {
+ 'name': p.name,
+ 'version': p.versionOrHash(),
+ 'kind': _kindString(workspace, p.name),
+ 'source': _source(p, containingDir: entrypoint.workspaceRoot.dir),
+ 'constraintBumped':
+ constraintIntersection == null
+ ? null
+ : upgradeType == _UpgradeType.compatible
? constraintIntersection.toString()
- : _bumpConstraint(constraintIntersection, p.version)
- .toString(),
- 'previousVersion': currentPackage?.versionOrHash(),
- 'previousConstraint': constraintIntersection?.toString(),
- 'previousSource': currentPackage == null
- ? null
- : _source(
- currentPackage,
- containingDir: entrypoint.workspaceRoot.dir,
- ),
- };
- }),
+ : _bumpConstraint(
+ constraintIntersection,
+ p.version,
+ ).toString(),
+ 'constraintWidened':
+ constraintIntersection == null
+ ? null
+ : upgradeType == _UpgradeType.compatible
+ ? constraintIntersection.toString()
+ : _widenConstraint(
+ constraintIntersection,
+ p.version,
+ ).toString(),
+ 'constraintBumpedIfNeeded':
+ constraintIntersection == null
+ ? null
+ : upgradeType == _UpgradeType.compatible
+ ? constraintIntersection.toString()
+ : constraintIntersection.allows(p.version)
+ ? constraintIntersection.toString()
+ : _bumpConstraint(
+ constraintIntersection,
+ p.version,
+ ).toString(),
+ 'previousVersion': currentPackage?.versionOrHash(),
+ 'previousConstraint': constraintIntersection?.toString(),
+ 'previousSource':
+ currentPackage == null
+ ? null
+ : _source(
+ currentPackage,
+ containingDir: entrypoint.workspaceRoot.dir,
+ ),
+ };
+ }),
// Find packages that were removed by the resolution
for (final oldPackageName in lockFile.packages.keys)
- if (!resolution.packages
- .any((newPackage) => newPackage.name == oldPackageName))
+ if (!resolution.packages.any(
+ (newPackage) => newPackage.name == oldPackageName,
+ ))
{
'name': oldPackageName,
'version': null,
@@ -900,13 +946,7 @@
if (url is! String) {
throw const FormatException('"url" should be a string.');
}
- final ref = PackageRef(
- name,
- HostedDescription(
- name,
- url,
- ),
- );
+ final ref = PackageRef(name, HostedDescription(name, url));
final constraints = disallowed['versions'];
if (constraints is! List) {
throw const FormatException('"versions" should be a list.');
diff --git a/lib/src/command/deps.dart b/lib/src/command/deps.dart
index b374eb4..c3cd18f 100644
--- a/lib/src/command/deps.dart
+++ b/lib/src/command/deps.dart
@@ -107,20 +107,23 @@
final currentPackage =
(await entrypoint.packageGraph).packages[current]!;
final isRoot = workspacePackageNames.contains(currentPackage.name);
- final next = (isRoot
- ? currentPackage.immediateDependencies
- : currentPackage.dependencies)
- .keys
- .toList();
- final dependencyType =
- entrypoint.workspaceRoot.pubspec.dependencyType(current);
- final kind = isRoot
- ? 'root'
- : (dependencyType == DependencyType.direct
- ? 'direct'
- : (dependencyType == DependencyType.dev
- ? 'dev'
- : 'transitive'));
+ final next =
+ (isRoot
+ ? currentPackage.immediateDependencies
+ : currentPackage.dependencies)
+ .keys
+ .toList();
+ final dependencyType = entrypoint.workspaceRoot.pubspec.dependencyType(
+ current,
+ );
+ final kind =
+ isRoot
+ ? 'root'
+ : (dependencyType == DependencyType.direct
+ ? 'direct'
+ : (dependencyType == DependencyType.dev
+ ? 'dev'
+ : 'transitive'));
final source =
entrypoint.lockFile.packages[current]?.source.name ?? 'root';
packagesJson.add({
@@ -131,11 +134,12 @@
// This field is kept for backwards compatibility with dart 3.5 and
// before. Clients should opt to consume directDependencies and
// devDependencies separately instead.
- 'dependencies': (isRoot
- ? currentPackage.immediateDependencies
- : currentPackage.dependencies)
- .keys
- .toList(),
+ 'dependencies':
+ (isRoot
+ ? currentPackage.immediateDependencies
+ : currentPackage.dependencies)
+ .keys
+ .toList(),
'directDependencies': currentPackage.dependencies.keys.toList(),
if (isRoot)
'devDependencies': currentPackage.devDependencies.keys.toList(),
@@ -145,29 +149,29 @@
final executables = [
for (final package in [
entrypoint.workspaceRoot,
- ...entrypoint.workspaceRoot.immediateDependencies.keys
- .map((name) => graph.packages[name]),
+ ...entrypoint.workspaceRoot.immediateDependencies.keys.map(
+ (name) => graph.packages[name],
+ ),
])
...package!.executableNames.map(
- (name) => package == entrypoint.workspaceRoot
- ? ':$name'
- : (package.name == name ? name : '${package.name}:$name'),
+ (name) =>
+ package == entrypoint.workspaceRoot
+ ? ':$name'
+ : (package.name == name ? name : '${package.name}:$name'),
),
];
buffer.writeln(
- const JsonEncoder.withIndent(' ').convert(
- {
- 'root': entrypoint.workspaceRoot.name,
- 'packages': packagesJson,
- 'sdks': [
- for (var sdk in sdks.values)
- if (sdk.version != null)
- {'name': sdk.name, 'version': sdk.version.toString()},
- ],
- 'executables': executables,
- },
- ),
+ const JsonEncoder.withIndent(' ').convert({
+ 'root': entrypoint.workspaceRoot.name,
+ 'packages': packagesJson,
+ 'sdks': [
+ for (var sdk in sdks.values)
+ if (sdk.version != null)
+ {'name': sdk.name, 'version': sdk.version.toString()},
+ ],
+ 'executables': executables,
+ }),
);
} else {
if (argResults.flag('executables')) {
@@ -201,9 +205,7 @@
/// For each dependency listed, *that* package's immediate dependencies are
/// shown. Unlike [_outputList], this prints all of these dependencies on one
/// line.
- Future<void> _outputCompact(
- StringBuffer buffer,
- ) async {
+ Future<void> _outputCompact(StringBuffer buffer) async {
var first = true;
for (final root in entrypoint.workspaceRoot.transitiveWorkspace) {
if (!first) {
@@ -327,9 +329,7 @@
/// dependency), later ones are not traversed. This is done in breadth-first
/// fashion so that a package will always be expanded at the shallowest
/// depth that it appears at.
- Future<void> _outputTree(
- StringBuffer buffer,
- ) async {
+ Future<void> _outputTree(StringBuffer buffer) async {
// The work list for the breadth-first traversal. It contains the package
// being added to the tree, and the parent map that will receive that
// package.
@@ -344,8 +344,9 @@
final immediateDependencies =
entrypoint.workspaceRoot.immediateDependencies.keys.toSet();
if (!_includeDev) {
- immediateDependencies
- .removeAll(entrypoint.workspaceRoot.devDependencies.keys);
+ immediateDependencies.removeAll(
+ entrypoint.workspaceRoot.devDependencies.keys,
+ );
}
for (var name in workspacePackageNames) {
toWalk.add((await _getPackage(name), packageTree));
@@ -425,8 +426,10 @@
Future<Package> _getPackage(String name) async {
final package = (await entrypoint.packageGraph).packages[name];
if (package != null) return package;
- dataError('The pubspec.yaml file has changed since the pubspec.lock file '
- 'was generated, please run "$topLevelProgram pub get" again.');
+ dataError(
+ 'The pubspec.yaml file has changed since the pubspec.lock file '
+ 'was generated, please run "$topLevelProgram pub get" again.',
+ );
}
/// Outputs all executables reachable from [entrypoint].
@@ -435,9 +438,9 @@
final packages = {
for (final p in entrypoint.workspaceRoot.transitiveWorkspace) ...[
graph.packages[p.name]!,
- ...(_includeDev ? p.immediateDependencies : p.dependencies)
- .keys
- .map((name) => graph.packages[name]!),
+ ...(_includeDev ? p.immediateDependencies : p.dependencies).keys.map(
+ (name) => graph.packages[name]!,
+ ),
],
};
diff --git a/lib/src/command/downgrade.dart b/lib/src/command/downgrade.dart
index ae724dc..1bdd092 100644
--- a/lib/src/command/downgrade.dart
+++ b/lib/src/command/downgrade.dart
@@ -102,8 +102,10 @@
}
if (isOffline) {
- log.warning('Warning: Downgrading when offline may not update you to '
- 'the oldest versions of your dependencies.');
+ log.warning(
+ 'Warning: Downgrading when offline may not update you to '
+ 'the oldest versions of your dependencies.',
+ );
}
}
}
diff --git a/lib/src/command/get.dart b/lib/src/command/get.dart
index 545a877..4e522c0 100644
--- a/lib/src/command/get.dart
+++ b/lib/src/command/get.dart
@@ -37,7 +37,8 @@
argParser.addFlag(
'enforce-lockfile',
negatable: false,
- help: 'Enforce pubspec.lock. '
+ help:
+ 'Enforce pubspec.lock. '
'Fail `pub get` if the current `pubspec.lock` '
'does not exactly specify a valid resolution of `pubspec.yaml` '
'or if any content hash of a hosted package has changed.\n'
diff --git a/lib/src/command/global_activate.dart b/lib/src/command/global_activate.dart
index c0a445d..06e8cd3 100644
--- a/lib/src/command/global_activate.dart
+++ b/lib/src/command/global_activate.dart
@@ -73,7 +73,8 @@
argParser.addOption(
'hosted-url',
abbr: 'u',
- help: 'A custom pub server URL for the package. '
+ help:
+ 'A custom pub server URL for the package. '
'Only applies when using the `hosted` source.',
);
}
@@ -137,8 +138,10 @@
PackageRef ref;
try {
- ref = cache.hosted
- .refFor(package, url: argResults.option('hosted-url'));
+ ref = cache.hosted.refFor(
+ package,
+ url: argResults.option('hosted-url'),
+ );
} on FormatException catch (e) {
usageException('Invalid hosted-url: $e');
}
@@ -156,10 +159,11 @@
validateNoExtraArgs();
if (!packageNameRegExp.hasMatch(package)) {
- final suggestion = dirExists(package)
- ? '\n\nDid you mean `$topLevelProgram pub global activate '
- '--source path ${escapeShellArgument(package)}`?'
- : '';
+ final suggestion =
+ dirExists(package)
+ ? '\n\nDid you mean `$topLevelProgram pub global activate '
+ '--source path ${escapeShellArgument(package)}`?'
+ : '';
usageException('Not a valid package name: "$package"$suggestion');
}
diff --git a/lib/src/command/global_run.dart b/lib/src/command/global_run.dart
index 31a209f..93059d4 100644
--- a/lib/src/command/global_run.dart
+++ b/lib/src/command/global_run.dart
@@ -30,7 +30,8 @@
argParser.addFlag('checked', abbr: 'c', hide: true);
argParser.addMultiOption(
'enable-experiment',
- help: 'Runs the executable in a VM with the given experiments enabled. '
+ help:
+ 'Runs the executable in a VM with the given experiments enabled. '
'(Will disable snapshotting, resulting in slower startup).',
valueHelp: 'experiment',
);
@@ -64,8 +65,10 @@
final args = argResults.rest.skip(1).toList();
if (p.split(executable).length > 1) {
- usageException('Cannot run an executable in a subdirectory of a global '
- 'package.');
+ usageException(
+ 'Cannot run an executable in a subdirectory of a global '
+ 'package.',
+ );
}
if (argResults.wasParsed('mode')) {
@@ -81,9 +84,10 @@
vmArgs: vmArgs,
enableAsserts:
argResults.flag('enable-asserts') || argResults.flag('checked'),
- recompile: (executable) => log.errorsOnlyUnlessTerminal(
- () => globalEntrypoint.precompileExecutable(executable),
- ),
+ recompile:
+ (executable) => log.errorsOnlyUnlessTerminal(
+ () => globalEntrypoint.precompileExecutable(executable),
+ ),
alwaysUseSubprocess: alwaysUseSubprocess,
);
overrideExitCode(exitCode);
diff --git a/lib/src/command/lish.dart b/lib/src/command/lish.dart
index 892dc38..804287a 100644
--- a/lib/src/command/lish.dart
+++ b/lib/src/command/lish.dart
@@ -94,7 +94,8 @@
argParser.addFlag(
'skip-validation',
negatable: false,
- help: 'Publish without validation and resolution '
+ help:
+ 'Publish without validation and resolution '
'(this will ignore errors).',
);
argParser.addOption(
@@ -110,7 +111,8 @@
);
argParser.addOption(
'from-archive',
- help: 'Publish from a .tar.gz archive instead of current folder. '
+ help:
+ 'Publish from a .tar.gz archive instead of current folder. '
'Implies `--skip-validation`.',
valueHelp: '[archive.tar.gz]',
hide: true,
@@ -134,65 +136,79 @@
try {
await log.progress('Uploading', () async {
/// 1. Initiate upload
- final parametersResponse =
- await retryForHttp('initiating upload', () async {
- final request =
- http.Request('GET', host.resolve('api/packages/versions/new'));
- request.attachPubApiHeaders();
- request.attachMetadataHeaders();
- return await client.fetch(request);
- });
+ final parametersResponse = await retryForHttp(
+ 'initiating upload',
+ () async {
+ final request = http.Request(
+ 'GET',
+ host.resolve('api/packages/versions/new'),
+ );
+ request.attachPubApiHeaders();
+ request.attachMetadataHeaders();
+ return await client.fetch(request);
+ },
+ );
final parameters = parseJsonResponse(parametersResponse);
/// 2. Upload package
final url = _expectField(parameters, 'url', parametersResponse);
if (url is! String) invalidServerResponse(parametersResponse);
cloudStorageUrl = Uri.parse(url);
- final uploadResponse =
- await retryForHttp('uploading package', () async {
- // TODO(nweiz): Cloud Storage can provide an XML-formatted error. We
- // should report that error and exit.
- final request = http.MultipartRequest('POST', cloudStorageUrl!);
+ final uploadResponse = await retryForHttp(
+ 'uploading package',
+ () async {
+ // TODO(nweiz): Cloud Storage can provide an XML-formatted error. We
+ // should report that error and exit.
+ final request = http.MultipartRequest('POST', cloudStorageUrl!);
- final fields = _expectField(parameters, 'fields', parametersResponse);
- if (fields is! Map) invalidServerResponse(parametersResponse);
- fields.forEach((key, value) {
- if (value is! String) invalidServerResponse(parametersResponse);
- request.fields[key as String] = value;
- });
+ final fields = _expectField(
+ parameters,
+ 'fields',
+ parametersResponse,
+ );
+ if (fields is! Map) invalidServerResponse(parametersResponse);
+ fields.forEach((key, value) {
+ if (value is! String) invalidServerResponse(parametersResponse);
+ request.fields[key as String] = value;
+ });
- request.followRedirects = false;
- request.files.add(
- http.MultipartFile.fromBytes(
- 'file',
- packageBytes,
- filename: 'package.tar.gz',
- ),
- );
- return await client.fetch(request);
- });
+ request.followRedirects = false;
+ request.files.add(
+ http.MultipartFile.fromBytes(
+ 'file',
+ packageBytes,
+ filename: 'package.tar.gz',
+ ),
+ );
+ return await client.fetch(request);
+ },
+ );
/// 3. Finalize publish
final location = uploadResponse.headers['location'];
if (location == null) throw PubHttpResponseException(uploadResponse);
- final finalizeResponse =
- await retryForHttp('finalizing publish', () async {
- final request = http.Request('GET', Uri.parse(location));
- request.attachPubApiHeaders();
- request.attachMetadataHeaders();
- return await client.fetch(request);
- });
+ final finalizeResponse = await retryForHttp(
+ 'finalizing publish',
+ () async {
+ final request = http.Request('GET', Uri.parse(location));
+ request.attachPubApiHeaders();
+ request.attachMetadataHeaders();
+ return await client.fetch(request);
+ },
+ );
handleJsonSuccess(finalizeResponse);
});
} on AuthenticationException catch (error) {
var msg = '';
if (error.statusCode == 401) {
- msg += '$host package repository requested authentication!\n'
+ msg +=
+ '$host package repository requested authentication!\n'
'You can provide credentials using:\n'
' $topLevelProgram pub token add $host\n';
}
if (error.statusCode == 403) {
- msg += 'Insufficient permissions to the resource at the $host '
+ msg +=
+ 'Insufficient permissions to the resource at the $host '
'package repository.\nYou can modify credentials using:\n'
' $topLevelProgram pub token add $host\n';
}
@@ -262,11 +278,9 @@
Future<void> _validateArgs() async {
if (argResults.wasParsed('server')) {
await log.errorsOnlyUnlessTerminal(() {
- log.message(
- '''
+ log.message('''
The --server option is deprecated. Use `publish_to` in your pubspec.yaml or set
-the \$PUB_HOSTED_URL environment variable.''',
- );
+the \$PUB_HOSTED_URL environment variable.''');
});
}
@@ -291,9 +305,11 @@
if (!dryRun &&
_toArchive == null &&
entrypoint.workPackage.pubspec.isPrivate) {
- dataError('A private package cannot be published.\n'
- 'You can enable this by changing the "publish_to" field in your '
- 'pubspec.');
+ dataError(
+ 'A private package cannot be published.\n'
+ 'You can enable this by changing the "publish_to" field in your '
+ 'pubspec.',
+ );
}
if (skipValidation) {
log.warning(
@@ -337,15 +353,14 @@
'Publishing ${package.name} ${package.version} to $host:\n$fileTree',
);
- final packageBytes = await createTarGz(
- filesAndDirs,
- baseDir: entrypoint.workPackage.dir,
- ).toBytes();
+ final packageBytes =
+ await createTarGz(
+ filesAndDirs,
+ baseDir: entrypoint.workPackage.dir,
+ ).toBytes();
final size = _readableFileSize(packageBytes.length);
- log.message(
- '\nTotal compressed archive size: $size.\n',
- );
+ log.message('\nTotal compressed archive size: $size.\n');
final validationResult =
skipValidation ? null : await _validate(packageBytes, files, host);
@@ -368,9 +383,7 @@
packageBytes = readBinaryFile(archive);
} on FileSystemException catch (e) {
- dataError(
- 'Failed reading archive file: $e)',
- );
+ dataError('Failed reading archive file: $e)');
}
final Pubspec pubspec;
try {
@@ -388,9 +401,11 @@
dataError('Failed to read pubspec.yaml from archive: ${e.message}');
}
if (!dryRun && _toArchive == null && pubspec.isPrivate) {
- dataError('A private package cannot be published.\n'
- 'You can enable this by changing the "publish_to" field in your '
- 'pubspec.');
+ dataError(
+ 'A private package cannot be published.\n'
+ 'You can enable this by changing the "publish_to" field in your '
+ 'pubspec.',
+ );
}
final host = computeHost(pubspec);
log.message('Publishing ${pubspec.name} ${pubspec.version} to $host.');
@@ -431,10 +446,12 @@
);
if (errors.isNotEmpty) {
- dataError('Sorry, your package is missing '
- "${(errors.length > 1) ? 'some requirements' : 'a requirement'} "
- "and can't be published yet.\nFor more information, see: "
- 'https://dart.dev/tools/pub/cmd/pub-lish.\n');
+ dataError(
+ 'Sorry, your package is missing '
+ "${(errors.length > 1) ? 'some requirements' : 'a requirement'} "
+ "and can't be published yet.\nFor more information, see: "
+ 'https://dart.dev/tools/pub/cmd/pub-lish.\n',
+ );
}
return (warningsCount: warnings.length, hintsCount: hints.length);
@@ -446,10 +463,13 @@
/// Throws if user didn't confirm.
Future<void> _confirmUpload(_Publication package, Uri host) async {
if (force) return;
- log.message('\nPublishing is forever; packages cannot be unpublished.'
- '\nPolicy details are available at https://pub.dev/policy\n');
+ log.message(
+ '\nPublishing is forever; packages cannot be unpublished.'
+ '\nPolicy details are available at https://pub.dev/policy\n',
+ );
- var message = 'Do you want to publish '
+ var message =
+ 'Do you want to publish '
'${package.pubspec.name} ${package.pubspec.version} to $host';
if (package.hintCount != 0 || package.warningCount != 0) {
message = '${package.warningsCountMessage}. $message';
@@ -462,9 +482,10 @@
@override
Future runProtected() async {
await _validateArgs();
- final publication = await (_fromArchive == null
- ? _publicationFromEntrypoint()
- : _publicationFromArchive(_fromArchive));
+ final publication =
+ await (_fromArchive == null
+ ? _publicationFromEntrypoint()
+ : _publicationFromArchive(_fromArchive));
if (dryRun) {
log.message(publication.warningsCountMessage);
if (publication.warningCount != 0) {
diff --git a/lib/src/command/login.dart b/lib/src/command/login.dart
index 5b1311b..04e765b 100644
--- a/lib/src/command/login.dart
+++ b/lib/src/command/login.dart
@@ -27,21 +27,27 @@
if (credentials == null) {
final userInfo = await _retrieveUserInfo();
if (userInfo == null) {
- log.warning('Could not retrieve your user-details.\n'
- 'You might have to run `$topLevelProgram pub logout` '
- 'to delete your credentials and try again.');
+ log.warning(
+ 'Could not retrieve your user-details.\n'
+ 'You might have to run `$topLevelProgram pub logout` '
+ 'to delete your credentials and try again.',
+ );
} else {
log.message('You are now logged in as $userInfo');
}
} else {
final userInfo = await _retrieveUserInfo();
if (userInfo == null) {
- log.warning('Your credentials seems broken.\n'
- 'Run `$topLevelProgram pub logout` '
- 'to delete your credentials and try again.');
+ log.warning(
+ 'Your credentials seem broken.\n'
+ 'Run `$topLevelProgram pub logout` '
+ 'to delete your credentials and try again.',
+ );
}
- log.warning('You are already logged in as $userInfo\n'
- 'Run `$topLevelProgram pub logout` to log out and try again.');
+ log.warning(
+ 'You are already logged in as $userInfo\n'
+ 'Run `$topLevelProgram pub logout` to log out and try again.',
+ );
}
}
@@ -50,9 +56,7 @@
final discovery = await oauth2.fetchOidcDiscoveryDocument();
final userInfoEndpoint = discovery['userinfo_endpoint'];
if (userInfoEndpoint is! String) {
- log.fine(
- 'Bad discovery document. userinfo_endpoint not a String',
- );
+ log.fine('Bad discovery document. userinfo_endpoint not a String');
return null;
}
final userInfoRequest = await client.get(Uri.parse(userInfoEndpoint));
diff --git a/lib/src/command/outdated.dart b/lib/src/command/outdated.dart
index 424bad4..ac74da6 100644
--- a/lib/src/command/outdated.dart
+++ b/lib/src/command/outdated.dart
@@ -99,7 +99,8 @@
argParser.addFlag(
'up-to-date',
hide: true,
- help: 'Include dependencies that are already at the '
+ help:
+ 'Include dependencies that are already at the '
'latest version. Alias of --show-all.',
);
argParser.addFlag(
@@ -126,23 +127,27 @@
final includeDevDependencies = argResults.flag('dev-dependencies');
final includeDependencyOverrides = argResults.flag('dependency-overrides');
if (argResults.flag('json') && argResults.wasParsed('transitive')) {
- usageException('Cannot specify both `--json` and `--transitive`\n'
- 'The json report always includes transitive dependencies.');
+ usageException(
+ 'Cannot specify both `--json` and `--transitive`\n'
+ 'The json report always includes transitive dependencies.',
+ );
}
/// The workspace root with dependency overrides removed if requested.
- final baseWorkspace = includeDependencyOverrides
- ? entrypoint.workspaceRoot
- : entrypoint.workspaceRoot.transformWorkspace(
- (package) => stripDependencyOverrides(package.pubspec),
- );
+ final baseWorkspace =
+ includeDependencyOverrides
+ ? entrypoint.workspaceRoot
+ : entrypoint.workspaceRoot.transformWorkspace(
+ (package) => stripDependencyOverrides(package.pubspec),
+ );
/// [baseWorkspace] with dev-dependencies removed if requested.
- final upgradableWorkspace = includeDevDependencies
- ? baseWorkspace
- : baseWorkspace.transformWorkspace(
- (package) => stripDevDependencies(package.pubspec),
- );
+ final upgradableWorkspace =
+ includeDevDependencies
+ ? baseWorkspace
+ : baseWorkspace.transformWorkspace(
+ (package) => stripDevDependencies(package.pubspec),
+ );
/// [upgradableWorkspace] with upper bounds removed.
final resolvableWorkspace = upgradableWorkspace.transformWorkspace(
@@ -153,27 +158,23 @@
late bool hasUpgradableResolution;
late bool hasResolvableResolution;
- await log.spinner(
- 'Resolving',
- () async {
- final upgradablePackagesResult = await _tryResolve(
- upgradableWorkspace,
- cache,
- lockFile: entrypoint.lockFile,
- );
- hasUpgradableResolution = upgradablePackagesResult != null;
- upgradablePackages = upgradablePackagesResult ?? [];
+ await log.spinner('Resolving', () async {
+ final upgradablePackagesResult = await _tryResolve(
+ upgradableWorkspace,
+ cache,
+ lockFile: entrypoint.lockFile,
+ );
+ hasUpgradableResolution = upgradablePackagesResult != null;
+ upgradablePackages = upgradablePackagesResult ?? [];
- final resolvablePackagesResult = await _tryResolve(
- resolvableWorkspace,
- cache,
- lockFile: entrypoint.lockFile,
- );
- hasResolvableResolution = resolvablePackagesResult != null;
- resolvablePackages = resolvablePackagesResult ?? [];
- },
- condition: _shouldShowSpinner,
- );
+ final resolvablePackagesResult = await _tryResolve(
+ resolvableWorkspace,
+ cache,
+ lockFile: entrypoint.lockFile,
+ );
+ hasResolvableResolution = resolvablePackagesResult != null;
+ resolvablePackages = resolvablePackagesResult ?? [];
+ }, condition: _shouldShowSpinner);
// This list will be empty if there is no lock file.
final currentPackages = entrypoint.lockFile.packages.values;
@@ -200,10 +201,12 @@
final name = packageRef.name;
final current = entrypoint.lockFile.packages[name];
- final upgradable =
- upgradablePackages.firstWhereOrNull((id) => id.name == name);
- final resolvable =
- resolvablePackages.firstWhereOrNull((id) => id.name == name);
+ final upgradable = upgradablePackages.firstWhereOrNull(
+ (id) => id.name == name,
+ );
+ final resolvable = resolvablePackages.firstWhereOrNull(
+ (id) => id.name == name,
+ );
// Find the latest version, and if it's overridden.
var latestIsOverridden = false;
@@ -218,15 +221,15 @@
}
// If present as a dependency or dev_dependency we use this
latest ??= await cache.getLatest(
- allDependencies(baseWorkspace)
- .firstWhereOrNull((r) => r.name == name)
- ?.toRef(),
+ allDependencies(
+ baseWorkspace,
+ ).firstWhereOrNull((r) => r.name == name)?.toRef(),
allowPrereleases: prereleases,
);
latest ??= await cache.getLatest(
- allDevDependencies(baseWorkspace)
- .firstWhereOrNull((r) => r.name == name)
- ?.toRef(),
+ allDevDependencies(
+ baseWorkspace,
+ ).firstWhereOrNull((r) => r.name == name)?.toRef(),
allowPrereleases: prereleases,
);
// If not overridden and present in either upgradable or resolvable we
@@ -267,8 +270,12 @@
);
final id = current ?? upgradable ?? resolvable ?? latest;
- var packageAdvisories = await id?.source
- .getAdvisoriesForPackage(id, cache, const Duration(days: 3)) ??
+ var packageAdvisories =
+ await id?.source.getAdvisoriesForPackage(
+ id,
+ cache,
+ const Duration(days: 3),
+ ) ??
[];
final discontinued =
@@ -303,15 +310,15 @@
if (currentVersionDetails != null) {
// Filter out advisories added to `ignored_advisories` in the root
// pubspec.
- packageAdvisories = packageAdvisories
- .where(
- (adv) => entrypoint.workspaceRoot.pubspec.ignoredAdvisories
- .intersection({
- ...adv.aliases,
- adv.id,
- }).isEmpty,
- )
- .toList();
+ packageAdvisories =
+ packageAdvisories
+ .where(
+ (adv) =>
+ entrypoint.workspaceRoot.pubspec.ignoredAdvisories
+ .intersection({...adv.aliases, adv.id})
+ .isEmpty,
+ )
+ .toList();
for (final advisory in packageAdvisories) {
if (advisory.affectedVersions.contains(
currentVersionDetails._pubspec.version.canonicalizedVersion,
@@ -340,8 +347,9 @@
final rows = <_PackageDetails>[];
final visited = {
- ...entrypoint.workspaceRoot.transitiveWorkspace
- .map((package) => package.name),
+ ...entrypoint.workspaceRoot.transitiveWorkspace.map(
+ (package) => package.name,
+ ),
};
// Add all dependencies from the lockfile.
for (final id in [
@@ -421,11 +429,7 @@
if (id == null) {
return null;
}
- return _VersionDetails(
- await cache.describe(id),
- id,
- isOverridden,
- );
+ return _VersionDetails(await cache.describe(id), id, isOverridden);
}
/// Computes the closure of the graph of dependencies (not including
@@ -488,8 +492,10 @@
required bool showAll,
required bool includeDevDependencies,
}) async {
- final markedRows =
- Map.fromIterables(rows, await mode.markVersionDetails(rows));
+ final markedRows = Map.fromIterables(
+ rows,
+ await mode.markVersionDetails(rows),
+ );
if (!showAll) {
rows.removeWhere((row) => row.isLatest);
}
@@ -510,26 +516,24 @@
}
log.message(
- const JsonEncoder.withIndent(' ').convert(
- {
- 'packages': [
- ...(rows..sort((a, b) => a.name.compareTo(b.name))).map(
- (packageDetails) => {
- 'package': packageDetails.name,
- 'kind': kindString(packageDetails.kind),
- 'isDiscontinued': packageDetails.isDiscontinued,
- 'isCurrentRetracted': packageDetails.isCurrentRetracted,
- 'isCurrentAffectedByAdvisory':
- packageDetails.isCurrentAffectedBySecurityAdvisory,
- 'current': markedRows[packageDetails]![0].toJson(),
- 'upgradable': markedRows[packageDetails]![1].toJson(),
- 'resolvable': markedRows[packageDetails]![2].toJson(),
- 'latest': markedRows[packageDetails]![3].toJson(),
- },
- ),
- ],
- },
- ),
+ const JsonEncoder.withIndent(' ').convert({
+ 'packages': [
+ ...(rows..sort((a, b) => a.name.compareTo(b.name))).map(
+ (packageDetails) => {
+ 'package': packageDetails.name,
+ 'kind': kindString(packageDetails.kind),
+ 'isDiscontinued': packageDetails.isDiscontinued,
+ 'isCurrentRetracted': packageDetails.isCurrentRetracted,
+ 'isCurrentAffectedByAdvisory':
+ packageDetails.isCurrentAffectedBySecurityAdvisory,
+ 'current': markedRows[packageDetails]![0].toJson(),
+ 'upgradable': markedRows[packageDetails]![1].toJson(),
+ 'resolvable': markedRows[packageDetails]![2].toJson(),
+ 'latest': markedRows[packageDetails]![3].toJson(),
+ },
+ ),
+ ],
+ }),
);
}
@@ -549,13 +553,15 @@
}) async {
final directoryDesc = directory == '.' ? '' : ' in $directory';
log.message('${mode.explanation(directoryDesc)}\n');
- final markedRows =
- Map.fromIterables(rows, await mode.markVersionDetails(rows));
+ final markedRows = Map.fromIterables(
+ rows,
+ await mode.markVersionDetails(rows),
+ );
List<FormattedString> formatted(_PackageDetails package) => [
- FormattedString(package.name),
- ...markedRows[package]!.map((m) => m.toHuman()),
- ];
+ FormattedString(package.name),
+ ...markedRows[package]!.map((m) => m.toHuman()),
+ ];
if (!showAll) {
rows.removeWhere((row) => row.isLatest);
@@ -570,15 +576,21 @@
final directRows = rows.where(hasKind(_DependencyKind.direct)).map(formatted);
final devRows = rows.where(hasKind(_DependencyKind.dev)).map(formatted);
- final transitiveRows =
- rows.where(hasKind(_DependencyKind.transitive)).map(formatted);
- final devTransitiveRows =
- rows.where(hasKind(_DependencyKind.devTransitive)).map(formatted);
+ final transitiveRows = rows
+ .where(hasKind(_DependencyKind.transitive))
+ .map(formatted);
+ final devTransitiveRows = rows
+ .where(hasKind(_DependencyKind.devTransitive))
+ .map(formatted);
final formattedRows = <List<FormattedString>>[
- ['Package Name', 'Current', 'Upgradable', 'Resolvable', 'Latest']
- .map((s) => format(s, log.bold))
- .toList(),
+ [
+ 'Package Name',
+ 'Current',
+ 'Upgradable',
+ 'Resolvable',
+ 'Latest',
+ ].map((s) => format(s, log.bold)).toList(),
if (hasDirectDependencies) ...[
[
if (directRows.isEmpty)
@@ -613,44 +625,44 @@
log.message(line);
}
- final upgradable = rows.where(
- (row) {
- final current = row.current;
- final upgradable = row.upgradable;
- return current != null &&
- upgradable != null &&
- current < upgradable &&
- // Include transitive only, if we show them
- (showTransitiveDependencies ||
- hasKind(_DependencyKind.direct)(row) ||
- hasKind(_DependencyKind.dev)(row));
- },
- ).length;
+ final upgradable =
+ rows.where((row) {
+ final current = row.current;
+ final upgradable = row.upgradable;
+ return current != null &&
+ upgradable != null &&
+ current < upgradable &&
+ // Include transitive only, if we show them
+ (showTransitiveDependencies ||
+ hasKind(_DependencyKind.direct)(row) ||
+ hasKind(_DependencyKind.dev)(row));
+ }).length;
- final notAtResolvable = rows.where(
- (row) {
- final current = row.current;
- final upgradable = row.upgradable;
- final resolvable = row.resolvable;
- return (current != null || !lockFileExists) &&
- resolvable != null &&
- upgradable != null &&
- upgradable < resolvable &&
- // Include transitive only, if we show them
- (showTransitiveDependencies ||
- hasKind(_DependencyKind.direct)(row) ||
- hasKind(_DependencyKind.dev)(row));
- },
- ).length;
+ final notAtResolvable =
+ rows.where((row) {
+ final current = row.current;
+ final upgradable = row.upgradable;
+ final resolvable = row.resolvable;
+ return (current != null || !lockFileExists) &&
+ resolvable != null &&
+ upgradable != null &&
+ upgradable < resolvable &&
+ // Include transitive only, if we show them
+ (showTransitiveDependencies ||
+ hasKind(_DependencyKind.direct)(row) ||
+ hasKind(_DependencyKind.dev)(row));
+ }).length;
if (!hasUpgradableResolution || !hasResolvableResolution) {
log.message(mode.noResolutionText);
} else if (lockFileExists) {
if (upgradable != 0) {
if (upgradable == 1) {
- log.message('\n1 upgradable dependency is locked (in pubspec.lock) to '
- 'an older version.\n'
- 'To update it, use `$topLevelProgram pub upgrade`.');
+ log.message(
+ '\n1 upgradable dependency is locked (in pubspec.lock) to '
+ 'an older version.\n'
+ 'To update it, use `$topLevelProgram pub upgrade`.',
+ );
} else {
log.message(
'\n$upgradable upgradable dependencies are locked '
@@ -673,36 +685,45 @@
log.message(mode.allSafe);
}
} else {
- log.message('\nNo pubspec.lock found. There are no Current versions.\n'
- 'Run `$topLevelProgram pub get` to create a pubspec.lock '
- 'with versions matching your '
- 'pubspec.yaml.');
+ log.message(
+ '\nNo pubspec.lock found. There are no Current versions.\n'
+ 'Run `$topLevelProgram pub get` to create a pubspec.lock '
+ 'with versions matching your '
+ 'pubspec.yaml.',
+ );
}
if (notAtResolvable != 0) {
if (notAtResolvable == 1) {
- log.message('\n1 dependency is constrained to a '
- 'version that is older than a resolvable version.\n'
- 'To update it, ${mode.upgradeConstrained}.');
+ log.message(
+ '\n1 dependency is constrained to a '
+ 'version that is older than a resolvable version.\n'
+ 'To update it, ${mode.upgradeConstrained}.',
+ );
} else {
- log.message('\n$notAtResolvable dependencies are constrained to '
- 'versions that are older than a resolvable version.\n'
- 'To update these dependencies, ${mode.upgradeConstrained}.');
+ log.message(
+ '\n$notAtResolvable dependencies are constrained to '
+ 'versions that are older than a resolvable version.\n'
+ 'To update these dependencies, ${mode.upgradeConstrained}.',
+ );
}
}
List<Advisory> advisoriesWithAffectedVersions(_PackageDetails package) {
return package.advisories
.where(
- (advisory) => advisory.affectedVersions
- .intersection(
- [
- package.current,
- package.upgradable,
- package.resolvable,
- package.latest,
- ].map((e) => e?._pubspec.version.canonicalizedVersion).toSet(),
- )
- .isNotEmpty,
+ (advisory) =>
+ advisory.affectedVersions
+ .intersection(
+ [
+ package.current,
+ package.upgradable,
+ package.resolvable,
+ package.latest,
+ ]
+ .map((e) => e?._pubspec.version.canonicalizedVersion)
+ .toSet(),
+ )
+ .isNotEmpty,
)
.toList();
}
@@ -721,9 +742,10 @@
for (var package in rows.where(displayExtraInfo)) {
log.message(log.bold(package.name));
if (package.isDiscontinued) {
- final replacedByText = package.discontinuedReplacedBy != null
- ? ', replaced by ${package.discontinuedReplacedBy}.'
- : '.';
+ final replacedByText =
+ package.discontinuedReplacedBy != null
+ ? ', replaced by ${package.discontinuedReplacedBy}.'
+ : '.';
log.message(
' Package ${package.name} has been discontinued$replacedByText '
'See https://dart.dev/go/package-discontinue',
@@ -737,9 +759,10 @@
}
final displayedAdvisories = advisoriesToDisplay[package.name]!;
if (displayedAdvisories.isNotEmpty) {
- final advisoriesText = displayedAdvisories.length > 1
- ? 'security advisories'
- : 'a security advisory';
+ final advisoriesText =
+ displayedAdvisories.length > 1
+ ? 'security advisories'
+ : 'a security advisory';
log.message(
' Package ${package.name} is affected by $advisoriesText. '
'See https://dart.dev//go/pub-security-advisories',
@@ -800,7 +823,8 @@
'''No resolution was found. Try running `$topLevelProgram pub upgrade --dry-run` to explore why.''';
@override
- String get upgradeConstrained => 'edit pubspec.yaml, or run '
+ String get upgradeConstrained =>
+ 'edit pubspec.yaml, or run '
'`$topLevelProgram pub upgrade --major-versions`';
@override
@@ -837,13 +861,14 @@
}
}
final advisories = packageDetails.advisories;
- final hasAdvisory = advisories
- .where(
- (advisory) => advisory.affectedVersions.contains(
- versionDetails._pubspec.version.canonicalizedVersion,
- ),
- )
- .isNotEmpty;
+ final hasAdvisory =
+ advisories
+ .where(
+ (advisory) => advisory.affectedVersions.contains(
+ versionDetails._pubspec.version.canonicalizedVersion,
+ ),
+ )
+ .isNotEmpty;
if (hasAdvisory) {
suffix = '${suffix ?? ''} (advisory)';
}
@@ -900,9 +925,9 @@
}
Map<String, Object> toJson() => {
- 'version': _pubspec.version.toString(),
- if (_overridden) 'overridden': true,
- };
+ 'version': _pubspec.version.toString(),
+ if (_overridden) 'overridden': true,
+ };
@override
bool operator ==(Object other) =>
@@ -1024,18 +1049,18 @@
String? prefix = '',
String? suffix = '',
MapEntry<String, Object>? jsonExplanation,
- }) : _format = format,
- _prefix = prefix,
- _suffix = suffix,
- _jsonExplanation = jsonExplanation;
+ }) : _format = format,
+ _prefix = prefix,
+ _suffix = suffix,
+ _jsonExplanation = jsonExplanation;
@override
FormattedString toHuman() => FormattedString(
- _versionDetails?.describe ?? '-',
- format: _format,
- prefix: _prefix,
- suffix: _suffix,
- );
+ _versionDetails?.describe ?? '-',
+ format: _format,
+ prefix: _prefix,
+ suffix: _suffix,
+ );
@override
Object? toJson() {
@@ -1057,15 +1082,17 @@
/// Whether the package [name] is depended on directly anywhere in the workspace
/// rooted at [workspaceRoot].
bool hasDependency(Package workspaceRoot, String name) {
- return workspaceRoot.transitiveWorkspace
- .any((p) => p.dependencies.containsKey(name));
+ return workspaceRoot.transitiveWorkspace.any(
+ (p) => p.dependencies.containsKey(name),
+ );
}
/// Whether the package [name] is dev-depended on directly anywhere in the
/// workspace rooted at [workspaceRoot].
bool hasDevDependency(Package workspaceRoot, String name) {
- return workspaceRoot.transitiveWorkspace
- .any((p) => p.devDependencies.containsKey(name));
+ return workspaceRoot.transitiveWorkspace.any(
+ (p) => p.devDependencies.containsKey(name),
+ );
}
Iterable<PackageRange> allDependencies(Package workspaceRoot) =>
diff --git a/lib/src/command/remove.dart b/lib/src/command/remove.dart
index a1425c9..32e7590 100644
--- a/lib/src/command/remove.dart
+++ b/lib/src/command/remove.dart
@@ -90,7 +90,9 @@
final workPubspec = entrypoint.workPackage.pubspec;
final newPubspec = _removePackagesFromPubspec(workPubspec, targets);
- await entrypoint.withWorkPubspec(newPubspec).acquireDependencies(
+ await entrypoint
+ .withWorkPubspec(newPubspec)
+ .acquireDependencies(
SolveType.get,
precompile: !isDryRun && argResults.flag('precompile'),
dryRun: isDryRun,
@@ -133,21 +135,24 @@
void _writeRemovalToPubspec(Iterable<_PackageRemoval> packages) {
ArgumentError.checkNotNull(packages, 'packages');
- final yamlEditor =
- YamlEditor(readTextFile(entrypoint.workPackage.pubspecPath));
+ final yamlEditor = YamlEditor(
+ readTextFile(entrypoint.workPackage.pubspecPath),
+ );
for (final package in packages) {
- final dependencyKeys = package.removeFromOverride
- ? ['dependency_overrides']
- : ['dependencies', 'dev_dependencies'];
+ final dependencyKeys =
+ package.removeFromOverride
+ ? ['dependency_overrides']
+ : ['dependencies', 'dev_dependencies'];
var found = false;
final name = package.name;
/// There may be packages where the dependency is declared both in
/// dependencies and dev_dependencies - remove it from both in that case.
for (final dependencyKey in dependencyKeys) {
- final dependenciesNode = yamlEditor
- .parseAt([dependencyKey], orElse: () => YamlScalar.wrap(null));
+ final dependenciesNode = yamlEditor.parseAt([
+ dependencyKey,
+ ], orElse: () => YamlScalar.wrap(null));
if (dependenciesNode is YamlMap && dependenciesNode.containsKey(name)) {
yamlEditor.remove([dependencyKey, name]);
@@ -161,9 +166,7 @@
}
if (!found) {
final pubspecPath = entrypoint.workPackage.pubspecPath;
- log.warning(
- 'Package "$name" was not found in $pubspecPath!',
- );
+ log.warning('Package "$name" was not found in $pubspecPath!');
}
}
diff --git a/lib/src/command/run.dart b/lib/src/command/run.dart
index 3373ed7..55ca865 100644
--- a/lib/src/command/run.dart
+++ b/lib/src/command/run.dart
@@ -30,15 +30,13 @@
final bool deprecated;
final bool alwaysUseSubprocess;
- RunCommand({
- this.deprecated = false,
- this.alwaysUseSubprocess = false,
- }) {
+ RunCommand({this.deprecated = false, this.alwaysUseSubprocess = false}) {
argParser.addFlag('enable-asserts', help: 'Enable assert statements.');
argParser.addFlag('checked', abbr: 'c', hide: true);
argParser.addMultiOption(
'enable-experiment',
- help: 'Runs the executable in a VM with the given experiments enabled.\n'
+ help:
+ 'Runs the executable in a VM with the given experiments enabled.\n'
'(Will disable snapshotting, resulting in slower startup).',
valueHelp: 'experiment',
);
@@ -105,9 +103,10 @@
args,
enableAsserts:
argResults.flag('enable-asserts') || argResults.flag('checked'),
- recompile: (executable) => log.errorsOnlyUnlessTerminal(
- () => entrypoint.precompileExecutable(executable),
- ),
+ recompile:
+ (executable) => log.errorsOnlyUnlessTerminal(
+ () => entrypoint.precompileExecutable(executable),
+ ),
vmArgs: vmArgs,
alwaysUseSubprocess: alwaysUseSubprocess,
);
diff --git a/lib/src/command/token_add.dart b/lib/src/command/token_add.dart
index 0d4a2c1..9359e81 100644
--- a/lib/src/command/token_add.dart
+++ b/lib/src/command/token_add.dart
@@ -42,7 +42,8 @@
TokenAddCommand() {
argParser.addOption(
'env-var',
- help: 'Read the secret token from this environment variable when '
+ help:
+ 'Read the secret token from this environment variable when '
'making requests.',
valueHelp: 'VARIABLE',
);
@@ -61,11 +62,16 @@
try {
final hostedUrl = validateAndNormalizeHostedUrl(rawHostedUrl);
- final isLocalhost =
- ['localhost', '127.0.0.1', '::1'].contains(hostedUrl.host);
+ final isLocalhost = [
+ 'localhost',
+ '127.0.0.1',
+ '::1',
+ ].contains(hostedUrl.host);
if (!hostedUrl.isScheme('HTTPS') && !isLocalhost) {
- throw const FormatException('url must be https://, '
- 'insecure repositories cannot use authentication.');
+ throw const FormatException(
+ 'url must be https://; '
+ 'insecure repositories cannot use authentication.',
+ );
}
if (envVar == null) {
@@ -74,8 +80,10 @@
await _addEnvVarToken(hostedUrl, envVar!);
}
} on FormatException catch (e) {
- usageException('Invalid [hosted-url]: "$rawHostedUrl"\n'
- '${e.message}');
+ usageException(
+ 'Invalid [hosted-url]: "$rawHostedUrl"\n'
+ '${e.message}',
+ );
}
}
@@ -86,8 +94,10 @@
}
if (!Credential.isValidBearerToken(token)) {
- dataError('The entered token is not a valid Bearer token. '
- 'A token may only contain `a-zA-Z0-9._~+/=-`');
+ dataError(
+ 'The entered token is not a valid Bearer token. '
+ 'A token may only contain `a-zA-Z0-9._~+/=-`',
+ );
}
tokenStore.addCredential(Credential.token(hostedUrl, token));
diff --git a/lib/src/command/token_remove.dart b/lib/src/command/token_remove.dart
index 101214e..2e2db16 100644
--- a/lib/src/command/token_remove.dart
+++ b/lib/src/command/token_remove.dart
@@ -58,8 +58,10 @@
);
}
} on FormatException catch (e) {
- usageException('Invalid [hosted-url]: "${argResults.rest.first}"\n'
- '${e.message}');
+ usageException(
+ 'Invalid [hosted-url]: "${argResults.rest.first}"\n'
+ '${e.message}',
+ );
}
}
}
diff --git a/lib/src/command/unpack.dart b/lib/src/command/unpack.dart
index 3ec9ddf..21d096b 100644
--- a/lib/src/command/unpack.dart
+++ b/lib/src/command/unpack.dart
@@ -105,8 +105,11 @@
if (parseResult.description is! HostedDescription) {
fail('Can only fetch hosted packages.');
}
- final versions = await parseResult.source
- .doGetVersions(parseResult.toRef(), null, cache);
+ final versions = await parseResult.source.doGetVersions(
+ parseResult.toRef(),
+ null,
+ cache,
+ );
final constraint = parseResult.constraint;
versions.removeWhere((id) => !constraint.allows(id.version));
if (versions.isEmpty) {
@@ -145,21 +148,17 @@
);
final buffer = StringBuffer();
if (pubspec.resolution != Resolution.none) {
- log.message(
- '''
+ log.message('''
This package was developed as part of a workspace.
-Creating `pubspec_overrides.yaml` to resolve it alone.''',
- );
+Creating `pubspec_overrides.yaml` to resolve it alone.''');
buffer.writeln('resolution:');
}
if (pubspec.dependencyOverrides.isNotEmpty) {
- log.message(
- '''
+ log.message('''
This package was developed with dependency_overrides.
-Creating `pubspec_overrides.yaml` to resolve it without those overrides.''',
- );
+Creating `pubspec_overrides.yaml` to resolve it without those overrides.''');
buffer.writeln('dependency_overrides:');
}
if (buffer.isNotEmpty) {
@@ -168,29 +167,23 @@
buffer.toString(),
);
}
- final e = Entrypoint(
- destinationDir,
- cache,
- );
+ final e = Entrypoint(destinationDir, cache);
await e.acquireDependencies(SolveType.get);
} finally {
log.message('To explore, type: cd $destinationDir');
final exampleDir = p.join(destinationDir, 'example');
if (dirExists(exampleDir)) {
- log.message(
- 'To explore the example type: cd $exampleDir',
- );
+ log.message('To explore the example, type: cd $exampleDir');
}
}
}
}
- PackageRange _parseDescriptor(
- String packageName,
- String? descriptor,
- ) {
- late final defaultDescription =
- HostedDescription(packageName, cache.hosted.defaultUrl);
+ PackageRange _parseDescriptor(String packageName, String? descriptor) {
+ late final defaultDescription = HostedDescription(
+ packageName,
+ cache.hosted.defaultUrl,
+ );
if (descriptor == null) {
return PackageRange(
PackageRef(packageName, defaultDescription),
diff --git a/lib/src/command/upgrade.dart b/lib/src/command/upgrade.dart
index edd7eb6..5626dde 100644
--- a/lib/src/command/upgrade.dart
+++ b/lib/src/command/upgrade.dart
@@ -72,14 +72,16 @@
argParser.addFlag(
'unlock-transitive',
- help: 'Also upgrades the transitive dependencies '
+ help:
+ 'Also upgrades the transitive dependencies '
'of the listed [dependencies]',
negatable: false,
);
argParser.addFlag(
'major-versions',
- help: 'Upgrades packages to their latest resolvable versions, '
+ help:
+ 'Upgrades packages to their latest resolvable versions, '
'and updates pubspec.yaml.',
negatable: false,
);
@@ -164,8 +166,9 @@
'Running `upgrade --tighten` only in `${entrypoint.workspaceRoot.dir}`. Run `$topLevelProgram pub upgrade --tighten --directory example/` separately.',
);
}
- final changes =
- entrypoint.tighten(packagesToUpgrade: await _packagesToUpgrade);
+ final changes = entrypoint.tighten(
+ packagesToUpgrade: await _packagesToUpgrade,
+ );
entrypoint.applyChanges(changes, _dryRun);
}
}
@@ -197,12 +200,14 @@
Future<List<String>> _directDependenciesToUpgrade() async {
assert(_upgradeMajorVersions);
- final directDeps = {
- for (final package in entrypoint.workspaceRoot.transitiveWorkspace) ...[
- ...package.dependencies.keys,
- ...package.devDependencies.keys,
- ],
- }.toList();
+ final directDeps =
+ {
+ for (final package
+ in entrypoint.workspaceRoot.transitiveWorkspace) ...[
+ ...package.dependencies.keys,
+ ...package.devDependencies.keys,
+ ],
+ }.toList();
final packagesToUpgrade = await _packagesToUpgrade;
final toUpgrade =
packagesToUpgrade.isEmpty ? directDeps : packagesToUpgrade;
@@ -226,19 +231,15 @@
// Solve [resolvablePubspec] in-memory and consolidate the resolved
// versions of the packages into a map for quick searching.
final resolvedPackages = <String, PackageId>{};
- final solveResult = await log.spinner(
- 'Resolving dependencies',
- () async {
- return await resolveVersions(
- SolveType.upgrade,
- cache,
- entrypoint.workspaceRoot.transformWorkspace(
- (package) => stripVersionBounds(package.pubspec),
- ),
- );
- },
- condition: _shouldShowSpinner,
- );
+ final solveResult = await log.spinner('Resolving dependencies', () async {
+ return await resolveVersions(
+ SolveType.upgrade,
+ cache,
+ entrypoint.workspaceRoot.transformWorkspace(
+ (package) => stripVersionBounds(package.pubspec),
+ ),
+ );
+ }, condition: _shouldShowSpinner);
for (final resolvedPackage in solveResult.packages) {
resolvedPackages[resolvedPackage.name] = resolvedPackage;
}
@@ -260,8 +261,9 @@
}
// Skip [dep] if it has a dependency_override.
- if (entrypoint.workspaceRoot.pubspec.dependencyOverrides
- .containsKey(dep.name)) {
+ if (entrypoint.workspaceRoot.pubspec.dependencyOverrides.containsKey(
+ dep.name,
+ )) {
dependencyOverriddenDeps.add(dep.name);
continue;
}
@@ -273,8 +275,8 @@
}
(changes[package] ??= {})[dep] = dep.toRef().withConstraint(
- VersionConstraint.compatibleWith(resolvedPackage.version),
- );
+ VersionConstraint.compatibleWith(resolvedPackage.version),
+ );
}
}
@@ -306,21 +308,24 @@
(await _packagesToUpgrade).isEmpty ? SolveType.upgrade : SolveType.get;
entrypoint.applyChanges(changes, _dryRun);
- await entrypoint.withUpdatedRootPubspecs({
- for (final MapEntry(key: package, value: changesForPackage)
- in changes.entries)
- package: applyChanges(package.pubspec, changesForPackage),
- }).acquireDependencies(
- solveType,
- dryRun: _dryRun,
- precompile: !_dryRun && _precompile,
- unlock: await _packagesToUpgrade,
- );
+ await entrypoint
+ .withUpdatedRootPubspecs({
+ for (final MapEntry(key: package, value: changesForPackage)
+ in changes.entries)
+ package: applyChanges(package.pubspec, changesForPackage),
+ })
+ .acquireDependencies(
+ solveType,
+ dryRun: _dryRun,
+ precompile: !_dryRun && _precompile,
+ unlock: await _packagesToUpgrade,
+ );
// If any of the packages to upgrade are dependency overrides, then we
// show a warning.
- final toUpgradeOverrides = toUpgrade
- .where(entrypoint.workspaceRoot.allOverridesInWorkspace.containsKey);
+ final toUpgradeOverrides = toUpgrade.where(
+ entrypoint.workspaceRoot.allOverridesInWorkspace.containsKey,
+ );
if (toUpgradeOverrides.isNotEmpty) {
log.warning(
'Warning: dependency_overrides prevents upgrades for: '
@@ -353,8 +358,10 @@
void _showOfflineWarning() {
if (isOffline) {
- log.warning('Warning: Upgrading when offline may not update you to the '
- 'latest versions of your dependencies.');
+ log.warning(
+ 'Warning: Upgrading when offline may not update you to the '
+ 'latest versions of your dependencies.',
+ );
}
}
}
diff --git a/lib/src/command/uploader.dart b/lib/src/command/uploader.dart
index 86fc2c9..d163c0b 100644
--- a/lib/src/command/uploader.dart
+++ b/lib/src/command/uploader.dart
@@ -34,7 +34,8 @@
);
argParser.addOption(
'package',
- help: 'The package whose uploaders will be modified.\n'
+ help:
+ 'The package whose uploaders will be modified.\n'
'(defaults to the current package)',
);
argParser.addOption(
diff --git a/lib/src/command/workspace_list.dart b/lib/src/command/workspace_list.dart
index b48716b..7d111e9 100644
--- a/lib/src/command/workspace_list.dart
+++ b/lib/src/command/workspace_list.dart
@@ -42,20 +42,17 @@
}),
);
} else {
- for (final line in renderTable(
- [
- [format('Package', bold), format('Path', bold)],
- for (final package in entrypoint.workspaceRoot.transitiveWorkspace)
- [
- format(package.name, (x) => x),
- format(
- '${p.relative(p.absolute(package.dir))}${p.separator}',
- (x) => x,
- ),
- ],
- ],
- canUseAnsiCodes,
- )) {
+ for (final line in renderTable([
+ [format('Package', bold), format('Path', bold)],
+ for (final package in entrypoint.workspaceRoot.transitiveWorkspace)
+ [
+ format(package.name, (x) => x),
+ format(
+ '${p.relative(p.absolute(package.dir))}${p.separator}',
+ (x) => x,
+ ),
+ ],
+ ], canUseAnsiCodes)) {
message(line);
}
}
diff --git a/lib/src/command_runner.dart b/lib/src/command_runner.dart
index 22d4ade..9ced67e 100644
--- a/lib/src/command_runner.dart
+++ b/lib/src/command_runner.dart
@@ -41,8 +41,8 @@
/// 'flutter' if we are running inside `flutter pub`, 'dart' otherwise.
String topLevelProgram = _isRunningInsideFlutter ? 'flutter' : 'dart';
-bool _isRunningInsideFlutter =
- (Platform.environment['PUB_ENVIRONMENT'] ?? '').contains('flutter_cli');
+bool _isRunningInsideFlutter = (Platform.environment['PUB_ENVIRONMENT'] ?? '')
+ .contains('flutter_cli');
class PubCommandRunner extends CommandRunner<int> implements PubTopLevel {
@override
@@ -100,11 +100,11 @@
'See https://dart.dev/tools/pub/cmd for detailed documentation.';
PubCommandRunner()
- : super(
- 'pub',
- 'Pub is a package manager for Dart.',
- usageLineLength: lineLength,
- ) {
+ : super(
+ 'pub',
+ 'Pub is a package manager for Dart.',
+ usageLineLength: lineLength,
+ ) {
argParser.addFlag('version', negatable: false, help: 'Print pub version.');
argParser.addFlag(
'trace',
@@ -214,9 +214,11 @@
}
if (depsRev == actualRev) return;
- log.warning("${log.yellow('Warning:')} the revision of pub in DEPS is "
- '${log.bold(depsRev.toString())},\n'
- 'but ${log.bold(actualRev)} is checked out in '
- '${p.relative(pubRoot)}.\n\n');
+ log.warning(
+ "${log.yellow('Warning:')} the revision of pub in DEPS is "
+ '${log.bold(depsRev.toString())},\n'
+ 'but ${log.bold(actualRev)} is checked out in '
+ '${p.relative(pubRoot)}.\n\n',
+ );
}
}
diff --git a/lib/src/dart.dart b/lib/src/dart.dart
index b72f7d2..c79d8b7 100644
--- a/lib/src/dart.dart
+++ b/lib/src/dart.dart
@@ -27,14 +27,15 @@
final AnalysisSession _session;
factory AnalysisContextManager(String packagePath) => sessions.putIfAbsent(
- packagePath,
- () => AnalysisContextManager._(packagePath),
- );
+ packagePath,
+ () => AnalysisContextManager._(packagePath),
+ );
AnalysisContextManager._(this.packagePath)
- : _session = AnalysisContextCollection(
- includedPaths: [packagePath],
- ).contextFor(packagePath).currentSession;
+ : _session =
+ AnalysisContextCollection(
+ includedPaths: [packagePath],
+ ).contextFor(packagePath).currentSession;
/// Parse the file with the given [path] into AST.
///
@@ -123,8 +124,10 @@
tempDir = createTempDir(p.dirname(outputPath), 'tmp');
// To avoid potential races we copy the incremental data to a temporary file
// for just this compilation.
- final temporaryIncrementalDill =
- p.join(tempDir, '${p.basename(incrementalDillPath)}.temp');
+ final temporaryIncrementalDill = p.join(
+ tempDir,
+ '${p.basename(incrementalDillPath)}.temp',
+ );
try {
if (fileExists(outputPath)) {
copyFile(outputPath, temporaryIncrementalDill);
diff --git a/lib/src/entrypoint.dart b/lib/src/entrypoint.dart
index 61d564a..36719a1 100644
--- a/lib/src/entrypoint.dart
+++ b/lib/src/entrypoint.dart
@@ -108,19 +108,16 @@
if (pubspec.resolution == Resolution.none) {
root = Package.load(
dir,
- loadPubspec: (
- path, {
- expectedName,
- required withPubspecOverrides,
- }) =>
- pubspecsMet[p.canonicalize(path)] ??
- Pubspec.load(
- path,
- cache.sources,
- expectedName: expectedName,
- allowOverridesFile: withPubspecOverrides,
- containingDescription: RootDescription(path),
- ),
+ loadPubspec:
+ (path, {expectedName, required withPubspecOverrides}) =>
+ pubspecsMet[p.canonicalize(path)] ??
+ Pubspec.load(
+ path,
+ cache.sources,
+ expectedName: expectedName,
+ allowOverridesFile: withPubspecOverrides,
+ containingDescription: RootDescription(path),
+ ),
withPubspecOverrides: true,
);
for (final package in root.transitiveWorkspace) {
@@ -218,7 +215,8 @@
e.message,
e.span,
explanation: 'Failed parsing lock file:',
- hint: 'Consider deleting the file and running '
+ hint:
+ 'Consider deleting the file and running '
'`$topLevelProgram pub get` to recreate it.',
);
}
@@ -237,8 +235,10 @@
static PackageConfig _loadPackageConfig(String packageConfigPath) {
Never badPackageConfig() {
- dataError('The "$packageConfigPath" file is not recognized by '
- '"pub" version, please run "$topLevelProgram pub get".');
+ dataError(
+ 'The "$packageConfigPath" file is not recognized by '
+ '"pub" version, please run "$topLevelProgram pub get".',
+ );
}
final String packageConfigRaw;
@@ -252,9 +252,7 @@
}
final PackageConfig result;
try {
- result = PackageConfig.fromJson(
- json.decode(packageConfigRaw) as Object?,
- );
+ result = PackageConfig.fromJson(json.decode(packageConfigRaw) as Object?);
} on FormatException {
badPackageConfig();
}
@@ -312,11 +310,11 @@
/// The path to the directory containing dependency executable snapshots.
String get _snapshotPath => p.join(
- isCachedGlobal
- ? workspaceRoot.dir
- : p.join(workspaceRoot.dir, '.dart_tool/pub'),
- 'bin',
- );
+ isCachedGlobal
+ ? workspaceRoot.dir
+ : p.join(workspaceRoot.dir, '.dart_tool/pub'),
+ 'bin',
+ );
Entrypoint._(
this.workingDir,
@@ -333,11 +331,8 @@
/// If [checkInCache] is `true` (the default) an error will be thrown if
/// [workingDir] is located inside [cache]`.rootDir`.
- Entrypoint(
- this.workingDir,
- this.cache, {
- bool checkInCache = true,
- }) : isCachedGlobal = false {
+ Entrypoint(this.workingDir, this.cache, {bool checkInCache = true})
+ : isCachedGlobal = false {
if (checkInCache && p.isWithin(cache.rootDir, workingDir)) {
fail('Cannot operate on packages inside the cache.');
}
@@ -349,17 +344,13 @@
final newWorkspaceRoot = workspaceRoot.transformWorkspace(
(package) => updatedPubspecs[package] ?? package.pubspec,
);
- final newWorkPackage = newWorkspaceRoot.transitiveWorkspace
- .firstWhere((package) => package.dir == workPackage.dir);
- return Entrypoint._(
- workingDir,
- _lockFile,
- _example,
- _packageGraph,
- cache,
- (root: newWorkspaceRoot, work: newWorkPackage),
- isCachedGlobal,
+ final newWorkPackage = newWorkspaceRoot.transitiveWorkspace.firstWhere(
+ (package) => package.dir == workPackage.dir,
);
+ return Entrypoint._(workingDir, _lockFile, _example, _packageGraph, cache, (
+ root: newWorkspaceRoot,
+ work: newWorkPackage,
+ ), isCachedGlobal);
}
/// Creates an entrypoint at the same location, that will use [pubspec] for
@@ -375,12 +366,13 @@
this._lockFile,
this.cache, {
SolveResult? solveResult,
- }) : _packages = (root: package, work: package),
- workingDir = package.dir,
- isCachedGlobal = true {
+ }) : _packages = (root: package, work: package),
+ workingDir = package.dir,
+ isCachedGlobal = true {
if (solveResult != null) {
- _packageGraph =
- Future.value(PackageGraph.fromSolveResult(this, solveResult));
+ _packageGraph = Future.value(
+ PackageGraph.fromSolveResult(this, solveResult),
+ );
}
}
@@ -411,8 +403,11 @@
packageConfigPath,
await _packageConfigFile(
cache,
- entrypointSdkConstraint: workspaceRoot
- .pubspec.sdkConstraints[sdk.identifier]?.effectiveConstraint,
+ entrypointSdkConstraint:
+ workspaceRoot
+ .pubspec
+ .sdkConstraints[sdk.identifier]
+ ?.effectiveConstraint,
),
);
writeTextFile(packageGraphPath, await _packageGraphFile(cache));
@@ -421,22 +416,22 @@
final workspaceRefDir = p.join(package.dir, '.dart_tool', 'pub');
final workspaceRefPath = p.join(workspaceRefDir, 'workspace_ref.json');
ensureDir(workspaceRefDir);
- final relativeRootPath =
- p.relative(workspaceRoot.dir, from: workspaceRefDir);
- writeTextFile(
- workspaceRefPath,
- '${const JsonEncoder.withIndent(' ').convert({
- 'workspaceRoot': relativeRootPath,
- })}\n',
+ final relativeRootPath = p.relative(
+ workspaceRoot.dir,
+ from: workspaceRefDir,
);
+ final workspaceRef = const JsonEncoder.withIndent(
+ ' ',
+ ).convert({'workspaceRoot': relativeRootPath});
+ writeTextFile(workspaceRefPath, '$workspaceRef\n');
}
}
}
Future<String> _packageGraphFile(SystemCache cache) async {
return const JsonEncoder.withIndent(' ').convert({
- 'roots': workspaceRoot.transitiveWorkspace.map((p) => p.name).toList()
- ..sort(),
+ 'roots':
+ workspaceRoot.transitiveWorkspace.map((p) => p.name).toList()..sort(),
'packages': [
for (final p in workspaceRoot.transitiveWorkspace)
{
@@ -449,8 +444,8 @@
{
'name': p.name,
'version': p.version.toString(),
- 'dependencies': (await cache.describe(p)).dependencies.keys.toList()
- ..sort(),
+ 'dependencies':
+ (await cache.describe(p)).dependencies.keys.toList()..sort(),
},
],
'configVersion': 1,
@@ -519,8 +514,9 @@
},
);
- final jsonText =
- const JsonEncoder.withIndent(' ').convert(packageConfig.toJson());
+ final jsonText = const JsonEncoder.withIndent(
+ ' ',
+ ).convert(packageConfig.toJson());
return '$jsonText\n';
}
@@ -557,9 +553,10 @@
}) async {
workspaceRoot; // This will throw early if pubspec.yaml could not be found.
summaryOnly = summaryOnly || _summaryOnlyEnvironment;
- final suffix = workspaceRoot.dir == '.'
- ? ''
- : ' in `${workspaceRoot.presentationDir}`';
+ final suffix =
+ workspaceRoot.dir == '.'
+ ? ''
+ : ' in `${workspaceRoot.presentationDir}`';
if (enforceLockfile && !fileExists(lockFilePath)) {
throw ApplicationException('''
@@ -661,11 +658,13 @@
/// the package itself if they are immutable.
Future<List<Executable>> get _builtExecutables async {
final graph = await packageGraph;
- final r = workspaceRoot.immediateDependencies.keys.expand((packageName) {
- final package = graph.packages[packageName]!;
- return package.executablePaths
- .map((path) => Executable(packageName, path));
- }).toList();
+ final r =
+ workspaceRoot.immediateDependencies.keys.expand((packageName) {
+ final package = graph.packages[packageName]!;
+ return package.executablePaths.map(
+ (path) => Executable(packageName, path),
+ );
+ }).toList();
return r;
}
@@ -831,8 +830,9 @@
final pubspecPath = p.normalize(p.join(dir, 'pubspec.yaml'));
log.fine(
- 'The $pubspecPath file has changed since the $lockFilePath file '
- 'was generated.');
+ 'The $pubspecPath file has changed since the $lockFilePath file '
+ 'was generated.',
+ );
return false;
}
@@ -843,7 +843,11 @@
if (source is CachedSource) continue;
try {
- if (cache.load(id).dependencies.values.every(
+ if (cache
+ .load(id)
+ .dependencies
+ .values
+ .every(
(dep) =>
root.allOverridesInWorkspace.containsKey(dep.name) ||
isDependencyUpToDate(dep),
@@ -854,10 +858,14 @@
// If we can't load the pubspec, the user needs to re-run "pub get".
}
- final relativePubspecPath =
- p.join(cache.getDirectory(id, relativeFrom: '.'), 'pubspec.yaml');
- log.fine('$relativePubspecPath has '
- 'changed since the $lockFilePath file was generated.');
+ final relativePubspecPath = p.join(
+ cache.getDirectory(id, relativeFrom: '.'),
+ 'pubspec.yaml',
+ );
+ log.fine(
+ '$relativePubspecPath has '
+ 'changed since the $lockFilePath file was generated.',
+ );
return false;
}
return true;
@@ -885,10 +893,11 @@
// Check that [packagePathsMapping] does not contain more packages than
// what is required. This could lead to import statements working, when
// they are not supposed to work.
- final hasExtraMappings = !packagePathsMapping.keys.every((packageName) {
- return packageName == root.name ||
- lockFile.packages.containsKey(packageName);
- });
+ final hasExtraMappings =
+ !packagePathsMapping.keys.every((packageName) {
+ return packageName == root.name ||
+ lockFile.packages.containsKey(packageName);
+ });
if (hasExtraMappings) {
return false;
}
@@ -942,17 +951,21 @@
);
return false;
}
- packagePathsMapping[pkg.name] =
- root.path('.dart_tool', p.fromUri(pkg.rootUri));
+ packagePathsMapping[pkg.name] = root.path(
+ '.dart_tool',
+ p.fromUri(pkg.rootUri),
+ );
}
if (!isPackagePathsMappingUpToDateWithLockfile(
packagePathsMapping,
packageConfigPath: packageConfigPath,
lockFilePath: lockFilePath,
)) {
- log.fine('The $lockFilePath file has changed since the '
- '$packageConfigPath file '
- 'was generated, please run "$topLevelProgram pub get" again.');
+ log.fine(
+ 'The $lockFilePath file has changed since the '
+ '$packageConfigPath file '
+ 'was generated, please run "$topLevelProgram pub get" again.',
+ );
return false;
}
@@ -986,13 +999,17 @@
cache.getDirectory(id, relativeFrom: '.'),
'pubspec.yaml',
);
- log.fine('$relativePubspecPath has '
- 'changed since the $lockFilePath file was generated.');
+ log.fine(
+ '$relativePubspecPath has '
+ 'changed since the $lockFilePath file was generated.',
+ );
return false;
}
} on FileException {
- log.fine('Failed to read pubspec.yaml for "${pkg.name}", perhaps the '
- 'entry is missing.');
+ log.fine(
+ 'Failed to read pubspec.yaml for "${pkg.name}", perhaps the '
+ 'entry is missing.',
+ );
return false;
}
}
@@ -1041,8 +1058,9 @@
late final String packageConfigPath;
late final String rootDir;
for (final parent in parentDirs(dir)) {
- final potentialPackageConfigPath =
- p.normalize(p.join(parent, '.dart_tool', 'package_config.json'));
+ final potentialPackageConfigPath = p.normalize(
+ p.join(parent, '.dart_tool', 'package_config.json'),
+ );
packageConfigStat = tryStatFile(potentialPackageConfigPath);
if (packageConfigStat != null) {
@@ -1069,8 +1087,9 @@
return null;
} else {
try {
- if (jsonDecode(workspaceRefText)
- case {'workspaceRoot': final String path}) {
+ if (jsonDecode(workspaceRefText) case {
+ 'workspaceRoot': final String path,
+ }) {
final potentialPackageConfigPath2 = relativeIfNeeded(
p.normalize(
p.absolute(
@@ -1095,10 +1114,7 @@
rootDir = relativeIfNeeded(
p.normalize(
p.absolute(
- p.join(
- p.dirname(potentialWorkspaceRefPath),
- path,
- ),
+ p.join(p.dirname(potentialWorkspaceRefPath), path),
),
),
);
@@ -1113,9 +1129,7 @@
return null;
}
} on FormatException catch (e) {
- log.fine(
- '`$potentialWorkspaceRefPath` not valid json: $e.',
- );
+ log.fine('`$potentialWorkspaceRefPath` not valid json: $e.');
return null;
}
}
@@ -1151,9 +1165,10 @@
// sdk-packages, and therefore do a new resolution.
//
// This also counts if Flutter was introduced or removed.
- final flutterRoot = flutter.rootDirectory == null
- ? null
- : p.toUri(p.absolute(flutter.rootDirectory!)).toString();
+ final flutterRoot =
+ flutter.rootDirectory == null
+ ? null
+ : p.toUri(p.absolute(flutter.rootDirectory!)).toString();
if (packageConfig.additionalProperties['flutterRoot'] != flutterRoot) {
log.fine('Flutter has moved since last invocation.');
return null;
@@ -1216,8 +1231,10 @@
lockfileNewerThanPubspecs = false;
break;
}
- final pubspecOverridesPath =
- p.join(package.rootUri.path, 'pubspec_overrides.yaml');
+ final pubspecOverridesPath = p.join(
+ package.rootUri.path,
+ 'pubspec_overrides.yaml',
+ );
final pubspecOverridesStat = tryStatFile(pubspecOverridesPath);
if (pubspecOverridesStat != null) {
// This will wrongly require you to reresolve if a
@@ -1263,13 +1280,16 @@
return (packageConfig, rootDir);
}
- if (isResolutionUpToDate()
- case (final PackageConfig packageConfig, final String rootDir)) {
+ if (isResolutionUpToDate() case (
+ final PackageConfig packageConfig,
+ final String rootDir,
+ )) {
log.fine('Package Config up to date.');
return (packageConfig: packageConfig, rootDir: rootDir);
}
final entrypoint = Entrypoint(
- dir, cache,
+ dir,
+ cache,
// [ensureUpToDate] is also used for entries in 'global_packages/'
checkInCache: false,
);
@@ -1289,10 +1309,8 @@
return (
packageConfig: entrypoint.packageConfig,
rootDir: relativeIfNeeded(
- p.normalize(
- p.absolute(entrypoint.workspaceRoot.dir),
- ),
- )
+ p.normalize(p.absolute(entrypoint.workspaceRoot.dir)),
+ ),
);
}
@@ -1326,8 +1344,9 @@
See https://dart.dev/go/sdk-constraint
''');
}
- if (!LanguageVersion.fromSdkConstraint(dartSdkConstraint)
- .supportsNullSafety) {
+ if (!LanguageVersion.fromSdkConstraint(
+ dartSdkConstraint,
+ ).supportsNullSafety) {
throw DataException('''
The lower bound of "sdk: '$dartSdkConstraint'" must be 2.12.0'
or higher to enable null safety.
@@ -1340,11 +1359,12 @@
for (final sdk in pubspec.sdkConstraints.keys) {
if (!sdks.containsKey(sdk)) {
final environment = pubspec.fields.nodes['environment'] as YamlMap;
- final keyNode = environment.nodes.entries
- .firstWhere((e) => (e.key as YamlNode).value == sdk)
- .key as YamlNode;
- throw SourceSpanApplicationException(
- '''
+ final keyNode =
+ environment.nodes.entries
+ .firstWhere((e) => (e.key as YamlNode).value == sdk)
+ .key
+ as YamlNode;
+ throw SourceSpanApplicationException('''
$pubspecPath refers to an unknown sdk '$sdk'.
Did you mean to add it as a dependency?
@@ -1353,9 +1373,7 @@
given sdk.
See https://dart.dev/go/sdk-constraint
-''',
- keyNode.span,
- );
+''', keyNode.span);
}
}
}
@@ -1447,7 +1465,8 @@
}
} else {
for (final packageToUpgrade in packagesToUpgrade) {
- final range = package.dependencies[packageToUpgrade] ??
+ final range =
+ package.dependencies[packageToUpgrade] ??
package.devDependencies[packageToUpgrade];
if (range != null) {
toTighten.add((package, range));
@@ -1462,24 +1481,24 @@
final resolvedVersion =
(packageVersions?.firstWhere((p) => p.name == range.name) ??
- lockFile.packages[range.name])
- ?.version ??
- workspaceVersions[range.name]!;
+ lockFile.packages[range.name])
+ ?.version ??
+ workspaceVersions[range.name]!;
if (range.source is HostedSource && constraint.isAny) {
- changesForPackage[range] = range
- .toRef()
- .withConstraint(VersionConstraint.compatibleWith(resolvedVersion));
+ changesForPackage[range] = range.toRef().withConstraint(
+ VersionConstraint.compatibleWith(resolvedVersion),
+ );
} else if (constraint is VersionRange) {
final min = constraint.min;
if (min != null && min < resolvedVersion) {
changesForPackage[range] = range.toRef().withConstraint(
- VersionRange(
- min: resolvedVersion,
- max: constraint.max,
- includeMin: true,
- includeMax: constraint.includeMax,
- ).asCompatibleWithIfPossible(),
- );
+ VersionRange(
+ min: resolvedVersion,
+ max: constraint.max,
+ includeMin: true,
+ includeMax: constraint.includeMax,
+ ).asCompatibleWithIfPossible(),
+ );
}
}
}
@@ -1504,10 +1523,10 @@
for (final change in changesForPackage.values) {
final section =
deps.contains(change.name) ? 'dependencies' : 'dev_dependencies';
- yamlEditor.update(
- [section, change.name],
- pubspecDescription(change, cache, package),
- );
+ yamlEditor.update([
+ section,
+ change.name,
+ ], pubspecDescription(change, cache, package));
}
writeTextFile(package.pubspecPath, yamlEditor.toString());
}
@@ -1516,10 +1535,7 @@
}
/// Outputs a summary of [changeSet].
- void _outputChangeSummary(
- ChangeSet changeSet, {
- required bool dryRun,
- }) {
+ void _outputChangeSummary(ChangeSet changeSet, {required bool dryRun}) {
if (workspaceRoot.workspaceChildren.isEmpty) {
final changesToWorkspaceRoot = changeSet[workspaceRoot] ?? {};
if (changesToWorkspaceRoot.isEmpty) {
@@ -1527,10 +1543,14 @@
log.message('\nNo changes $wouldBe pubspec.yaml!');
} else {
final changed = dryRun ? 'Would change' : 'Changed';
- final constraints =
- pluralize('constraint', changesToWorkspaceRoot.length);
- log.message('\n$changed ${changesToWorkspaceRoot.length} '
- '$constraints in pubspec.yaml:');
+ final constraints = pluralize(
+ 'constraint',
+ changesToWorkspaceRoot.length,
+ );
+ log.message(
+ '\n$changed ${changesToWorkspaceRoot.length} '
+ '$constraints in pubspec.yaml:',
+ );
changesToWorkspaceRoot.forEach((from, to) {
log.message(' ${from.name}: ${from.constraint} -> ${to.constraint}');
});
@@ -1545,8 +1565,10 @@
if (changesToPackage.isEmpty) continue;
final changed = dryRun ? 'Would change' : 'Changed';
final constraints = pluralize('constraint', changesToPackage.length);
- log.message('\n$changed ${changesToPackage.length} '
- '$constraints in ${package.pubspecPath}:');
+ log.message(
+ '\n$changed ${changesToPackage.length} '
+ '$constraints in ${package.pubspecPath}:',
+ );
changesToPackage.forEach((from, to) {
log.message(' ${from.name}: ${from.constraint} -> ${to.constraint}');
});
diff --git a/lib/src/error_group.dart b/lib/src/error_group.dart
index 78bb49b..6e46938 100644
--- a/lib/src/error_group.dart
+++ b/lib/src/error_group.dart
@@ -67,8 +67,10 @@
/// error, it's a [StateError] to try to register a new [Future].
Future<T> registerFuture<T>(Future<T> future) {
if (_isDone) {
- throw StateError("Can't register new members on a complete "
- 'ErrorGroup.');
+ throw StateError(
+ "Can't register new members on a complete "
+ 'ErrorGroup.',
+ );
}
final wrapped = _ErrorGroupFuture(this, future);
@@ -90,8 +92,10 @@
/// error, it's a [StateError] to try to register a new [Stream].
Stream<T> registerStream<T>(Stream<T> stream) {
if (_isDone) {
- throw StateError("Can't register new members on a complete "
- 'ErrorGroup.');
+ throw StateError(
+ "Can't register new members on a complete "
+ 'ErrorGroup.',
+ );
}
final wrapped = _ErrorGroupStream(this, stream);
@@ -146,7 +150,8 @@
void _signalFutureComplete(_ErrorGroupFuture future) {
if (_isDone) return;
- _isDone = _futures.every((future) => future._isDone) &&
+ _isDone =
+ _futures.every((future) => future._isDone) &&
_streams.every((stream) => stream._isDone);
if (_isDone) _doneCompleter.complete();
}
@@ -155,7 +160,8 @@
void _signalStreamComplete(_ErrorGroupStream stream) {
if (_isDone) return;
- _isDone = _futures.every((future) => future._isDone) &&
+ _isDone =
+ _futures.every((future) => future._isDone) &&
_streams.every((stream) => stream._isDone);
if (_isDone) _doneCompleter.complete();
}
@@ -182,13 +188,15 @@
/// Creates a new [_ErrorGroupFuture] that's a child of [_group] and wraps
/// [inner].
_ErrorGroupFuture(this._group, Future<T> inner) {
- inner.then((value) {
- if (!_isDone) _completer.complete(value);
- _isDone = true;
- _group._signalFutureComplete(this);
- }).catchError((Object e, [StackTrace? s]) async {
- _group._signalError(e, s);
- });
+ inner
+ .then((value) {
+ if (!_isDone) _completer.complete(value);
+ _isDone = true;
+ _group._signalFutureComplete(this);
+ })
+ .catchError((Object e, [StackTrace? s]) async {
+ _group._signalError(e, s);
+ });
// Make sure _completer.future doesn't automatically send errors to the
// top-level.
@@ -275,12 +283,15 @@
/// Creates a new [_ErrorGroupStream] that's a child of [_group] and wraps
/// [inner].
_ErrorGroupStream(this._group, Stream<T> inner)
- : _controller = StreamController(sync: true) {
+ : _controller = StreamController(sync: true) {
// Use old-style asBroadcastStream behavior - cancel source _subscription
// the first time the stream has no listeners.
- _stream = inner.isBroadcast
- ? _controller.stream.asBroadcastStream(onCancel: (sub) => sub.cancel())
- : _controller.stream;
+ _stream =
+ inner.isBroadcast
+ ? _controller.stream.asBroadcastStream(
+ onCancel: (sub) => sub.cancel(),
+ )
+ : _controller.stream;
_subscription = inner.listen(
_controller.add,
onError: _group._signalError,
diff --git a/lib/src/exceptions.dart b/lib/src/exceptions.dart
index 0385d16..375eab1 100644
--- a/lib/src/exceptions.dart
+++ b/lib/src/exceptions.dart
@@ -59,7 +59,7 @@
final Chain? innerChain;
WrappedException(super.message, this.innerError, [StackTrace? innerTrace])
- : innerChain = innerTrace == null ? null : Chain.forTrace(innerTrace);
+ : innerChain = innerTrace == null ? null : Chain.forTrace(innerTrace);
}
/// A class for exceptions that shouldn't be printed at the top level.
@@ -68,7 +68,7 @@
/// [log.exception].
class SilentException extends WrappedException {
SilentException(Object? innerError, [StackTrace? innerTrace])
- : super(innerError.toString(), innerError, innerTrace);
+ : super(innerError.toString(), innerError, innerTrace);
}
/// A class for errors in a command's input data.
diff --git a/lib/src/executable.dart b/lib/src/executable.dart
index 654b90e..ee096d5 100644
--- a/lib/src/executable.dart
+++ b/lib/src/executable.dart
@@ -59,11 +59,15 @@
if (entrypoint.workPackage.name != executable.package &&
!entrypoint.workPackage.immediateDependencies.containsKey(package)) {
if ((await entrypoint.packageGraph).packages.containsKey(package)) {
- dataError('Package "$package" is not an immediate dependency.\n'
- 'Cannot run executables in transitive dependencies.');
+ dataError(
+ 'Package "$package" is not an immediate dependency.\n'
+ 'Cannot run executables in transitive dependencies.',
+ );
} else {
- dataError('Could not find package "$package". Did you forget to add a '
- 'dependency?');
+ dataError(
+ 'Could not find package "$package". Did you forget to add a '
+ 'dependency?',
+ );
}
}
@@ -189,17 +193,13 @@
// semantics without `fork` for starting the subprocess.
// https://github.com/dart-lang/sdk/issues/41966.
final subscription = ProcessSignal.sigint.watch().listen((e) {});
- final process = await Process.start(
- Platform.resolvedExecutable,
- [
- '--packages=$packageConfig',
- ...vmArgs,
- if (enableAsserts) '--enable-asserts',
- p.toUri(path).toString(),
- ...args,
- ],
- mode: ProcessStartMode.inheritStdio,
- );
+ final process = await Process.start(Platform.resolvedExecutable, [
+ '--packages=$packageConfig',
+ ...vmArgs,
+ if (enableAsserts) '--enable-asserts',
+ p.toUri(path).toString(),
+ ...args,
+ ], mode: ProcessStartMode.inheritStdio);
final exitCode = await process.exitCode;
await subscription.cancel();
@@ -312,8 +312,10 @@
final String workspaceRootDir;
try {
final String workspaceRootRelativeToCwd;
- (packageConfig: packageConfig, rootDir: workspaceRootRelativeToCwd) =
- await Entrypoint.ensureUpToDate(
+ (
+ packageConfig: packageConfig,
+ rootDir: workspaceRootRelativeToCwd,
+ ) = await Entrypoint.ensureUpToDate(
rootOrCurrent,
cache: SystemCache(rootDir: pubCacheDir),
);
@@ -326,26 +328,34 @@
}
// Find the first directory from [rootOrCurrent] to [workspaceRootDir] (both
// inclusive) that contains a package from the package config.
- final packageConfigDir =
- p.join(workspaceRootDir, '.dart_tool', 'package_config.json');
+ final packageConfigDir = p.join(
+ workspaceRootDir,
+ '.dart_tool',
+ 'package_config.json',
+ );
- final rootPackageName = maxBy<(String, String), int>(
- packageConfig.packages.map((package) {
- final packageRootDir =
- p.canonicalize(package.resolvedRootDir(packageConfigDir));
- if (p.equals(packageRootDir, rootOrCurrent) ||
- p.isWithin(packageRootDir, rootOrCurrent)) {
- return (package.name, packageRootDir);
- } else {
- return null;
- }
- }).nonNulls,
- (tuple) => tuple.$2.length,
- )?.$1;
+ final rootPackageName =
+ maxBy<(String, String), int>(
+ packageConfig.packages.map((package) {
+ final packageRootDir = p.canonicalize(
+ package.resolvedRootDir(packageConfigDir),
+ );
+ if (p.equals(packageRootDir, rootOrCurrent) ||
+ p.isWithin(packageRootDir, rootOrCurrent)) {
+ return (package.name, packageRootDir);
+ } else {
+ return null;
+ }
+ }).nonNulls,
+ (tuple) => tuple.$2.length,
+ )?.$1;
if (rootPackageName == null) {
- final packageConfigPath =
- p.join(workspaceRootDir, '.dart_tool', 'package_config.json');
+ final packageConfigPath = p.join(
+ workspaceRootDir,
+ '.dart_tool',
+ 'package_config.json',
+ );
throw CommandResolutionFailedException._(
'$packageConfigPath did not contain its own root package',
CommandResolutionIssue.fileNotFound,
@@ -486,7 +496,7 @@
/// Adapts the program-name following conventions of dart run
Executable.adaptProgramName(this.package, String program)
- : relativePath = _adaptProgramToPath(program);
+ : relativePath = _adaptProgramToPath(program);
Executable(this.package, this.relativePath);
diff --git a/lib/src/flutter_releases.dart b/lib/src/flutter_releases.dart
index ec7abd1..c048e1a 100644
--- a/lib/src/flutter_releases.dart
+++ b/lib/src/flutter_releases.dart
@@ -36,11 +36,12 @@
'Bad response - releases should be a list of maps.',
);
}
- final channel = {
- 'beta': Channel.beta,
- 'stable': Channel.stable,
- 'dev': Channel.dev,
- }[release['channel']];
+ final channel =
+ {
+ 'beta': Channel.beta,
+ 'stable': Channel.stable,
+ 'dev': Channel.dev,
+ }[release['channel']];
if (channel == null) {
throw const FormatException('Release with bad channel');
}
@@ -85,18 +86,16 @@
}
return flutterReleases.firstWhereOrNull(
(release) =>
- (sdkConstraints['flutter'] ?? VersionConstraint.any)
- .allows(release.flutterVersion) &&
- (sdkConstraints['dart'] ?? VersionConstraint.any)
- .allows(release.dartVersion),
+ (sdkConstraints['flutter'] ?? VersionConstraint.any).allows(
+ release.flutterVersion,
+ ) &&
+ (sdkConstraints['dart'] ?? VersionConstraint.any).allows(
+ release.dartVersion,
+ ),
);
}
-enum Channel {
- stable,
- beta,
- dev,
-}
+enum Channel { stable, beta, dev }
/// A version of the Flutter SDK and its related Dart SDK.
class FlutterRelease {
@@ -109,7 +108,8 @@
required this.channel,
});
@override
- String toString() => 'FlutterRelease(flutter=$flutterVersion, '
+ String toString() =>
+ 'FlutterRelease(flutter=$flutterVersion, '
'dart=$dartVersion, '
'channel=$channel)';
}
diff --git a/lib/src/git.dart b/lib/src/git.dart
index 2f0603f..09f401a 100644
--- a/lib/src/git.dart
+++ b/lib/src/git.dart
@@ -35,13 +35,14 @@
final int exitCode;
@override
- String get message => 'Git error. Command: `git ${args.join(' ')}`\n'
+ String get message =>
+ 'Git error. Command: `git ${args.join(' ')}`\n'
'stdout: ${stdout is String ? stdout : '<binary>'}\n'
'stderr: ${stderr is String ? stderr : '<binary>'}\n'
'exit code: $exitCode';
GitException(Iterable<String> args, this.stdout, this.stderr, this.exitCode)
- : args = args.toList();
+ : args = args.toList();
@override
String toString() => message;
@@ -92,8 +93,10 @@
Encoding stderrEncoding = systemEncoding,
}) async {
if (!isInstalled) {
- fail('Cannot find a Git executable.\n'
- 'Please ensure Git is correctly installed.');
+ fail(
+ 'Cannot find a Git executable.\n'
+ 'Please ensure Git is correctly installed.',
+ );
}
log.muteProgress();
@@ -107,12 +110,7 @@
stderrEncoding: stderrEncoding,
);
if (!result.success) {
- throw GitException(
- args,
- result.stdout,
- result.stderr,
- result.exitCode,
- );
+ throw GitException(args, result.stdout, result.stderr, result.exitCode);
}
return result.stdout;
} finally {
@@ -129,8 +127,10 @@
Encoding stderrEncoding = systemEncoding,
}) {
if (!isInstalled) {
- fail('Cannot find a Git executable.\n'
- 'Please ensure Git is correctly installed.');
+ fail(
+ 'Cannot find a Git executable.\n'
+ 'Please ensure Git is correctly installed.',
+ );
}
final result = runProcessSync(
@@ -142,12 +142,7 @@
stderrEncoding: stderrEncoding,
);
if (!result.success) {
- throw GitException(
- args,
- result.stdout,
- result.stderr,
- result.exitCode,
- );
+ throw GitException(args, result.stdout, result.stderr, result.exitCode);
}
return result.stdout;
@@ -161,8 +156,10 @@
Encoding stderrEncoding = systemEncoding,
}) {
if (!isInstalled) {
- fail('Cannot find a Git executable.\n'
- 'Please ensure Git is correctly installed.');
+ fail(
+ 'Cannot find a Git executable.\n'
+ 'Please ensure Git is correctly installed.',
+ );
}
final result = runProcessSyncBytes(
@@ -173,12 +170,7 @@
stderrEncoding: stderrEncoding,
);
if (!result.success) {
- throw GitException(
- args,
- result.stdout,
- result.stderr,
- result.exitCode,
- );
+ throw GitException(args, result.stdout, result.stderr, result.exitCode);
}
return result.stdout;
@@ -218,8 +210,10 @@
// Some users may have configured commands such as autorun, which may
// produce additional output, so we need to look for "git version"
// in every line of the output.
- final match = RegExp(r'^git version (\d+)\.(\d+)\..*$', multiLine: true)
- .matchAsPrefix(output);
+ final match = RegExp(
+ r'^git version (\d+)\.(\d+)\..*$',
+ multiLine: true,
+ ).matchAsPrefix(output);
if (match == null) return false;
final versionString = match[0]!.substring('git version '.length);
// Git seems to use many parts in the version number. We just check the
diff --git a/lib/src/global_packages.dart b/lib/src/global_packages.dart
index 43a5263..2d4eae3 100644
--- a/lib/src/global_packages.dart
+++ b/lib/src/global_packages.dart
@@ -206,8 +206,10 @@
final tempDir = cache.createTempDir();
// TODO(rnystrom): Look in "bin" and display list of binaries that
// user can run.
- LockFile([id], mainDependencies: {id.name})
- .writeToFile(p.join(tempDir, 'pubspec.lock'), cache);
+ LockFile(
+ [id],
+ mainDependencies: {id.name},
+ ).writeToFile(p.join(tempDir, 'pubspec.lock'), cache);
tryDeleteEntry(_packageDir(name));
tryRenameDir(tempDir, _packageDir(name));
@@ -351,14 +353,20 @@
final description = id.description.description;
if (description is GitDescription) {
- log.message('Package ${log.bold(name)} is currently active from Git '
- 'repository "${GitDescription.prettyUri(description.url)}".');
+ log.message(
+ 'Package ${log.bold(name)} is currently active from Git '
+ 'repository "${GitDescription.prettyUri(description.url)}".',
+ );
} else if (description is PathDescription) {
- log.message('Package ${log.bold(name)} is currently active at path '
- '"${description.path}".');
+ log.message(
+ 'Package ${log.bold(name)} is currently active at path '
+ '"${description.path}".',
+ );
} else {
- log.message('Package ${log.bold(name)} is currently active at version '
- '${log.bold(id.version.toString())}.');
+ log.message(
+ 'Package ${log.bold(name)} is currently active at version '
+ '${log.bold(id.version.toString())}.',
+ );
}
return lockFile;
}
@@ -433,8 +441,10 @@
lockFile.sdkConstraints.forEach((sdkName, constraint) {
final sdk = sdks[sdkName];
if (sdk == null) {
- dataError('${log.bold(name)} as globally activated requires '
- 'unknown SDK "$name".');
+ dataError(
+ '${log.bold(name)} as globally activated requires '
+ 'unknown SDK "$name".',
+ );
} else if (sdkName == 'dart') {
if (constraint.effectiveConstraint.allows((sdk as DartSdk).version)) {
return;
@@ -446,8 +456,10 @@
`$topLevelProgram pub global activate $name` to reactivate.
''');
} else {
- dataError('${log.bold(name)} as globally activated requires the '
- '${sdk.name} SDK, which is unsupported for global executables.');
+ dataError(
+ '${log.bold(name)} as globally activated requires the '
+ '${sdk.name} SDK, which is unsupported for global executables.',
+ );
}
});
@@ -582,8 +594,10 @@
LockFile.load(p.join(_directory, path), cache.sources).packages[name];
if (id == null) {
- throw FormatException("Pubspec for activated package $name didn't "
- 'contain an entry for itself.');
+ throw FormatException(
+ "Pubspec for activated package $name didn't "
+ 'contain an entry for itself.',
+ );
}
return id;
@@ -608,7 +622,7 @@
/// Returns a pair of two lists of strings. The first indicates which packages
/// were successfully re-activated; the second indicates which failed.
Future<(List<String> successes, List<String> failures)>
- repairActivatedPackages() async {
+ repairActivatedPackages() async {
final executables = <String, List<String>>{};
if (dirExists(_binStubDir)) {
for (var entry in listDir(_binStubDir)) {
@@ -667,7 +681,8 @@
}
successes.add(id.name);
} catch (error, stackTrace) {
- var message = 'Failed to reactivate '
+ var message =
+ 'Failed to reactivate '
'${log.bold(p.basenameWithoutExtension(entry))}';
if (id != null) {
message += ' ${id.version}';
@@ -683,15 +698,19 @@
}
if (executables.isNotEmpty) {
- final message = StringBuffer('Binstubs exist for non-activated '
- 'packages:\n');
+ final message = StringBuffer(
+ 'Binstubs exist for non-activated '
+ 'packages:\n',
+ );
executables.forEach((package, executableNames) {
for (var executable in executableNames) {
deleteEntry(p.join(_binStubDir, executable));
}
- message.writeln(' From ${log.bold(package)}: '
- '${toSentence(executableNames)}');
+ message.writeln(
+ ' From ${log.bold(package)}: '
+ '${toSentence(executableNames)}',
+ );
});
log.error(message.toString());
}
@@ -723,8 +742,9 @@
binStubScript,
overwrite: true,
isRefreshingBinstub: true,
- snapshot:
- executable.pathOfGlobalSnapshot(entrypoint.workspaceRoot.dir),
+ snapshot: executable.pathOfGlobalSnapshot(
+ entrypoint.workspaceRoot.dir,
+ ),
);
}
}
@@ -802,25 +822,32 @@
if (collided.isNotEmpty) {
for (var command in collided.keys.sorted()) {
if (overwriteBinStubs) {
- log.warning('Replaced ${log.bold(command)} previously installed from '
- '${log.bold(collided[command].toString())}.');
+ log.warning(
+ 'Replaced ${log.bold(command)} previously installed from '
+ '${log.bold(collided[command].toString())}.',
+ );
} else {
- log.warning('Executable ${log.bold(command)} was already installed '
- 'from ${log.bold(collided[command].toString())}.');
+ log.warning(
+ 'Executable ${log.bold(command)} was already installed '
+ 'from ${log.bold(collided[command].toString())}.',
+ );
}
}
if (!overwriteBinStubs) {
- log.warning('Deactivate the other package(s) or activate '
- '${log.bold(package.name)} using --overwrite.');
+ log.warning(
+ 'Deactivate the other package(s) or activate '
+ '${log.bold(package.name)} using --overwrite.',
+ );
}
}
// Show errors for any unknown executables.
if (executables != null) {
- final unknown = executables
- .where((exe) => !package.pubspec.executables.keys.contains(exe))
- .sorted();
+ final unknown =
+ executables
+ .where((exe) => !package.pubspec.executables.keys.contains(exe))
+ .sorted();
if (unknown.isNotEmpty) {
dataError("Unknown ${namedSequence('executable', unknown)}.");
}
@@ -834,8 +861,10 @@
final script = package.pubspec.executables[executable];
final scriptPath = p.join('bin', '$script.dart');
if (!binFiles.contains(scriptPath)) {
- log.warning('Warning: Executable "$executable" runs "$scriptPath", '
- 'which was not found in ${log.bold(package.name)}.');
+ log.warning(
+ 'Warning: Executable "$executable" runs "$scriptPath", '
+ 'which was not found in ${log.bold(package.name)}.',
+ );
}
}
@@ -955,8 +984,10 @@
final result = Process.runSync('chmod', ['+x', tmpPath]);
if (result.exitCode != 0) {
// Couldn't make it executable so don't leave it laying around.
- fail('Could not make "$tmpPath" executable (exit code '
- '${result.exitCode}):\n${result.stderr}');
+ fail(
+ 'Could not make "$tmpPath" executable (exit code '
+ '${result.exitCode}):\n${result.stderr}',
+ );
}
}
File(tmpPath).renameSync(binStubPath);
@@ -998,18 +1029,22 @@
final result = runProcessSync('where', [r'\q', '$installed.bat']);
if (result.exitCode == 0) return;
- log.warning("${log.yellow('Warning:')} Pub installs executables into "
- '${log.bold(_binStubDir)}, which is not on your path.\n'
- "You can fix that by adding that directory to your system's "
- '"Path" environment variable.\n'
- 'A web search for "configure windows path" will show you how.');
+ log.warning(
+ "${log.yellow('Warning:')} Pub installs executables into "
+ '${log.bold(_binStubDir)}, which is not on your path.\n'
+ "You can fix that by adding that directory to your system's "
+ '"Path" environment variable.\n'
+ 'A web search for "configure windows path" will show you how.',
+ );
} else {
// See if the shell can find one of the binstubs.
//
// The "command" builtin is more reliable than the "which" executable. See
// http://unix.stackexchange.com/questions/85249/why-not-use-which-what-to-use-then
- final result =
- runProcessSync('command', ['-v', installed], runInShell: true);
+ final result = runProcessSync('command', [
+ '-v',
+ installed,
+ ], runInShell: true);
if (result.exitCode == 0) return;
var binDir = _binStubDir;
@@ -1019,17 +1054,20 @@
p.relative(binDir, from: Platform.environment['HOME']),
);
}
- final shellConfigFiles = Platform.isMacOS
- // zsh is default on mac - mention that first.
- ? '(.zshrc, .bashrc, .bash_profile, etc.)'
- : '(.bashrc, .bash_profile, .zshrc etc.)';
- log.warning("${log.yellow('Warning:')} Pub installs executables into "
- '${log.bold(binDir)}, which is not on your path.\n'
- "You can fix that by adding this to your shell's config file "
- '$shellConfigFiles:\n'
- '\n'
- " ${log.bold('export PATH="\$PATH":"$binDir"')}\n"
- '\n');
+ final shellConfigFiles =
+ Platform.isMacOS
+ // zsh is the default on macOS - mention that first.
+ ? '(.zshrc, .bashrc, .bash_profile, etc.)'
+ : '(.bashrc, .bash_profile, .zshrc etc.)';
+ log.warning(
+ "${log.yellow('Warning:')} Pub installs executables into "
+ '${log.bold(binDir)}, which is not on your path.\n'
+ "You can fix that by adding this to your shell's config file "
+ '$shellConfigFiles:\n'
+ '\n'
+ " ${log.bold('export PATH="\$PATH":"$binDir"')}\n"
+ '\n',
+ );
}
}
diff --git a/lib/src/http.dart b/lib/src/http.dart
index 2e01664..51f2297 100644
--- a/lib/src/http.dart
+++ b/lib/src/http.dart
@@ -60,8 +60,9 @@
// This prevents conflicts where the same port is occupied by the same
// port on localhost.
final resolutions = await InternetAddress.lookup('localhost');
- final ipv4Address = resolutions
- .firstWhereOrNull((a) => a.type == InternetAddressType.IPv4);
+ final ipv4Address = resolutions.firstWhereOrNull(
+ (a) => a.type == InternetAddressType.IPv4,
+ );
if (ipv4Address != null) {
request = _OverrideUrlRequest(
request.url.replace(host: ipv4Address.host),
@@ -91,8 +92,9 @@
void _logRequest(http.BaseRequest request) {
final requestLog = StringBuffer();
requestLog.writeln('HTTP ${request.method} ${request.url}');
- request.headers
- .forEach((name, value) => requestLog.writeln(_logField(name, value)));
+ request.headers.forEach(
+ (name, value) => requestLog.writeln(_logField(name, value)),
+ );
if (request.method == 'POST') {
final contentTypeString = request.headers[HttpHeaders.contentTypeHeader];
@@ -130,11 +132,14 @@
final responseLog = StringBuffer();
final request = response.request!;
final stopwatch = _requestStopwatches.remove(request)!..stop();
- responseLog.writeln('HTTP response ${response.statusCode} '
- '${response.reasonPhrase} for ${request.method} ${request.url}');
+ responseLog.writeln(
+ 'HTTP response ${response.statusCode} '
+ '${response.reasonPhrase} for ${request.method} ${request.url}',
+ );
responseLog.writeln('took ${stopwatch.elapsed}');
- response.headers
- .forEach((name, value) => responseLog.writeln(_logField(name, value)));
+ response.headers.forEach(
+ (name, value) => responseLog.writeln(_logField(name, value)),
+ );
log.io(responseLog.toString().trim());
}
@@ -243,11 +248,10 @@
error['message'] is! String) {
invalidServerResponse(response);
}
- final formattedMessage =
- log.red(sanitizeForTerminal(error['message'] as String));
- fail(
- 'Message from server: $formattedMessage',
+ final formattedMessage = log.red(
+ sanitizeForTerminal(error['message'] as String),
);
+ fail('Message from server: $formattedMessage');
}
/// Handles an unsuccessful XML-formatted response from google cloud storage.
@@ -330,7 +334,8 @@
@override
String toString() {
- var temp = 'PubHttpResponseException: HTTP error ${response.statusCode} '
+ var temp =
+ 'PubHttpResponseException: HTTP error ${response.statusCode} '
'${response.reasonPhrase}';
if (message != '') {
temp += ': $message';
@@ -364,13 +369,15 @@
Future<T> retryForHttp<T>(String operation, FutureOr<T> Function() fn) async {
return await retry(
() async => await _httpPool.withResource(() async => await fn()),
- retryIf: (e) async =>
- (e is PubHttpException && e.isIntermittent) ||
- e is TimeoutException ||
- e is http.ClientException ||
- isHttpIOException(e),
- onRetry: (exception, attemptNumber) async =>
- log.io('Attempt #$attemptNumber for $operation'),
+ retryIf:
+ (e) async =>
+ (e is PubHttpException && e.isIntermittent) ||
+ e is TimeoutException ||
+ e is http.ClientException ||
+ isHttpIOException(e),
+ onRetry:
+ (exception, attemptNumber) async =>
+ log.io('Attempt #$attemptNumber for $operation'),
maxAttempts: math.max(
1, // Having less than 1 attempt doesn't make sense.
int.tryParse(Platform.environment['PUB_MAX_HTTP_RETRIES'] ?? '') ?? 7,
@@ -403,9 +410,11 @@
return;
} else if (statusCode == HttpStatus.notAcceptable &&
request?.headers['Accept'] == pubApiHeaders['Accept']) {
- fail('Pub ${sdk.version} is incompatible with the current version of '
- '${request?.url.host}.\n'
- 'Upgrade pub to the latest version and try again.');
+ fail(
+ 'Pub ${sdk.version} is incompatible with the current version of '
+ '${request?.url.host}.\n'
+ 'Upgrade pub to the latest version and try again.',
+ );
} else if (statusCode >= 500 ||
statusCode == HttpStatus.requestTimeout ||
statusCode == HttpStatus.tooManyRequests) {
diff --git a/lib/src/ignore.dart b/lib/src/ignore.dart
index 0b9b1fc..0a843be 100644
--- a/lib/src/ignore.dart
+++ b/lib/src/ignore.dart
@@ -96,10 +96,10 @@
bool ignoreCase = false,
void Function(String pattern, FormatException exception)? onInvalidPattern,
}) : _rules = _parseIgnorePatterns(
- patterns,
- ignoreCase,
- onInvalidPattern: onInvalidPattern,
- ).toList(growable: false);
+ patterns,
+ ignoreCase,
+ onInvalidPattern: onInvalidPattern,
+ ).toList(growable: false);
/// Returns `true` if [path] is ignored by the patterns used to create this
/// [Ignore] instance, assuming those patterns are placed at `.`.
@@ -156,10 +156,11 @@
return [path.substring(startOfNext, nextSlash)];
},
ignoreForDir: (dir) => dir == '.' || dir.isEmpty ? this : null,
- isDir: (candidate) =>
- candidate == '.' ||
- candidate.isEmpty ||
- path.length > candidate.length && path[candidate.length] == '/',
+ isDir:
+ (candidate) =>
+ candidate == '.' ||
+ candidate.isEmpty ||
+ path.length > candidate.length && path[candidate.length] == '/',
).isEmpty;
}
@@ -261,8 +262,9 @@
final ignore = ignoreForDir(
partial == '/' ? '.' : partial.substring(1, partial.length - 1),
);
- ignoreStack
- .add(ignore == null ? null : _IgnorePrefixPair(ignore, partial));
+ ignoreStack.add(
+ ignore == null ? null : _IgnorePrefixPair(ignore, partial),
+ );
}
// Do a depth first tree-search starting at [beneath].
// toVisit is a stack containing all items that are waiting to be processed.
@@ -293,9 +295,9 @@
ignore == null
? null
: _IgnorePrefixPair(
- ignore,
- current == '/' ? current : '$current/',
- ),
+ ignore,
+ current == '/' ? current : '$current/',
+ ),
);
// Put all entities in current on the stack to be processed.
toVisit.add(listDir(normalizedCurrent).map((x) => '/$x').toList());
@@ -329,9 +331,7 @@
_IgnoreParseResult.invalid(this.pattern, this.exception) : rule = null;
- _IgnoreParseResult.empty(this.pattern)
- : rule = null,
- exception = null;
+ _IgnoreParseResult.empty(this.pattern) : rule = null, exception = null;
}
class _IgnoreRule {
@@ -542,11 +542,7 @@
try {
return _IgnoreParseResult(
pattern,
- _IgnoreRule(
- RegExp(expr, caseSensitive: !ignoreCase),
- negative,
- pattern,
- ),
+ _IgnoreRule(RegExp(expr, caseSensitive: !ignoreCase), negative, pattern),
);
} on FormatException catch (e) {
throw AssertionError(
diff --git a/lib/src/io.dart b/lib/src/io.dart
index 15ac909..608bf87 100644
--- a/lib/src/io.dart
+++ b/lib/src/io.dart
@@ -113,8 +113,9 @@
/// directories.
String canonicalize(String pathString) {
final seen = <String>{};
- var components =
- Queue<String>.from(p.split(p.normalize(p.absolute(pathString))));
+ var components = Queue<String>.from(
+ p.split(p.normalize(p.absolute(pathString))),
+ );
// The canonical path, built incrementally as we iterate through [components].
var newPath = components.removeFirst();
@@ -124,8 +125,9 @@
// resolved in turn.
while (components.isNotEmpty) {
seen.add(p.join(newPath, p.joinAll(components)));
- final resolvedPath =
- _resolveLink(p.join(newPath, components.removeFirst()));
+ final resolvedPath = _resolveLink(
+ p.join(newPath, components.removeFirst()),
+ );
final relative = p.relative(resolvedPath, from: newPath);
// If the resolved path of the component relative to `newPath` is just ".",
@@ -399,10 +401,13 @@
// file hidden.
if (allowListFilter.any(pathInDir.contains)) {
- final allowedBasename =
- allowListFilter.firstWhere(pathInDir.contains);
- pathInDir =
- pathInDir.substring(0, pathInDir.length - allowedBasename.length);
+ final allowedBasename = allowListFilter.firstWhere(
+ pathInDir.contains,
+ );
+ pathInDir = pathInDir.substring(
+ 0,
+ pathInDir.length - allowedBasename.length,
+ );
}
if (pathInDir.contains('/.')) return false;
@@ -468,13 +473,17 @@
if (reason == null) rethrow;
if (i < maxRetries - 1) {
- log.io('Pub failed to $description because $reason. '
- 'Retrying in 50ms.');
+ log.io(
+ 'Pub failed to $description because $reason. '
+ 'Retrying in 50ms.',
+ );
sleep(const Duration(milliseconds: 50));
} else {
- fail('Pub failed to $description because $reason.\n'
- 'This may be caused by a virus scanner or having a file\n'
- 'in the directory open in another application.');
+ fail(
+ 'Pub failed to $description because $reason.\n'
+ 'This may be caused by a virus scanner or having a file\n'
+ 'in the directory open in another application.',
+ );
}
}
}
@@ -504,8 +513,10 @@
try {
deleteEntry(path);
} catch (error, stackTrace) {
- log.fine('Pub failed to delete $path: $error\n'
- '${Chain.forTrace(stackTrace)}');
+ log.fine(
+ 'Pub failed to delete $path: $error\n'
+ '${Chain.forTrace(stackTrace)}',
+ );
}
}
@@ -520,14 +531,10 @@
/// Renames (i.e. moves) the directory [from] to [to].
void renameDir(String from, String to) {
- _attempt(
- 'rename directory',
- () {
- log.io('Renaming directory $from to $to.');
- Directory(from).renameSync(to);
- },
- ignoreEmptyDir: true,
- );
+ _attempt('rename directory', () {
+ log.io('Renaming directory $from to $to.');
+ Directory(from).renameSync(to);
+ }, ignoreEmptyDir: true);
}
/// Renames directory [from] to [to].
@@ -561,27 +568,27 @@
bool _isDirectoryNotEmptyException(FileSystemException e) {
final errorCode = e.osError?.errorCode;
return
- // On Linux rename will fail with either ENOTEMPTY or EEXISTS if directory
- // exists: https://man7.org/linux/man-pages/man2/rename.2.html
+ // On Linux rename will fail with either ENOTEMPTY or EEXISTS if directory
+ // exists: https://man7.org/linux/man-pages/man2/rename.2.html
+ // ```
+ // #define ENOTEMPTY 39 /* Directory not empty */
+ // #define EEXIST 17 /* File exists */
+ // ```
+ // https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/tree/include/uapi/asm-generic/errno-base.h#n21
+ // https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/tree/include/uapi/asm-generic/errno.h#n20
+ (Platform.isLinux && (errorCode == 39 || errorCode == 17)) ||
+ // On Windows this may fail with ERROR_DIR_NOT_EMPTY or
+ // ERROR_ALREADY_EXISTS
+ // https://docs.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499-
+ (Platform.isWindows && (errorCode == 145 || errorCode == 183)) ||
+ // On MacOS rename will fail with ENOTEMPTY if directory exists.
+ // We also catch EEXIST - perhaps that could also be thrown...
// ```
- // #define ENOTEMPTY 39 /* Directory not empty */
- // #define EEXIST 17 /* File exists */
+ // #define ENOTEMPTY 66 /* Directory not empty */
+ // #define EEXIST 17 /* File exists */
// ```
- // https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/tree/include/uapi/asm-generic/errno-base.h#n21
- // https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/tree/include/uapi/asm-generic/errno.h#n20
- (Platform.isLinux && (errorCode == 39 || errorCode == 17)) ||
- // On Windows this may fail with ERROR_DIR_NOT_EMPTY or
- // ERROR_ALREADY_EXISTS
- // https://docs.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499-
- (Platform.isWindows && (errorCode == 145 || errorCode == 183)) ||
- // On MacOS rename will fail with ENOTEMPTY if directory exists.
- // We also catch EEXIST - perhaps that could also be thrown...
- // ```
- // #define ENOTEMPTY 66 /* Directory not empty */
- // #define EEXIST 17 /* File exists */
- // ```
- // https://github.com/apple-oss-distributions/xnu/blob/bb611c8fecc755a0d8e56e2fa51513527c5b7a0e/bsd/sys/errno.h#L190
- (Platform.isMacOS && (errorCode == 66 || errorCode == 17));
+ // https://github.com/apple-oss-distributions/xnu/blob/bb611c8fecc755a0d8e56e2fa51513527c5b7a0e/bsd/sys/errno.h#L190
+ (Platform.isMacOS && (errorCode == 66 || errorCode == 17));
}
/// Creates a new symlink at path [symlink] that points to [target].
@@ -656,15 +663,17 @@
final bool runningFromFlutter =
Platform.environment.containsKey('PUB_ENVIRONMENT') &&
- (Platform.environment['PUB_ENVIRONMENT'] ?? '').contains('flutter_cli');
+ (Platform.environment['PUB_ENVIRONMENT'] ?? '').contains('flutter_cli');
/// A regular expression to match the script path of a pub script running from
/// source in the Dart repo.
-final _dartRepoRegExp = RegExp(r'/third_party/pkg/pub/('
- r'bin/pub\.dart'
- r'|'
- r'test/.*_test\.dart'
- r')$');
+final _dartRepoRegExp = RegExp(
+ r'/third_party/pkg/pub/('
+ r'bin/pub\.dart'
+ r'|'
+ r'test/.*_test\.dart'
+ r')$',
+);
/// Whether pub is running from source in the Dart repo.
///
@@ -676,17 +685,19 @@
///
/// This throws a [StateError] if it's called when not running pub from source
/// in the Dart repo.
-final String dartRepoRoot = (() {
- if (!runningFromDartRepo) {
- throw StateError('Not running from source in the Dart repo.');
- }
+final String dartRepoRoot =
+ (() {
+ if (!runningFromDartRepo) {
+ throw StateError('Not running from source in the Dart repo.');
+ }
- // Get the URL of the repo root in a way that works when either both running
- // as a test or as a pub executable.
- final url = Platform.script
- .replace(path: Platform.script.path.replaceAll(_dartRepoRegExp, ''));
- return p.fromUri(url);
-})();
+ // Get the URL of the repo root in a way that works both when running
+ // as a test and when running as a pub executable.
+ final url = Platform.script.replace(
+ path: Platform.script.path.replaceAll(_dartRepoRegExp, ''),
+ );
+ return p.fromUri(url);
+ })();
/// Displays a message and reads a yes/no confirmation from the user.
///
@@ -752,8 +763,10 @@
/// exited already. This is useful to prevent Future chains from proceeding
/// after you've decided to exit.
Future flushThenExit(int status) {
- return Future.wait([stdout.close(), stderr.close()])
- .then((_) => exit(status));
+ return Future.wait([
+ stdout.close(),
+ stderr.close(),
+ ]).then((_) => exit(status));
}
/// Returns a [EventSink] that pipes all data to [consumer] and a [Future] that
@@ -789,8 +802,11 @@
return _descriptorPool.withResource(() async {
ProcessResult result;
try {
- (executable, args) =
- _sanitizeExecutablePath(executable, args, workingDir: workingDir);
+ (executable, args) = _sanitizeExecutablePath(
+ executable,
+ args,
+ workingDir: workingDir,
+ );
result = await Process.run(
executable,
args,
@@ -834,8 +850,11 @@
return _descriptorPool.request().then((resource) async {
Process ioProcess;
try {
- (executable, args) =
- _sanitizeExecutablePath(executable, args, workingDir: workingDir);
+ (executable, args) = _sanitizeExecutablePath(
+ executable,
+ args,
+ workingDir: workingDir,
+ );
ioProcess = await Process.start(
executable,
args,
@@ -868,8 +887,11 @@
ArgumentError.checkNotNull(executable, 'executable');
ProcessResult result;
try {
- (executable, args) =
- _sanitizeExecutablePath(executable, args, workingDir: workingDir);
+ (executable, args) = _sanitizeExecutablePath(
+ executable,
+ args,
+ workingDir: workingDir,
+ );
result = Process.runSync(
executable,
args,
@@ -902,8 +924,11 @@
}) {
ProcessResult result;
try {
- (executable, args) =
- _sanitizeExecutablePath(executable, args, workingDir: workingDir);
+ (executable, args) = _sanitizeExecutablePath(
+ executable,
+ args,
+ workingDir: workingDir,
+ );
result = Process.runSync(
executable,
args,
@@ -939,9 +964,8 @@
final String stderr;
final int exitCode;
BytesProcessResult(List<int> stdout, this.stderr, this.exitCode)
- :
- // Not clear that we need to do this, but seems harmless.
- stdout = stdout is Uint8List ? stdout : Uint8List.fromList(stdout);
+ : // Not clear that we need to do this, but seems harmless.
+ stdout = stdout is Uint8List ? stdout : Uint8List.fromList(stdout);
bool get success => exitCode == exit_codes.SUCCESS;
}
@@ -1076,9 +1100,10 @@
/// If [host] is "localhost", this will automatically listen on both the IPv4
/// and IPv6 loopback addresses.
Future<HttpServer> bindServer(String host, int port) async {
- final server = host == 'localhost'
- ? await HttpMultiServer.loopback(port)
- : await HttpServer.bind(host, port);
+ final server =
+ host == 'localhost'
+ ? await HttpMultiServer.loopback(port)
+ : await HttpServer.bind(host, port);
server.autoCompress = true;
return server;
}
@@ -1172,9 +1197,10 @@
break;
case TypeFlag.symlink:
// Link to another file in this tar, relative from this entry.
- final resolvedTarget = p.joinAll(
- [parentDirectory, ...p.posix.split(entry.header.linkName!)],
- );
+ final resolvedTarget = p.joinAll([
+ parentDirectory,
+ ...p.posix.split(entry.header.linkName!),
+ ]);
if (!checkValidTarget(resolvedTarget)) {
// Don't allow links to files outside of this tar.
break;
@@ -1214,10 +1240,7 @@
/// considered to be [baseDir], which defaults to the current working directory.
///
/// Returns a [ByteStream] that emits the contents of the archive.
-ByteStream createTarGz(
- List<String> contents, {
- required String baseDir,
-}) {
+ByteStream createTarGz(List<String> contents, {required String baseDir}) {
final buffer = StringBuffer();
buffer.write('Creating .tar.gz stream containing:\n');
contents.forEach(buffer.writeln);
@@ -1245,8 +1268,10 @@
final name = p.url.joinAll(p.split(relative));
if (stat.type == FileSystemEntityType.link) {
- log.message('$entry is a link locally, but will be uploaded as a '
- 'duplicate file.');
+ log.message(
+ '$entry is a link locally, but will be uploaded as a '
+ 'duplicate file.',
+ );
}
if (stat.type == FileSystemEntityType.directory) {
return TarEntry(
diff --git a/lib/src/language_version.dart b/lib/src/language_version.dart
index f7d2b9d..6bf4c8a 100644
--- a/lib/src/language_version.dart
+++ b/lib/src/language_version.dart
@@ -24,15 +24,9 @@
factory LanguageVersion.parse(String languageVersion) {
final m = _languageVersionPattern.firstMatch(languageVersion);
if (m == null) {
- throw FormatException(
- 'Invalid language version string',
- languageVersion,
- );
+ throw FormatException('Invalid language version string', languageVersion);
}
- return LanguageVersion(
- int.parse(m.group(1)!),
- int.parse(m.group(2)!),
- );
+ return LanguageVersion(int.parse(m.group(1)!), int.parse(m.group(2)!));
}
/// The language version implied by a Dart SDK constraint in `pubspec.yaml`.
@@ -65,8 +59,7 @@
/// The language version implied by a Dart sdk version.
factory LanguageVersion.fromLanguageVersionToken(
LanguageVersionToken version,
- ) =>
- LanguageVersion(version.major, version.minor);
+ ) => LanguageVersion(version.major, version.minor);
bool get supportsNullSafety => this >= firstVersionWithNullSafety;
@@ -112,8 +105,10 @@
static const firstVersionWithNullSafety = LanguageVersion(2, 12);
static const firstVersionWithShorterHostedSyntax = LanguageVersion(2, 15);
static const firstVersionWithWorkspaces = LanguageVersion(3, 5);
- static const firstVersionForbidingUnknownDescriptionKeys =
- LanguageVersion(3, 7);
+ static const firstVersionForbidingUnknownDescriptionKeys = LanguageVersion(
+ 3,
+ 7,
+ );
/// Transform language version to string that can be parsed with
/// [LanguageVersion.parse].
diff --git a/lib/src/levenshtein.dart b/lib/src/levenshtein.dart
index 1c1f974..23abfeb 100644
--- a/lib/src/levenshtein.dart
+++ b/lib/src/levenshtein.dart
@@ -39,7 +39,6 @@
final newDistance = _min3(
1 + distances[j], // Deletion
1 + distances[j - 1], // Insertion
-
// Substitution
holder + (a.codeUnitAt(i - 1) == b.codeUnitAt(j - 1) ? 0 : 1),
);
diff --git a/lib/src/lock_file.dart b/lib/src/lock_file.dart
index edf9deb..4d5f14b 100644
--- a/lib/src/lock_file.dart
+++ b/lib/src/lock_file.dart
@@ -50,15 +50,15 @@
Set<String>? devDependencies,
Set<String>? overriddenDependencies,
}) : this._(
- {
- for (final id in ids)
- if (!id.isRoot) id.name: id,
- },
- sdkConstraints ?? {'dart': SdkConstraint(VersionConstraint.any)},
- mainDependencies ?? const UnmodifiableSetView.empty(),
- devDependencies ?? const UnmodifiableSetView.empty(),
- overriddenDependencies ?? const UnmodifiableSetView.empty(),
- );
+ {
+ for (final id in ids)
+ if (!id.isRoot) id.name: id,
+ },
+ sdkConstraints ?? {'dart': SdkConstraint(VersionConstraint.any)},
+ mainDependencies ?? const UnmodifiableSetView.empty(),
+ devDependencies ?? const UnmodifiableSetView.empty(),
+ overriddenDependencies ?? const UnmodifiableSetView.empty(),
+ );
LockFile._(
Map<String, PackageId> packages,
@@ -69,11 +69,11 @@
) : packages = UnmodifiableMapView(packages);
LockFile.empty()
- : packages = const {},
- sdkConstraints = {'dart': SdkConstraint(VersionConstraint.any)},
- mainDependencies = const UnmodifiableSetView.empty(),
- devDependencies = const UnmodifiableSetView.empty(),
- overriddenDependencies = const UnmodifiableSetView.empty();
+ : packages = const {},
+ sdkConstraints = {'dart': SdkConstraint(VersionConstraint.any)},
+ mainDependencies = const UnmodifiableSetView.empty(),
+ devDependencies = const UnmodifiableSetView.empty(),
+ overriddenDependencies = const UnmodifiableSetView.empty();
/// Loads a lockfile from [filePath].
factory LockFile.load(String filePath, SourceRegistry sources) {
@@ -111,8 +111,12 @@
);
final sdkConstraints = <String, SdkConstraint>{};
- final sdkNode =
- _getEntry<YamlScalar?>(parsed, 'sdk', 'string', required: false);
+ final sdkNode = _getEntry<YamlScalar?>(
+ parsed,
+ 'sdk',
+ 'string',
+ required: false,
+ );
if (sdkNode != null) {
// Lockfiles produced by pub versions from 1.14.0 through 1.18.0 included
// a top-level "sdk" field which encoded the unified constraint on the
@@ -123,8 +127,12 @@
);
}
- final sdksField =
- _getEntry<YamlMap?>(parsed, 'sdks', 'map', required: false);
+ final sdksField = _getEntry<YamlMap?>(
+ parsed,
+ 'sdks',
+ 'map',
+ required: false,
+ );
if (sdksField != null) {
_parseEachEntry<String, YamlScalar>(
@@ -136,11 +144,12 @@
// TODO(sigurdm): push the switching into `SdkConstraint`.
sdkConstraints[name] = switch (name) {
'dart' => SdkConstraint.interpretDartSdkConstraint(
- originalConstraint,
- defaultUpperBoundConstraint: null,
- ),
- 'flutter' =>
- SdkConstraint.interpretFlutterSdkConstraint(originalConstraint),
+ originalConstraint,
+ defaultUpperBoundConstraint: null,
+ ),
+ 'flutter' => SdkConstraint.interpretFlutterSdkConstraint(
+ originalConstraint,
+ ),
_ => SdkConstraint(originalConstraint),
};
},
@@ -155,27 +164,38 @@
final devDependencies = <String>{};
final overriddenDependencies = <String>{};
- final packageEntries =
- _getEntry<YamlMap?>(parsed, 'packages', 'map', required: false);
+ final packageEntries = _getEntry<YamlMap?>(
+ parsed,
+ 'packages',
+ 'map',
+ required: false,
+ );
if (packageEntries != null) {
_parseEachEntry<String, YamlMap>(
packageEntries,
(name, spec) {
// Parse the version.
- final versionEntry =
- _getEntry<YamlScalar>(spec, 'version', 'version string');
+ final versionEntry = _getEntry<YamlScalar>(
+ spec,
+ 'version',
+ 'version string',
+ );
final version = _parseVersion(versionEntry);
// Parse the source.
final sourceName = _getStringEntry(spec, 'source');
- final descriptionNode =
- _getEntry<YamlNode>(spec, 'description', 'description');
+ final descriptionNode = _getEntry<YamlNode>(
+ spec,
+ 'description',
+ 'description',
+ );
- final dynamic description = descriptionNode is YamlScalar
- ? descriptionNode.value
- : descriptionNode;
+ final dynamic description =
+ descriptionNode is YamlScalar
+ ? descriptionNode.value
+ : descriptionNode;
// Let the source parse the description.
final source = sources(sourceName);
@@ -238,10 +258,7 @@
try {
return fn();
} on FormatException catch (e) {
- throw SourceSpanFormatException(
- '$description: ${e.message}',
- span,
- );
+ throw SourceSpanFormatException('$description: ${e.message}', span);
}
}
@@ -254,11 +271,7 @@
}
static Version _parseVersion(YamlNode node) {
- return _parseNode(
- node,
- 'version',
- parse: Version.parse,
- );
+ return _parseNode(node, 'version', parse: Version.parse);
}
static String _getStringEntry(YamlMap map, String key) {
@@ -360,8 +373,9 @@
packageMap[id.name] = {
'version': id.version.toString(),
'source': id.source.name,
- 'description':
- id.description.serializeForLockfile(containingDir: packageDir),
+ 'description': id.description.serializeForLockfile(
+ containingDir: packageDir,
+ ),
'dependency': _dependencyType(id.name),
};
}
@@ -387,7 +401,8 @@
///
/// Relative paths will be resolved relative to [lockFilePath]
void writeToFile(String lockFilePath, SystemCache cache) {
- final windowsLineEndings = fileExists(lockFilePath) &&
+ final windowsLineEndings =
+ fileExists(lockFilePath) &&
detectWindowsLineEndings(readTextFile(lockFilePath));
final serialized = serialize(p.dirname(lockFilePath), cache);
diff --git a/lib/src/log.dart b/lib/src/log.dart
index 988fba1..0abaeae 100644
--- a/lib/src/log.dart
+++ b/lib/src/log.dart
@@ -247,8 +247,10 @@
List<String> arguments,
String workingDirectory,
) {
- io("Spawning \"$executable ${arguments.join(' ')}\" in "
- '${p.absolute(workingDirectory)}');
+ io(
+ "Spawning \"$executable ${arguments.join(' ')}\" in "
+ '${p.absolute(workingDirectory)}',
+ );
}
/// Logs the results of running [executable].
@@ -269,8 +271,10 @@
var numLines = 0;
for (var line in output.split('\n')) {
if (++numLines > 1000) {
- buffer.writeln('[${output.length - 1000}] more lines of output '
- 'truncated...]');
+ buffer.writeln(
+ '[${output.length - 1000} more lines of output '
+ 'truncated...]',
+ );
break;
}
@@ -327,11 +331,14 @@
/// Prints the recorded log transcript to stderr.
void dumpTranscriptToStdErr() {
stderr.writeln('---- Log transcript ----');
- _transcript.forEach((entry) {
- _printToStream(stderr, entry, showLabel: true);
- }, (discarded) {
- stderr.writeln('---- ($discarded discarded) ----');
- });
+ _transcript.forEach(
+ (entry) {
+ _printToStream(stderr, entry, showLabel: true);
+ },
+ (discarded) {
+ stderr.writeln('---- ($discarded discarded) ----');
+ },
+ );
stderr.writeln('---- End log transcript ----');
}
@@ -379,10 +386,7 @@
'---- ${p.absolute(entrypoint.workspaceRoot.pubspecPath)} ----',
);
buffer.writeln(
- limitLength(
- readTextFile(entrypoint.workspaceRoot.pubspecPath),
- 5000,
- ),
+ limitLength(readTextFile(entrypoint.workspaceRoot.pubspecPath), 5000),
);
buffer.writeln('---- End pubspec.yaml ----');
} else {
@@ -400,11 +404,14 @@
buffer.writeln('---- Log transcript ----');
- _transcript.forEach((entry) {
- _printToStream(buffer, entry, showLabel: true);
- }, (discarded) {
- buffer.writeln('---- ($discarded entries discarded) ----');
- });
+ _transcript.forEach(
+ (entry) {
+ _printToStream(buffer, entry, showLabel: true);
+ },
+ (discarded) {
+ buffer.writeln('---- ($discarded entries discarded) ----');
+ },
+ );
buffer.writeln('---- End log transcript ----');
ensureDir(p.dirname(path));
try {
@@ -618,8 +625,9 @@
if (error is SourceSpanException && error.span?.sourceUrl != null) {
// Normalize paths and make them absolute for backwards compatibility with
// the protocol used by the analyzer.
- errorJson['path'] =
- p.normalize(p.absolute(p.fromUri(error.span!.sourceUrl)));
+ errorJson['path'] = p.normalize(
+ p.absolute(p.fromUri(error.span!.sourceUrl)),
+ );
}
if (error is FileException) {
@@ -655,9 +663,9 @@
String Function(String)? format,
String? prefix,
String? suffix,
- }) : _format = format ?? _noFormat,
- _prefix = prefix ?? '',
- _suffix = suffix ?? '';
+ }) : _format = format ?? _noFormat,
+ _prefix = prefix ?? '',
+ _suffix = suffix ?? '';
String formatted({required bool useColors}) {
return useColors
@@ -676,14 +684,10 @@
String value,
String Function(String) format, {
String? prefix = '',
-}) =>
- FormattedString(value, format: format, prefix: prefix);
+}) => FormattedString(value, format: format, prefix: prefix);
/// Formats a table of [rows], inserting enough spaces to make columns line up.
-List<String> renderTable(
- List<List<FormattedString>> rows,
- bool useColors,
-) {
+List<String> renderTable(List<List<FormattedString>> rows, bool useColors) {
// Compute the width of each column by taking the max across all rows.
final columnWidths = <int, int>{};
for (var i = 0; i < rows.length; i++) {
diff --git a/lib/src/oauth2.dart b/lib/src/oauth2.dart
index 2e286cc..cc070b1 100644
--- a/lib/src/oauth2.dart
+++ b/lib/src/oauth2.dart
@@ -42,8 +42,10 @@
/// The URL from which the pub client will retrieve Google's OIDC endpoint URIs.
///
/// [Google OpenID Connect documentation]: https://developers.google.com/identity/openid-connect/openid-connect#discovery
-final _oidcDiscoveryDocumentEndpoint =
- Uri.https('accounts.google.com', '/.well-known/openid-configuration');
+final _oidcDiscoveryDocumentEndpoint = Uri.https(
+ 'accounts.google.com',
+ '/.well-known/openid-configuration',
+);
/// The URL to which the user will be directed to authorize the pub client to
/// get an OAuth2 access token.
@@ -52,9 +54,10 @@
/// a refresh token from the server. See the [Google OAuth2 documentation][].
///
/// [Google OAuth2 documentation]: https://developers.google.com/accounts/docs/OAuth2WebServer#offline
-final _authorizationEndpoint =
- Uri.parse('https://accounts.google.com/o/oauth2/auth?access_type=offline'
- '&approval_prompt=force');
+final _authorizationEndpoint = Uri.parse(
+ 'https://accounts.google.com/o/oauth2/auth?access_type=offline'
+ '&approval_prompt=force',
+);
/// The URL from which the pub client will request an access token once it's
/// been authorized by the user.
@@ -116,32 +119,36 @@
/// prompting the user for their authorization. It will also re-authorize and
/// re-run [fn] if a recoverable authorization error is detected.
Future<T> withClient<T>(Future<T> Function(http.Client) fn) {
- return _getClient().then((client) {
- return fn(client).whenComplete(() {
- // TODO(sigurdm): refactor the http subsystem, so we can close [client]
- // here.
+ return _getClient()
+ .then((client) {
+ return fn(client).whenComplete(() {
+ // TODO(sigurdm): refactor the http subsystem, so we can close
+ // [client] here.
- // Be sure to save the credentials even when an error happens.
- _saveCredentials(client.credentials);
- });
- }).catchError((Object error) {
- if (error is _ExpirationException) {
- log.error("Pub's authorization to upload packages has expired and "
- "can't be automatically refreshed.");
- return withClient(fn);
- } else if (error is _AuthorizationException) {
- var message = 'OAuth2 authorization failed';
- if (error.description != null) {
- message = '$message (${error.description})';
- }
- log.error('$message.');
- _clearCredentials();
- return withClient(fn);
- } else {
- // ignore: only_throw_errors
- throw error;
- }
- });
+ // Be sure to save the credentials even when an error happens.
+ _saveCredentials(client.credentials);
+ });
+ })
+ .catchError((Object error) {
+ if (error is _ExpirationException) {
+ log.error(
+ "Pub's authorization to upload packages has expired and "
+ "can't be automatically refreshed.",
+ );
+ return withClient(fn);
+ } else if (error is _AuthorizationException) {
+ var message = 'OAuth2 authorization failed';
+ if (error.description != null) {
+ message = '$message (${error.description})';
+ }
+ log.error('$message.');
+ _clearCredentials();
+ return withClient(fn);
+ } else {
+ // ignore: only_throw_errors
+ throw error;
+ }
+ });
}
/// Gets a new OAuth2 client.
@@ -180,8 +187,10 @@
final credentials = Credentials.fromJson(readTextFile(path));
if (credentials.isExpired && !credentials.canRefresh) {
- log.error("Pub's authorization to upload packages has expired and "
- "can't be automatically refreshed.");
+ log.error(
+ "Pub's authorization to upload packages has expired and "
+ "can't be automatically refreshed.",
+ );
return null; // null means re-authorize.
}
@@ -189,8 +198,10 @@
} catch (e) {
// Don't print the error message itself here. I might be leaking data about
// credentials.
- log.error('Warning: could not load the saved OAuth2 credentials.\n'
- 'Obtaining new credentials...');
+ log.error(
+ 'Warning: could not load the saved OAuth2 credentials.\n'
+ 'Obtaining new credentials...',
+ );
return null; // null means re-authorize.
}
}
@@ -220,7 +231,9 @@
/// Returns a Future that completes to a fully-authorized [_Client].
Future<_Client> _authorize() async {
final grant = _AuthorizationCodeGrant(
- _identifier, _authorizationEndpoint, tokenEndpoint,
+ _identifier,
+ _authorizationEndpoint,
+ tokenEndpoint,
secret: _secret,
// Google's OAuth2 API doesn't support basic auth.
basicAuth: false,
@@ -242,8 +255,9 @@
// Closing the server here is safe, since it will wait until the response
// is sent to actually shut down.
server.close();
- completer
- .complete(grant.handleAuthorizationResponse(queryToMap(queryString)));
+ completer.complete(
+ grant.handleAuthorizationResponse(queryToMap(queryString)),
+ );
return shelf.Response.found('https://pub.dev/authorized');
});
@@ -254,10 +268,11 @@
);
log.message(
- 'Pub needs your authorization to upload packages on your behalf.\n'
- 'In a web browser, go to $authUrl\n'
- 'Then click "Allow access".\n\n'
- 'Waiting for your authorization...');
+ 'Pub needs your authorization to upload packages on your behalf.\n'
+ 'In a web browser, go to $authUrl\n'
+ 'Then click "Allow access".\n\n'
+ 'Waiting for your authorization...',
+ );
final client = await completer.future;
log.message('Successfully authorized.\n');
@@ -270,10 +285,12 @@
/// See https://developers.google.com/identity/openid-connect/openid-connect#discovery
Future<Map> fetchOidcDiscoveryDocument() async {
final discoveryResponse = await retryForHttp(
- 'fetching Google\'s OpenID Connect Discovery document', () async {
- final request = http.Request('GET', _oidcDiscoveryDocumentEndpoint);
- return await globalHttpClient.fetch(request);
- });
+ 'fetching Google\'s OpenID Connect Discovery document',
+ () async {
+ final request = http.Request('GET', _oidcDiscoveryDocumentEndpoint);
+ return await globalHttpClient.fetch(request);
+ },
+ );
return parseJsonResponse(discoveryResponse);
}
@@ -419,14 +436,14 @@
http.Client? httpClient,
_CredentialsRefreshedCallback? onCredentialsRefreshed,
Map<String, dynamic> Function(MediaType? contentType, String body)?
- getParameters,
+ getParameters,
String? codeVerifier,
- }) : _basicAuth = basicAuth,
- _httpClient = httpClient ?? http.Client(),
- _delimiter = delimiter ?? ' ',
- _getParameters = getParameters ?? parseJsonParameters,
- _onCredentialsRefreshed = onCredentialsRefreshed,
- _codeVerifier = codeVerifier ?? _createCodeVerifier();
+ }) : _basicAuth = basicAuth,
+ _httpClient = httpClient ?? http.Client(),
+ _delimiter = delimiter ?? ' ',
+ _getParameters = getParameters ?? parseJsonParameters,
+ _onCredentialsRefreshed = onCredentialsRefreshed,
+ _codeVerifier = codeVerifier ?? _createCodeVerifier();
/// Returns the URL to which the resource owner should be redirected to
/// authorize this client.
@@ -509,13 +526,17 @@
if (_stateString != null) {
if (!parameters.containsKey('state')) {
- throw FormatException('Invalid OAuth response for '
- '"$authorizationEndpoint": parameter "state" expected to be '
- '"$_stateString", was missing.');
+ throw FormatException(
+ 'Invalid OAuth response for '
+ '"$authorizationEndpoint": parameter "state" expected to be '
+ '"$_stateString", was missing.',
+ );
} else if (parameters['state'] != _stateString) {
- throw FormatException('Invalid OAuth response for '
- '"$authorizationEndpoint": parameter "state" expected to be '
- '"$_stateString", was "${parameters['state']}".');
+ throw FormatException(
+ 'Invalid OAuth response for '
+ '"$authorizationEndpoint": parameter "state" expected to be '
+ '"$_stateString", was "${parameters['state']}".',
+ );
}
}
@@ -525,9 +546,11 @@
final uri = uriString == null ? null : Uri.parse(uriString);
throw _AuthorizationException(parameters['error']!, description, uri);
} else if (!parameters.containsKey('code')) {
- throw FormatException('Invalid OAuth response for '
- '"$authorizationEndpoint": did not contain required parameter '
- '"code".');
+ throw FormatException(
+ 'Invalid OAuth response for '
+ '"$authorizationEndpoint": did not contain required parameter '
+ '"code".',
+ );
}
return _handleAuthorizationCode(parameters['code']);
@@ -583,8 +606,11 @@
if (secret != null) body['client_secret'] = secret;
}
- final response =
- await _httpClient!.post(tokenEndpoint, headers: headers, body: body);
+ final response = await _httpClient!.post(
+ tokenEndpoint,
+ headers: headers,
+ body: body,
+ );
final credentials = _handleAccessTokenResponse(
response,
@@ -606,7 +632,8 @@
// Randomly generate a 128 character string to be used as the PKCE code
// verifier.
- static String _createCodeVerifier() => List.generate(
+ static String _createCodeVerifier() =>
+ List.generate(
128,
(i) => _charset[Random.secure().nextInt(_charset.length)],
).join();
@@ -753,9 +780,9 @@
_CredentialsRefreshedCallback? onCredentialsRefreshed,
bool basicAuth = true,
http.Client? httpClient,
- }) : _basicAuth = basicAuth,
- _onCredentialsRefreshed = onCredentialsRefreshed,
- _httpClient = httpClient ?? http.Client() {
+ }) : _basicAuth = basicAuth,
+ _onCredentialsRefreshed = onCredentialsRefreshed,
+ _httpClient = httpClient ?? http.Client() {
if (identifier == null && secret != null) {
throw ArgumentError('secret may not be passed without identifier.');
}
@@ -787,8 +814,9 @@
return response;
}
- final challenge = challenges
- .firstWhereOrNull((challenge) => challenge.scheme == 'bearer');
+ final challenge = challenges.firstWhereOrNull(
+ (challenge) => challenge.scheme == 'bearer',
+ );
if (challenge == null) return response;
final params = challenge.parameters;
@@ -956,14 +984,14 @@
this.expiration,
String? delimiter,
Map<String, dynamic> Function(MediaType? mediaType, String body)?
- getParameters,
- }) : scopes = UnmodifiableListView(
- // Explicitly type-annotate the list literal to work around
- // sdk#24202.
- scopes == null ? <String>[] : scopes.toList(),
- ),
- _delimiter = delimiter ?? ' ',
- _getParameters = getParameters ?? parseJsonParameters;
+ getParameters,
+ }) : scopes = UnmodifiableListView(
+ // Explicitly type-annotate the list literal to work around
+ // sdk#24202.
+ scopes == null ? <String>[] : scopes.toList(),
+ ),
+ _delimiter = delimiter ?? ' ',
+ _getParameters = getParameters ?? parseJsonParameters;
/// Loads a set of credentials from a JSON-serialized form.
///
@@ -1040,13 +1068,13 @@
/// Nothing is guaranteed about the output except that it's valid JSON and
/// compatible with [Credentials.toJson].
String toJson() => jsonEncode({
- 'accessToken': accessToken,
- 'refreshToken': refreshToken,
- 'idToken': idToken,
- 'tokenEndpoint': tokenEndpoint?.toString(),
- 'scopes': scopes,
- 'expiration': expiration?.millisecondsSinceEpoch,
- });
+ 'accessToken': accessToken,
+ 'refreshToken': refreshToken,
+ 'idToken': idToken,
+ 'tokenEndpoint': tokenEndpoint?.toString(),
+ 'scopes': scopes,
+ 'expiration': expiration?.millisecondsSinceEpoch,
+ });
/// Returns a new set of refreshed credentials.
///
@@ -1079,11 +1107,15 @@
final startTime = DateTime.now();
final tokenEndpoint = this.tokenEndpoint;
if (refreshToken == null) {
- throw StateError("Can't refresh credentials without a refresh "
- 'token.');
+ throw StateError(
+ "Can't refresh credentials without a refresh "
+ 'token.',
+ );
} else if (tokenEndpoint == null) {
- throw StateError("Can't refresh credentials without a token "
- 'endpoint.');
+ throw StateError(
+ "Can't refresh credentials without a token "
+ 'endpoint.',
+ );
}
final headers = <String, String>{};
@@ -1098,8 +1130,11 @@
if (secret != null) body['client_secret'] = secret;
}
- final response =
- await httpClient.post(tokenEndpoint, headers: headers, body: body);
+ final response = await httpClient.post(
+ tokenEndpoint,
+ headers: headers,
+ body: body,
+ );
final credentials = _handleAccessTokenResponse(
response,
tokenEndpoint,
@@ -1165,7 +1200,7 @@
List<String>? scopes,
String delimiter, {
Map<String, dynamic> Function(MediaType? contentType, String body)?
- getParameters,
+ getParameters,
}) {
getParameters ??= parseJsonParameters;
@@ -1179,8 +1214,10 @@
throw const FormatException('Missing Content-Type string.');
}
- final parameters =
- getParameters(MediaType.parse(contentTypeString), response.body);
+ final parameters = getParameters(
+ MediaType.parse(contentTypeString),
+ response.body,
+ );
for (var requiredParameter in ['access_token', 'token_type']) {
if (!parameters.containsKey(requiredParameter)) {
@@ -1189,8 +1226,9 @@
);
} else if (parameters[requiredParameter] is! String) {
throw FormatException(
- 'required parameter "$requiredParameter" was not a string, was '
- '"${parameters[requiredParameter]}"');
+ 'required parameter "$requiredParameter" was not a string, was '
+ '"${parameters[requiredParameter]}"',
+ );
}
}
@@ -1232,9 +1270,12 @@
final scope = parameters['scope'] as String?;
if (scope != null) scopes = scope.split(delimiter);
- final expiration = expiresIn == null
- ? null
- : startTime.add(Duration(seconds: expiresIn as int) - _expirationGrace);
+ final expiration =
+ expiresIn == null
+ ? null
+ : startTime.add(
+ Duration(seconds: expiresIn as int) - _expirationGrace,
+ );
return Credentials(
parameters['access_token'] as String,
@@ -1245,8 +1286,10 @@
expiration: expiration,
);
} on FormatException catch (e) {
- throw FormatException('Invalid OAuth response for "$tokenEndpoint": '
- '${e.message}.\n\n${response.body}');
+ throw FormatException(
+ 'Invalid OAuth response for "$tokenEndpoint": '
+ '${e.message}.\n\n${response.body}',
+ );
}
}
@@ -1266,8 +1309,10 @@
if (reasonPhrase != null && reasonPhrase.isNotEmpty) {
reason = ' $reasonPhrase';
}
- throw FormatException('OAuth request for "$tokenEndpoint" failed '
- 'with status ${response.statusCode}$reason.\n\n${response.body}');
+ throw FormatException(
+ 'OAuth request for "$tokenEndpoint" failed '
+ 'with status ${response.statusCode}$reason.\n\n${response.body}',
+ );
}
final contentTypeString = response.headers['content-type'];
@@ -1279,8 +1324,10 @@
if (!parameters.containsKey('error')) {
throw const FormatException('did not contain required parameter "error"');
} else if (parameters['error'] is! String) {
- throw FormatException('required parameter "error" was not a string, was '
- '"${parameters["error"]}"');
+ throw FormatException(
+ 'required parameter "error" was not a string, was '
+ '"${parameters["error"]}"',
+ );
}
for (var name in ['error_description', 'error_uri']) {
@@ -1302,10 +1349,8 @@
}
/// The type of a callback that parses parameters from an HTTP response.
-typedef _GetParameters = Map<String, dynamic> Function(
- MediaType? contentType,
- String body,
-);
+typedef _GetParameters =
+ Map<String, dynamic> Function(MediaType? contentType, String body);
/// Parses parameters from a response with a JSON body, as per the
/// [OAuth2 spec][].
@@ -1333,8 +1378,8 @@
/// Adds additional query parameters to [url], overwriting the original
/// parameters if a name conflict occurs.
Uri _addQueryParameters(Uri url, Map<String, String> parameters) => url.replace(
- queryParameters: Map.from(url.queryParameters)..addAll(parameters),
- );
+ queryParameters: Map.from(url.queryParameters)..addAll(parameters),
+);
String _basicAuthHeader(String identifier, String secret) {
final userPass = '${Uri.encodeFull(identifier)}:${Uri.encodeFull(secret)}';
diff --git a/lib/src/package.dart b/lib/src/package.dart
index 1a1644f..a92eaec 100644
--- a/lib/src/package.dart
+++ b/lib/src/package.dart
@@ -130,11 +130,11 @@
// If the entire package directory is ignored, don't consider it part of a
// git repo. `git check-ignore` will return a status code of 0 for
// ignored, 1 for not ignored, and 128 for not a Git repo.
- final result = runProcessSync(
- git.command!,
- ['check-ignore', '--quiet', '.'],
- workingDir: dir,
- );
+ final result = runProcessSync(git.command!, [
+ 'check-ignore',
+ '--quiet',
+ '.',
+ ], workingDir: dir);
return result.exitCode == 1;
}
}
@@ -164,7 +164,8 @@
String path, {
String? expectedName,
required bool withPubspecOverrides,
- }) loadPubspec,
+ })
+ loadPubspec,
}) {
final pubspec = loadPubspec(
dir,
@@ -172,24 +173,23 @@
expectedName: expectedName,
);
- final workspacePackages = pubspec.workspace.map(
- (workspacePath) {
- try {
- return Package.load(
- p.join(dir, workspacePath),
- loadPubspec: loadPubspec,
- withPubspecOverrides: withPubspecOverrides,
- );
- } on FileException catch (e) {
- final pubspecPath = p.join(dir, 'pubspec.yaml');
- throw FileException(
- '${e.message}\n'
- 'That was included in the workspace of $pubspecPath.',
- e.path,
- );
- }
- },
- ).toList();
+ final workspacePackages =
+ pubspec.workspace.map((workspacePath) {
+ try {
+ return Package.load(
+ p.join(dir, workspacePath),
+ loadPubspec: loadPubspec,
+ withPubspecOverrides: withPubspecOverrides,
+ );
+ } on FileException catch (e) {
+ final pubspecPath = p.join(dir, 'pubspec.yaml');
+ throw FileException(
+ '${e.message}\n'
+ 'That was included in the workspace of $pubspecPath.',
+ e.path,
+ );
+ }
+ }).toList();
for (final package in workspacePackages) {
if (package.pubspec.resolution != Resolution.workspace) {
fail('''
@@ -265,13 +265,14 @@
}) {
final packageDir = dir;
final root = git.repoRoot(packageDir) ?? packageDir;
- beneath = p
- .toUri(
- p.normalize(
- p.relative(p.join(packageDir, beneath ?? '.'), from: root),
- ),
- )
- .path;
+ beneath =
+ p
+ .toUri(
+ p.normalize(
+ p.relative(p.join(packageDir, beneath ?? '.'), from: root),
+ ),
+ )
+ .path;
if (beneath == './') beneath = '.';
String resolve(String path) {
if (Platform.isWindows) {
@@ -292,9 +293,7 @@
if (!link.existsSync()) {
return;
}
- throw DataException(
- 'Could not resolve symbolic link $path. $e',
- );
+ throw DataException('Could not resolve symbolic link $path. $e');
}
}
}
@@ -310,100 +309,105 @@
Directory(path).resolveSymbolicLinksSync();
}
- final result = Ignore.listFiles(
- beneath: beneath,
- listDir: (dir) {
- final resolvedDir = p.normalize(resolve(dir));
- verifyLink(resolvedDir);
+ final result =
+ Ignore.listFiles(
+ beneath: beneath,
+ listDir: (dir) {
+ final resolvedDir = p.normalize(resolve(dir));
+ verifyLink(resolvedDir);
- {
- final canonicalized = p.canonicalize(resolvedDir);
- final symlinkResolvedDir = resolveDirSymlinks(canonicalized);
- for (final parent in parentDirs(p.dirname(canonicalized))) {
- final symlinkResolvedParent = resolveDirSymlinks(parent);
- if (p.equals(symlinkResolvedDir, symlinkResolvedParent)) {
- dataError('''
+ {
+ final canonicalized = p.canonicalize(resolvedDir);
+ final symlinkResolvedDir = resolveDirSymlinks(canonicalized);
+ for (final parent in parentDirs(p.dirname(canonicalized))) {
+ final symlinkResolvedParent = resolveDirSymlinks(parent);
+ if (p.equals(symlinkResolvedDir, symlinkResolvedParent)) {
+ dataError('''
Pub does not support symlink cycles.
$symlinkResolvedDir => ${p.canonicalize(symlinkResolvedParent)}
''');
+ }
+ }
}
- }
- }
- var contents = Directory(resolvedDir).listSync(followLinks: false);
+ var contents = Directory(resolvedDir).listSync(followLinks: false);
- if (!recursive) {
- contents = contents.where((entity) => entity is! Directory).toList();
- }
- return contents.map((entity) {
- final relative = p.relative(entity.path, from: root);
- if (Platform.isWindows) {
- return p.posix.joinAll(p.split(relative));
- }
- return relative;
- });
- },
- ignoreForDir: (dir) {
- final pubIgnore = resolve('$dir/.pubignore');
- final gitIgnore = resolve('$dir/.gitignore');
- final ignoreFile = fileExists(pubIgnore)
- ? pubIgnore
- : (fileExists(gitIgnore) ? gitIgnore : null);
+ if (!recursive) {
+ contents =
+ contents.where((entity) => entity is! Directory).toList();
+ }
+ return contents.map((entity) {
+ final relative = p.relative(entity.path, from: root);
+ if (Platform.isWindows) {
+ return p.posix.joinAll(p.split(relative));
+ }
+ return relative;
+ });
+ },
+ ignoreForDir: (dir) {
+ final pubIgnore = resolve('$dir/.pubignore');
+ final gitIgnore = resolve('$dir/.gitignore');
+ final ignoreFile =
+ fileExists(pubIgnore)
+ ? pubIgnore
+ : (fileExists(gitIgnore) ? gitIgnore : null);
- final rules = [
- if (dir == beneath) ..._basicIgnoreRules,
- if (ignoreFile != null) readTextFile(ignoreFile),
- ];
- return rules.isEmpty
- ? null
- : Ignore(
- rules,
- onInvalidPattern: (pattern, exception) {
- log.warning(
- '$ignoreFile had invalid pattern $pattern. '
- '${exception.message}',
- );
- },
- // Ignore case on macOS and Windows, because `git clone` and
- // `git init` will set `core.ignoreCase = true` in the local
- // local `.git/config` file for the repository.
- //
- // So on Windows and macOS most users will have case-insensitive
- // behavior with `.gitignore`, hence, it seems reasonable to do
- // the same when we interpret `.gitignore` and `.pubignore`.
- //
- // There are cases where a user may have case-sensitive behavior
- // with `.gitignore` on Windows and macOS:
- //
- // (A) The user has manually overwritten the repository
- // configuration setting `core.ignoreCase = false`.
- //
- // (B) The git-clone or git-init command that create the
- // repository did not deem `core.ignoreCase = true` to be
- // appropriate. Documentation for [git-config]][1] implies
- // this might depend on whether or not the filesystem is
- // case sensitive:
- // > If true, this option enables various workarounds to
- // > enable Git to work better on filesystems that are not
- // > case sensitive, like FAT.
- // > ...
- // > The default is false, except git-clone[1] or
- // > git-init[1] will probe and set core.ignoreCase true
- // > if appropriate when the repository is created.
- //
- // In either case, it seems likely that users on Windows and
- // macOS will prefer case-insensitive matching. We specifically
- // know that some tooling will generate `.PDB` files instead of
- // `.pdb`, see: [#3003][2]
- //
- // [1]: https://git-scm.com/docs/git-config/2.14.6#Documentation/git-config.txt-coreignoreCase
- // [2]: https://github.com/dart-lang/pub/issues/3003
- ignoreCase: Platform.isMacOS || Platform.isWindows,
- );
- },
- isDir: (dir) => dirExists(resolve(dir)),
- includeDirs: includeDirs,
- ).map(resolve).toList();
+ final rules = [
+ if (dir == beneath) ..._basicIgnoreRules,
+ if (ignoreFile != null) readTextFile(ignoreFile),
+ ];
+ return rules.isEmpty
+ ? null
+ : Ignore(
+ rules,
+ onInvalidPattern: (pattern, exception) {
+ log.warning(
+ '$ignoreFile had invalid pattern $pattern. '
+ '${exception.message}',
+ );
+ },
+ // Ignore case on macOS and Windows, because `git clone` and
+ // `git init` will set `core.ignoreCase = true` in the
+ // local `.git/config` file for the repository.
+ //
+ // So on Windows and macOS most users will have
+ // case-insensitive behavior with `.gitignore`, hence, it
+ // seems reasonable to do the same when we interpret
+ // `.gitignore` and `.pubignore`.
+ //
+ // There are cases where a user may have case-sensitive
+ // behavior with `.gitignore` on Windows and macOS:
+ //
+ // (A) The user has manually overwritten the repository
+ // configuration setting `core.ignoreCase = false`.
+ //
+ // (B) The git-clone or git-init command that created the
+ // repository did not deem `core.ignoreCase = true` to be
+ // appropriate. Documentation for [git-config][1]
+ // implies this might depend on whether or not the
+ // filesystem is case sensitive:
+ // > If true, this option enables various workarounds to
+ // > enable Git to work better on filesystems that are not
+ // > case sensitive, like FAT.
+ // > ...
+ // > The default is false, except git-clone[1] or
+ // > git-init[1] will probe and set core.ignoreCase true
+ // > if appropriate when the repository is created.
+ //
+ // In either case, it seems likely that users on Windows and
+ // macOS will prefer case-insensitive matching. We
+ // specifically know that some tooling will generate `.PDB`
+ // files instead of `.pdb`, see: [#3003][2]
+ //
+ // [1]:
+ // https://git-scm.com/docs/git-config/2.14.6#Documentation/git-config.txt-coreignoreCase
+ // [2]: https://github.com/dart-lang/pub/issues/3003
+ ignoreCase: Platform.isMacOS || Platform.isWindows,
+ );
+ },
+ isDir: (dir) => dirExists(resolve(dir)),
+ includeDirs: includeDirs,
+ ).map(resolve).toList();
for (final f in result) {
verifyLink(f);
}
@@ -412,21 +416,16 @@
/// Applies [transform] to each package in the workspace and returns a derived
/// package.
- Package transformWorkspace(
- Pubspec Function(Package) transform,
- ) {
+ Package transformWorkspace(Pubspec Function(Package) transform) {
final workspace = {
for (final package in transitiveWorkspace) package.dir: package,
};
return Package.load(
dir,
withPubspecOverrides: true,
- loadPubspec: (
- path, {
- expectedName,
- required withPubspecOverrides,
- }) =>
- transform(workspace[path]!),
+ loadPubspec:
+ (path, {expectedName, required withPubspecOverrides}) =>
+ transform(workspace[path]!),
);
}
}
@@ -516,9 +515,9 @@
p.canonicalize(root.dir),
};
for (final package in root.transitiveWorkspace
- // We don't want to look at the roots parents. The first package is always
- // the root, so skip that.
- .skip(1)) {
+ // We don't want to look at the root's parents. The first package is always
+ // the root, so skip that.
+ .skip(1)) {
// Run through all parent directories until we meet another workspace
// package.
for (final dir in parentDirs(package.dir).skip(1)) {
diff --git a/lib/src/package_config.dart b/lib/src/package_config.dart
index a8726a6..408d876 100644
--- a/lib/src/package_config.dart
+++ b/lib/src/package_config.dart
@@ -141,13 +141,14 @@
generatorVersion: generatorVersion,
additionalProperties: Map.fromEntries(
root.entries.where(
- (e) => !{
- 'configVersion',
- 'packages',
- 'generated',
- 'generator',
- 'generatorVersion',
- }.contains(e.key),
+ (e) =>
+ !{
+ 'configVersion',
+ 'packages',
+ 'generated',
+ 'generator',
+ 'generatorVersion',
+ }.contains(e.key),
),
),
);
@@ -155,12 +156,12 @@
/// Convert to JSON structure.
Map<String, Object?> toJson() => {
- 'configVersion': configVersion,
- 'packages': packages.map((p) => p.toJson()).toList(),
- 'generated': generated?.toUtc().toIso8601String(),
- 'generator': generator,
- 'generatorVersion': generatorVersion?.toString(),
- }..addAll(additionalProperties);
+ 'configVersion': configVersion,
+ 'packages': packages.map((p) => p.toJson()).toList(),
+ 'generated': generated?.toUtc().toIso8601String(),
+ 'generator': generator,
+ 'generatorVersion': generatorVersion?.toString(),
+ }..addAll(additionalProperties);
// We allow the package called 'flutter_gen' to be injected into
// package_config.
@@ -289,11 +290,11 @@
/// Convert to JSON structure.
Map<String, Object?> toJson() => {
- 'name': name,
- 'rootUri': rootUri.toString(),
- if (packageUri != null) 'packageUri': packageUri.toString(),
- if (languageVersion != null) 'languageVersion': '$languageVersion',
- }..addAll(additionalProperties ?? {});
+ 'name': name,
+ 'rootUri': rootUri.toString(),
+ if (packageUri != null) 'packageUri': packageUri.toString(),
+ if (languageVersion != null) 'languageVersion': '$languageVersion',
+ }..addAll(additionalProperties ?? {});
@override
String toString() {
diff --git a/lib/src/package_graph.dart b/lib/src/package_graph.dart
index b0a85f5..9cd77a2 100644
--- a/lib/src/package_graph.dart
+++ b/lib/src/package_graph.dart
@@ -38,13 +38,14 @@
) {
final packages = {
for (final id in result.packages)
- id.name: id.isRoot
- ? entrypoint.workspaceRoot
- : Package(
- result.pubspecs[id.name]!,
- entrypoint.cache.getDirectory(id),
- [],
- ),
+ id.name:
+ id.isRoot
+ ? entrypoint.workspaceRoot
+ : Package(
+ result.pubspecs[id.name]!,
+ entrypoint.cache.getDirectory(id),
+ [],
+ ),
};
return PackageGraph(entrypoint, packages);
@@ -68,13 +69,13 @@
final closure = transitiveClosure(graph.keys, (n) => graph[n]!);
_transitiveDependencies =
mapMap<String, Set<String>, String, Set<Package>>(
- closure,
- value: (depender, names) {
- final set = names.map((name) => packages[name]!).toSet();
- set.add(packages[depender]!);
- return set;
- },
- );
+ closure,
+ value: (depender, names) {
+ final set = names.map((name) => packages[name]!).toSet();
+ set.add(packages[depender]!);
+ return set;
+ },
+ );
}
return _transitiveDependencies![package]!;
}
@@ -96,7 +97,8 @@
bool isPackageMutable(String package) {
if (!_isPackageFromImmutableSource(package)) return true;
- return transitiveDependencies(package)
- .any((dep) => !_isPackageFromImmutableSource(dep.name));
+ return transitiveDependencies(
+ package,
+ ).any((dep) => !_isPackageFromImmutableSource(dep.name));
}
}
diff --git a/lib/src/package_name.dart b/lib/src/package_name.dart
index c74ec4a..647561e 100644
--- a/lib/src/package_name.dart
+++ b/lib/src/package_name.dart
@@ -86,10 +86,10 @@
/// Creates an ID for the given root package.
static PackageId root(Package package) => PackageId(
- package.name,
- package.version,
- ResolvedRootDescription(RootDescription(package.dir)),
- );
+ package.name,
+ package.version,
+ ResolvedRootDescription(RootDescription(package.dir)),
+ );
@override
int get hashCode => Object.hash(name, version, description);
@@ -242,14 +242,14 @@
this.showVersion,
bool? showSource,
bool? showDescription,
- }) : showSource = showDescription == true ? true : showSource,
- showDescription = showDescription ?? false;
+ }) : showSource = showDescription == true ? true : showSource,
+ showDescription = showDescription ?? false;
/// Returns a [PackageDetail] with the maximum amount of detail between `this`
/// and [other].
PackageDetail max(PackageDetail other) => PackageDetail(
- showVersion: showVersion! || other.showVersion!,
- showSource: showSource! || other.showSource!,
- showDescription: showDescription || other.showDescription,
- );
+ showVersion: showVersion! || other.showVersion!,
+ showSource: showSource! || other.showSource!,
+ showDescription: showDescription || other.showDescription,
+ );
}
diff --git a/lib/src/pubspec.dart b/lib/src/pubspec.dart
index 16755bc..0923ba7 100644
--- a/lib/src/pubspec.dart
+++ b/lib/src/pubspec.dart
@@ -126,9 +126,9 @@
'workspace' => Resolution.workspace,
'external' => Resolution.external,
_ => _error(
- '"resolution" must be one of `workspace`, `local`, `external`',
- resolutionNode!.span,
- )
+ '"resolution" must be one of `workspace`, `local`, `external`',
+ resolutionNode!.span,
+ ),
};
}();
@@ -228,9 +228,10 @@
final constraints = {
'dart': SdkConstraint.interpretDartSdkConstraint(
originalDartSdkConstraint,
- defaultUpperBoundConstraint: _includeDefaultSdkConstraint
- ? _defaultUpperBoundSdkConstraint
- : null,
+ defaultUpperBoundConstraint:
+ _includeDefaultSdkConstraint
+ ? _defaultUpperBoundSdkConstraint
+ : null,
),
};
@@ -250,9 +251,10 @@
_packageName,
_FileType.pubspec,
);
- constraints[name] = name == 'flutter'
- ? SdkConstraint.interpretFlutterSdkConstraint(constraint)
- : SdkConstraint(constraint);
+ constraints[name] =
+ name == 'flutter'
+ ? SdkConstraint.interpretFlutterSdkConstraint(constraint)
+ : SdkConstraint(constraint);
});
}
return constraints;
@@ -311,19 +313,19 @@
String dir, {
String? expectedName,
required bool withPubspecOverrides,
- }) loadRootWithSources(SourceRegistry sources) {
+ })
+ loadRootWithSources(SourceRegistry sources) {
return (
String dir, {
String? expectedName,
required bool withPubspecOverrides,
- }) =>
- Pubspec.load(
- dir,
- sources,
- expectedName: expectedName,
- allowOverridesFile: withPubspecOverrides,
- containingDescription: RootDescription(dir),
- );
+ }) => Pubspec.load(
+ dir,
+ sources,
+ expectedName: expectedName,
+ allowOverridesFile: withPubspecOverrides,
+ containingDescription: RootDescription(dir),
+ );
}
Pubspec(
@@ -338,29 +340,34 @@
this.workspace = const <String>[],
this.dependencyOverridesFromOverridesFile = false,
this.resolution = Resolution.none,
- }) : _dependencies = dependencies == null
- ? null
- : {for (final d in dependencies) d.name: d},
- _devDependencies = devDependencies == null
- ? null
- : {for (final d in devDependencies) d.name: d},
- _dependencyOverrides = dependencyOverrides == null
- ? null
- : {for (final d in dependencyOverrides) d.name: d},
- _givenSdkConstraints = sdkConstraints ??
- UnmodifiableMapView({'dart': SdkConstraint(VersionConstraint.any)}),
- _includeDefaultSdkConstraint = false,
- sources = sources ??
- ((String? name) => throw StateError('No source registry given')),
- _overridesFileFields = null,
- // This is a dummy value. Dependencies should already be resolved, so we
- // never need to do relative resolutions.
- _containingDescription = RootDescription('.'),
- super(
- fields == null ? YamlMap() : YamlMap.wrap(fields),
- name: name,
- version: version,
- );
+ }) : _dependencies =
+ dependencies == null
+ ? null
+ : {for (final d in dependencies) d.name: d},
+ _devDependencies =
+ devDependencies == null
+ ? null
+ : {for (final d in devDependencies) d.name: d},
+ _dependencyOverrides =
+ dependencyOverrides == null
+ ? null
+ : {for (final d in dependencyOverrides) d.name: d},
+ _givenSdkConstraints =
+ sdkConstraints ??
+ UnmodifiableMapView({'dart': SdkConstraint(VersionConstraint.any)}),
+ _includeDefaultSdkConstraint = false,
+ sources =
+ sources ??
+ ((String? name) => throw StateError('No source registry given')),
+ _overridesFileFields = null,
+ // This is a dummy value. Dependencies should already be resolved, so we
+ // never need to do relative resolutions.
+ _containingDescription = RootDescription('.'),
+ super(
+ fields == null ? YamlMap() : YamlMap.wrap(fields),
+ name: name,
+ version: version,
+ );
/// Returns a Pubspec object for an already-parsed map representing its
/// contents.
@@ -376,22 +383,24 @@
String? expectedName,
Uri? location,
required Description containingDescription,
- }) : _overridesFileFields = overridesFields,
- _includeDefaultSdkConstraint = true,
- _givenSdkConstraints = null,
- dependencyOverridesFromOverridesFile = overridesFields != null &&
- overridesFields.containsKey('dependency_overrides'),
- _containingDescription = containingDescription,
- super(
- fields is YamlMap
- ? fields
- : YamlMap.wrap(fields, sourceUrl: location),
- ) {
+ }) : _overridesFileFields = overridesFields,
+ _includeDefaultSdkConstraint = true,
+ _givenSdkConstraints = null,
+ dependencyOverridesFromOverridesFile =
+ overridesFields != null &&
+ overridesFields.containsKey('dependency_overrides'),
+ _containingDescription = containingDescription,
+ super(
+ fields is YamlMap ? fields : YamlMap.wrap(fields, sourceUrl: location),
+ ) {
if (overridesFields != null) {
overridesFields.nodes.forEach((key, _) {
final keyNode = key as YamlNode;
- if (!const {'dependency_overrides', 'resolution', 'workspace'}
- .contains(keyNode.value)) {
+ if (!const {
+ 'dependency_overrides',
+ 'resolution',
+ 'workspace',
+ }.contains(keyNode.value)) {
throw SourceSpanApplicationException(
'pubspec_overrides.yaml only supports the '
'`dependency_overrides`, `resolution` and `workspace` fields.',
@@ -526,16 +535,16 @@
///
/// This will return at most one error for each field.
List<SourceSpanApplicationException> get allErrors => _collectErrorsFor([
- () => name,
- () => version,
- () => dependencies,
- () => devDependencies,
- () => publishTo,
- () => executables,
- () => falseSecrets,
- () => sdkConstraints,
- () => ignoredAdvisories,
- ]);
+ () => name,
+ () => version,
+ () => dependencies,
+ () => devDependencies,
+ () => publishTo,
+ () => executables,
+ () => falseSecrets,
+ () => sdkConstraints,
+ () => ignoredAdvisories,
+ ]);
/// Returns the type of dependency from this package onto [name].
DependencyType dependencyType(String? name) {
@@ -579,8 +588,9 @@
_error('"$field" field must be a map.', node.span);
}
- final nonStringNode = node.nodes.keys
- .firstWhereOrNull((e) => e is YamlScalar && e.value is! String);
+ final nonStringNode = node.nodes.keys.firstWhereOrNull(
+ (e) => e is YamlScalar && e.value is! String,
+ );
if (nonStringNode != null) {
_error(
'A dependency name must be a string.',
@@ -588,87 +598,83 @@
);
}
- node.nodes.forEach(
- (nameNode, specNode) {
- final name = (nameNode as YamlNode).value;
- if (name is! String) {
- _error('A dependency name must be a string.', nameNode.span);
- }
- if (!packageNameRegExp.hasMatch(name)) {
- _error('Not a valid package name.', nameNode.span);
- }
- final spec = specNode.value;
- if (packageName != null && name == packageName) {
- _error('A package may not list itself as a dependency.', nameNode.span);
- }
+ node.nodes.forEach((nameNode, specNode) {
+ final name = (nameNode as YamlNode).value;
+ if (name is! String) {
+ _error('A dependency name must be a string.', nameNode.span);
+ }
+ if (!packageNameRegExp.hasMatch(name)) {
+ _error('Not a valid package name.', nameNode.span);
+ }
+ final spec = specNode.value;
+ if (packageName != null && name == packageName) {
+ _error('A package may not list itself as a dependency.', nameNode.span);
+ }
- final String? sourceName;
- VersionConstraint versionConstraint = VersionRange();
- YamlNode? descriptionNode;
- if (spec == null) {
- sourceName = null;
- } else if (spec is String) {
- sourceName = null;
- versionConstraint =
- _parseVersionConstraint(specNode, packageName, fileType);
- } else if (specNode is YamlMap) {
- // Don't write to the immutable YAML map.
- final versionNode = specNode.nodes['version'];
- versionConstraint = _parseVersionConstraint(
- versionNode,
- packageName,
- fileType,
- );
- final otherEntries = specNode.nodes.entries
- .where((entry) => (entry.key as YamlNode).value != 'version')
- .toList();
- if (otherEntries.length > 1) {
- _error('A dependency may only have one source.', specNode.span);
- } else if (otherEntries.isEmpty) {
- // Default to a hosted dependency if no source is specified.
- sourceName = 'hosted';
- } else {
- switch (otherEntries.single) {
- case MapEntry(
- key: YamlScalar(value: final String s),
- value: final d
- ):
- sourceName = s;
- descriptionNode = d;
- case MapEntry(key: final k, value: _):
- _error(
- 'A source name must be a string.',
- (k as YamlNode).span,
- );
- }
- }
- } else {
- _error(
- 'A dependency specification must be a string or a mapping.',
- specNode.span,
- );
- }
-
- // Let the source validate the description.
- final ref = _wrapFormatException(
- 'description',
- descriptionNode?.span,
- () {
- return sources(sourceName).parseRef(
- name,
- descriptionNode?.value,
- containingDescription: containingDescription,
- languageVersion: languageVersion,
- );
- },
+ final String? sourceName;
+ VersionConstraint versionConstraint = VersionRange();
+ YamlNode? descriptionNode;
+ if (spec == null) {
+ sourceName = null;
+ } else if (spec is String) {
+ sourceName = null;
+ versionConstraint = _parseVersionConstraint(
+ specNode,
packageName,
fileType,
- targetPackage: name,
);
+ } else if (specNode is YamlMap) {
+ // Don't write to the immutable YAML map.
+ final versionNode = specNode.nodes['version'];
+ versionConstraint = _parseVersionConstraint(
+ versionNode,
+ packageName,
+ fileType,
+ );
+ final otherEntries =
+ specNode.nodes.entries
+ .where((entry) => (entry.key as YamlNode).value != 'version')
+ .toList();
+ if (otherEntries.length > 1) {
+ _error('A dependency may only have one source.', specNode.span);
+ } else if (otherEntries.isEmpty) {
+ // Default to a hosted dependency if no source is specified.
+ sourceName = 'hosted';
+ } else {
+ switch (otherEntries.single) {
+ case MapEntry(key: YamlScalar(value: final String s), value: final d):
+ sourceName = s;
+ descriptionNode = d;
+ case MapEntry(key: final k, value: _):
+ _error('A source name must be a string.', (k as YamlNode).span);
+ }
+ }
+ } else {
+ _error(
+ 'A dependency specification must be a string or a mapping.',
+ specNode.span,
+ );
+ }
- dependencies[name] = ref.withConstraint(versionConstraint);
- },
- );
+ // Let the source validate the description.
+ final ref = _wrapFormatException(
+ 'description',
+ descriptionNode?.span,
+ () {
+ return sources(sourceName).parseRef(
+ name,
+ descriptionNode?.value,
+ containingDescription: containingDescription,
+ languageVersion: languageVersion,
+ );
+ },
+ packageName,
+ fileType,
+ targetPackage: name,
+ );
+
+ dependencies[name] = ref.withConstraint(versionConstraint);
+ });
return dependencies;
}
@@ -729,7 +735,8 @@
var msg = 'Invalid $description';
final typeName = _fileTypeName(fileType);
if (targetPackage != null) {
- msg = '$msg in the "$packageName" $typeName on the "$targetPackage" '
+ msg =
+ '$msg in the "$packageName" $typeName on the "$targetPackage" '
'dependency';
}
msg = '$msg: ${e.message}';
@@ -742,10 +749,7 @@
throw SourceSpanApplicationException(message, span, hint: hint);
}
-enum _FileType {
- pubspec,
- pubspecOverrides,
-}
+enum _FileType { pubspec, pubspecOverrides }
String _fileTypeName(_FileType type) {
switch (type) {
@@ -783,9 +787,10 @@
constraint is VersionRange &&
constraint.max == null &&
defaultUpperBoundConstraint.allowsAny(constraint)) {
- constraint = VersionConstraint.intersection(
- [constraint, defaultUpperBoundConstraint],
- );
+ constraint = VersionConstraint.intersection([
+ constraint,
+ defaultUpperBoundConstraint,
+ ]);
}
// If a package is null safe it should also be compatible with dart 3.
// Therefore we rewrite a null-safety enabled constraint with the upper
diff --git a/lib/src/pubspec_parse.dart b/lib/src/pubspec_parse.dart
index 3f9824e..394af60 100644
--- a/lib/src/pubspec_parse.dart
+++ b/lib/src/pubspec_parse.dart
@@ -14,8 +14,9 @@
/// This allows dot-separated valid Dart identifiers. The dots are there for
/// compatibility with Google's internal Dart packages, but they may not be used
/// when publishing a package to pub.dev.
-final packageNameRegExp =
- RegExp('^${identifierRegExp.pattern}(\\.${identifierRegExp.pattern})*\$');
+final packageNameRegExp = RegExp(
+ '^${identifierRegExp.pattern}(\\.${identifierRegExp.pattern})*\$',
+);
/// Helper class for pubspec parsing to:
/// - extract the fields and methods that are reusable outside of `pub` client,
@@ -30,12 +31,9 @@
/// This includes the fields from which other properties are derived.
final YamlMap fields;
- PubspecBase(
- this.fields, {
- String? name,
- Version? version,
- }) : _name = name,
- _version = version;
+ PubspecBase(this.fields, {String? name, Version? version})
+ : _name = name,
+ _version = version;
/// The package's name.
String get name => _name ??= fields.expectPackageNameField();
@@ -123,9 +121,9 @@
advisoryIDs = <String>{};
Never ignoredAdvisoriesError(SourceSpan span) => _error(
- '"ignored_advisories" field must be a list of advisory IDs',
- span,
- );
+ '"ignored_advisories" field must be a list of advisory IDs',
+ span,
+ );
final ignoredAdvisoriesNode = fields.nodes['ignored_advisories'];
if (ignoredAdvisoriesNode == null) {
@@ -161,9 +159,9 @@
// Throws a [PubspecException]
Never falseSecretsError(SourceSpan span) => _error(
- '"false_secrets" field must be a list of git-ignore style patterns',
- span,
- );
+ '"false_secrets" field must be a list of git-ignore style patterns',
+ span,
+ );
final falseSecretsNode = fields.nodes['false_secrets'];
if (falseSecretsNode == null) {
@@ -231,11 +229,14 @@
_executables![keyValue] = switch (value.value) {
null => keyValue,
final String s when valuePattern.hasMatch(s) => _error(
- '"executables" values may not contain path separators.',
- value.span,
- ),
+ '"executables" values may not contain path separators.',
+ value.span,
+ ),
final String s => s,
- _ => _error('"executables" values must be strings or null.', value.span)
+ _ => _error(
+ '"executables" values must be strings or null.',
+ value.span,
+ ),
};
});
diff --git a/lib/src/pubspec_utils.dart b/lib/src/pubspec_utils.dart
index 0e84c54..c235bf1 100644
--- a/lib/src/pubspec_utils.dart
+++ b/lib/src/pubspec_utils.dart
@@ -37,9 +37,7 @@
}) {
stripOnly ??= [];
- List<PackageRange> stripBounds(
- Map<String, PackageRange> constrained,
- ) {
+ List<PackageRange> stripBounds(Map<String, PackageRange> constrained) {
final result = <PackageRange>[];
for (final name in constrained.keys) {
@@ -70,9 +68,7 @@
/// version constraints replaced by `>=c` where `c` is the member of `current`
/// that has the same name as the dependency.
Pubspec atLeastCurrent(Pubspec original, List<PackageId> current) {
- List<PackageRange> fixBounds(
- Map<String, PackageRange> constrained,
- ) {
+ List<PackageRange> fixBounds(Map<String, PackageRange> constrained) {
final result = <PackageRange>[];
for (final name in constrained.keys) {
@@ -83,8 +79,8 @@
} else {
result.add(
packageRange.toRef().withConstraint(
- VersionRange(min: currentVersion.version, includeMin: true),
- ),
+ VersionRange(min: currentVersion.version, includeMin: true),
+ ),
);
}
}
diff --git a/lib/src/rate_limited_scheduler.dart b/lib/src/rate_limited_scheduler.dart
index 6cb03b9..d0efaed 100644
--- a/lib/src/rate_limited_scheduler.dart
+++ b/lib/src/rate_limited_scheduler.dart
@@ -63,8 +63,8 @@
RateLimitedScheduler(
Future<V> Function(J) runJob, {
required int maxConcurrentOperations,
- }) : _runJob = runJob,
- _pool = Pool(maxConcurrentOperations);
+ }) : _runJob = runJob,
+ _pool = Pool(maxConcurrentOperations);
/// Pick the next task in [_queue] and run it.
///
@@ -82,8 +82,10 @@
// Use an async function to catch sync exceptions from _runJob.
Future<V> runJob() async {
- return _results[task.jobId] =
- await task.zone.runUnary(_runJob, task.jobId);
+ return _results[task.jobId] = await task.zone.runUnary(
+ _runJob,
+ task.jobId,
+ );
}
completer.complete(runJob());
diff --git a/lib/src/sdk.dart b/lib/src/sdk.dart
index 848e71d..203c95e 100644
--- a/lib/src/sdk.dart
+++ b/lib/src/sdk.dart
@@ -53,9 +53,11 @@
/// A map from SDK identifiers that appear in pubspecs to the implementations of
/// those SDKs.
-final sdks = UnmodifiableMapView<String, Sdk>(
- {'dart': sdk, 'flutter': FlutterSdk(), 'fuchsia': FuchsiaSdk()},
-);
+final sdks = UnmodifiableMapView<String, Sdk>({
+ 'dart': sdk,
+ 'flutter': FlutterSdk(),
+ 'fuchsia': FuchsiaSdk(),
+});
/// The core Dart SDK.
final sdk = DartSdk();
diff --git a/lib/src/sdk/dart.dart b/lib/src/sdk/dart.dart
index 39e364d..4b3e397 100644
--- a/lib/src/sdk/dart.dart
+++ b/lib/src/sdk/dart.dart
@@ -63,7 +63,8 @@
// Some of the pub integration tests require an SDK version number, but the
// tests on the bots are not run from a built SDK so this lets us avoid
// parsing the missing version file.
- final sdkVersion = Platform.environment['_PUB_TEST_SDK_VERSION'] ??
+ final sdkVersion =
+ Platform.environment['_PUB_TEST_SDK_VERSION'] ??
Platform.version.split(' ').first;
return Version.parse(sdkVersion);
diff --git a/lib/src/sdk/flutter.dart b/lib/src/sdk/flutter.dart
index e8277b3..4f17abd 100644
--- a/lib/src/sdk/flutter.dart
+++ b/lib/src/sdk/flutter.dart
@@ -68,13 +68,15 @@
// $FLUTTER_ROOT has been set, but doesn't exist.
return null;
}
- final flutterVersionPath =
- p.join(rootDirectory, 'bin', 'cache', 'flutter.version.json');
+ final flutterVersionPath = p.join(
+ rootDirectory,
+ 'bin',
+ 'cache',
+ 'flutter.version.json',
+ );
try {
- final versionJson = jsonDecode(
- readTextFile(flutterVersionPath),
- );
+ final versionJson = jsonDecode(readTextFile(flutterVersionPath));
if (versionJson is! Map) {
return null;
}
diff --git a/lib/src/sdk/fuchsia.dart b/lib/src/sdk/fuchsia.dart
index 8e40188..6a49127 100644
--- a/lib/src/sdk/fuchsia.dart
+++ b/lib/src/sdk/fuchsia.dart
@@ -31,8 +31,9 @@
Version? get version {
if (!_isAvailable) return null;
- _version ??=
- Version.parse(readTextFile(p.join(_rootDirectory!, 'version')).trim());
+ _version ??= Version.parse(
+ readTextFile(p.join(_rootDirectory!, 'version')).trim(),
+ );
return _version;
}
diff --git a/lib/src/sdk/sdk_package_config.dart b/lib/src/sdk/sdk_package_config.dart
index 8a0f71b..03c6c62 100644
--- a/lib/src/sdk/sdk_package_config.dart
+++ b/lib/src/sdk/sdk_package_config.dart
@@ -49,8 +49,10 @@
factory SdkPackageConfig.fromYaml(YamlMap yaml) {
final version = yaml.expectField<int>('version');
if (version != 1) {
- throw UnsupportedError('This SDK only supports version 1 of the '
- 'sdk_packages.yaml format, but got version $version');
+ throw UnsupportedError(
+ 'This SDK only supports version 1 of the '
+ 'sdk_packages.yaml format, but got version $version',
+ );
}
final packages = <String, SdkPackage>{};
final packageDescriptions =
@@ -60,20 +62,14 @@
packages[package.name] = package;
}
- return SdkPackageConfig(
- yaml.expectField<String>('sdk'),
- packages,
- version,
- );
+ return SdkPackageConfig(yaml.expectField<String>('sdk'), packages, version);
}
Map<String, Object?> toMap() => {
- 'sdk': sdk,
- 'packages': [
- for (var package in packages.values) package.toMap(),
- ],
- 'version': version,
- };
+ 'sdk': sdk,
+ 'packages': [for (var package in packages.values) package.toMap()],
+ 'version': version,
+ };
}
/// The structure for each `packages` entry in an `sdk_packages.yaml` file.
@@ -91,11 +87,8 @@
SdkPackage(this.name, this.path);
SdkPackage.fromYaml(YamlMap yaml)
- : name = yaml.expectPackageNameField(),
- path = yaml.expectField<String>('path');
+ : name = yaml.expectPackageNameField(),
+ path = yaml.expectField<String>('path');
- Map<String, Object?> toMap() => {
- 'name': name,
- 'path': path,
- };
+ Map<String, Object?> toMap() => {'name': name, 'path': path};
}
diff --git a/lib/src/solver/assignment.dart b/lib/src/solver/assignment.dart
index f038141..7380b00 100644
--- a/lib/src/solver/assignment.dart
+++ b/lib/src/solver/assignment.dart
@@ -25,8 +25,8 @@
/// Creates a decision: a speculative assignment of a single package version.
Assignment.decision(PackageId package, this.decisionLevel, this.index)
- : cause = null,
- super(package.toRange(), true);
+ : cause = null,
+ super(package.toRange(), true);
/// Creates a derivation: an assignment that's automatically propagated from
/// incompatibilities.
diff --git a/lib/src/solver/failure.dart b/lib/src/solver/failure.dart
index 7f784d0..da4a185 100644
--- a/lib/src/solver/failure.dart
+++ b/lib/src/solver/failure.dart
@@ -38,18 +38,18 @@
}
SolveFailure(this.incompatibility, {this.suggestions})
- : assert(
- incompatibility.terms.isEmpty ||
- incompatibility.terms.single.package.isRoot,
- );
+ : assert(
+ incompatibility.terms.isEmpty ||
+ incompatibility.terms.single.package.isRoot,
+ );
/// Describes how [incompatibility] was derived, and thus why version solving
/// failed.
@override
String toString() => [
- _Writer(incompatibility).write(),
- if (suggestions != null) suggestions,
- ].join('\n');
+ _Writer(incompatibility).write(),
+ if (suggestions != null) suggestions,
+ ].join('\n');
}
/// A class that writes a human-readable description of the cause of a
@@ -160,9 +160,9 @@
.toSet() // avoid duplicates
.sortedBy((hint) => hint) // sort hints for consistent ordering.
.forEach((hint) {
- buffer.writeln();
- buffer.writeln(hint);
- });
+ buffer.writeln();
+ buffer.writeln(hint);
+ });
return buffer.toString();
}
@@ -204,8 +204,9 @@
// from their successors or that are used for multiple derivations.
final numbered = conclusion || _derivations[incompatibility]! > 1;
final conjunction = conclusion || incompatibility == _root ? 'So,' : 'And';
- final incompatibilityString =
- log.bold(incompatibility.toString(detailsForIncompatibility));
+ final incompatibilityString = log.bold(
+ incompatibility.toString(detailsForIncompatibility),
+ );
final conflictClause = incompatibility.cause as ConflictCause;
var detailsForCause = _detailsForCause(conflictClause);
@@ -278,17 +279,23 @@
}
}
} else if (cause is ConflictCause || otherCause is ConflictCause) {
- final derived = cause is ConflictCause
- ? conflictClause.conflict
- : conflictClause.other;
- final ext = cause is ConflictCause
- ? conflictClause.other
- : conflictClause.conflict;
+ final derived =
+ cause is ConflictCause
+ ? conflictClause.conflict
+ : conflictClause.other;
+ final ext =
+ cause is ConflictCause
+ ? conflictClause.other
+ : conflictClause.conflict;
final derivedLine = _lineNumbers[derived];
if (derivedLine != null) {
- final extAndDerived =
- ext.andToString(derived, detailsForCause, null, derivedLine);
+ final extAndDerived = ext.andToString(
+ derived,
+ detailsForCause,
+ null,
+ derivedLine,
+ );
_write(
incompatibility,
'Because $extAndDerived, $incompatibilityString.',
@@ -296,12 +303,14 @@
);
} else if (_isCollapsible(derived)) {
final derivedCause = derived.cause as ConflictCause;
- final collapsedDerived = derivedCause.conflict.cause is ConflictCause
- ? derivedCause.conflict
- : derivedCause.other;
- final collapsedExt = derivedCause.conflict.cause is ConflictCause
- ? derivedCause.other
- : derivedCause.conflict;
+ final collapsedDerived =
+ derivedCause.conflict.cause is ConflictCause
+ ? derivedCause.conflict
+ : derivedCause.other;
+ final collapsedExt =
+ derivedCause.conflict.cause is ConflictCause
+ ? derivedCause.other
+ : derivedCause.conflict;
detailsForCause = mergeMaps(
detailsForCause,
@@ -327,8 +336,10 @@
);
}
} else {
- final conflictAndOther = conflictClause.conflict
- .andToString(conflictClause.other, detailsForCause);
+ final conflictAndOther = conflictClause.conflict.andToString(
+ conflictClause.other,
+ detailsForCause,
+ );
_write(
incompatibility,
'Because '
@@ -417,11 +428,15 @@
if (conflictPackage == null) continue;
if (conflictPackage.description.source !=
term.package.description.source) {
- details[term.package.name] =
- const PackageDetail(showSource: true, showVersion: false);
+ details[term.package.name] = const PackageDetail(
+ showSource: true,
+ showVersion: false,
+ );
} else if (conflictPackage.toRef() != term.package.toRef()) {
- details[term.package.name] =
- const PackageDetail(showDescription: true, showVersion: false);
+ details[term.package.name] = const PackageDetail(
+ showDescription: true,
+ showVersion: false,
+ );
}
}
diff --git a/lib/src/solver/incompatibility.dart b/lib/src/solver/incompatibility.dart
index 1b4f714..bfdfe43 100644
--- a/lib/src/solver/incompatibility.dart
+++ b/lib/src/solver/incompatibility.dart
@@ -44,9 +44,10 @@
if (terms.length != 1 &&
cause is ConflictCause &&
terms.any((term) => term.isPositive && term.package.isRoot)) {
- terms = terms
- .where((term) => !term.isPositive || !term.package.isRoot)
- .toList();
+ terms =
+ terms
+ .where((term) => !term.isPositive || !term.package.isRoot)
+ .toList();
}
if (terms.length == 1 ||
@@ -111,8 +112,9 @@
assert(terms.first.isPositive);
final cause = this.cause as SdkIncompatibilityCause;
- final buffer =
- StringBuffer(_terse(terms.first, details, allowEvery: true));
+ final buffer = StringBuffer(
+ _terse(terms.first, details, allowEvery: true),
+ );
if (cause.noNullSafetyCause) {
buffer.write(' doesn\'t support null safety');
} else {
@@ -170,12 +172,14 @@
final term2 = terms.last;
if (term1.isPositive == term2.isPositive) {
if (term1.isPositive) {
- final package1 = term1.constraint.isAny
- ? _terseRef(term1, details)
- : _terse(term1, details);
- final package2 = term2.constraint.isAny
- ? _terseRef(term2, details)
- : _terse(term2, details);
+ final package1 =
+ term1.constraint.isAny
+ ? _terseRef(term1, details)
+ : _terse(term1, details);
+ final package2 =
+ term2.constraint.isAny
+ ? _terseRef(term2, details)
+ : _terse(term2, details);
return '$package1 is incompatible with $package2';
} else {
return 'either ${_terse(term1, details)} or '
@@ -222,12 +226,20 @@
final requiresBoth = _tryRequiresBoth(other, details, thisLine, otherLine);
if (requiresBoth != null) return requiresBoth;
- final requiresThrough =
- _tryRequiresThrough(other, details, thisLine, otherLine);
+ final requiresThrough = _tryRequiresThrough(
+ other,
+ details,
+ thisLine,
+ otherLine,
+ );
if (requiresThrough != null) return requiresThrough;
- final requiresForbidden =
- _tryRequiresForbidden(other, details, thisLine, otherLine);
+ final requiresForbidden = _tryRequiresForbidden(
+ other,
+ details,
+ thisLine,
+ otherLine,
+ );
if (requiresForbidden != null) return requiresForbidden;
final buffer = StringBuffer(toString(details));
@@ -264,9 +276,11 @@
.map((term) => _terse(term, details))
.join(' or ');
- final buffer =
- StringBuffer('${_terse(thisPositive, details, allowEvery: true)} ');
- final isDependency = cause is DependencyIncompatibilityCause &&
+ final buffer = StringBuffer(
+ '${_terse(thisPositive, details, allowEvery: true)} ',
+ );
+ final isDependency =
+ cause is DependencyIncompatibilityCause &&
other.cause is DependencyIncompatibilityCause;
buffer.write(isDependency ? 'depends on' : 'requires');
buffer.write(' both $thisNegatives');
@@ -326,15 +340,19 @@
final buffer = StringBuffer();
if (priorPositives.length > 1) {
- final priorString =
- priorPositives.map((term) => _terse(term, details)).join(' or ');
+ final priorString = priorPositives
+ .map((term) => _terse(term, details))
+ .join(' or ');
buffer.write('if $priorString then ');
} else {
- final verb = prior.cause is DependencyIncompatibilityCause
- ? 'depends on'
- : 'requires';
- buffer.write('${_terse(priorPositives.first, details, allowEvery: true)} '
- '$verb ');
+ final verb =
+ prior.cause is DependencyIncompatibilityCause
+ ? 'depends on'
+ : 'requires';
+ buffer.write(
+ '${_terse(priorPositives.first, details, allowEvery: true)} '
+ '$verb ',
+ );
}
buffer.write(_terse(priorNegative, details));
@@ -395,8 +413,9 @@
final buffer = StringBuffer();
if (positives.length > 1) {
- final priorString =
- positives.map((term) => _terse(term, details)).join(' or ');
+ final priorString = positives
+ .map((term) => _terse(term, details))
+ .join(' or ');
buffer.write('if $priorString then ');
} else {
buffer.write(_terse(positives.first, details, allowEvery: true));
@@ -437,8 +456,10 @@
buffer.write("which doesn't match any versions");
} else if (latterCause is PackageNotFoundIncompatibilityCause) {
final exceptionMessage = latterCause.exception.message;
- buffer.write("which doesn't exist "
- '($exceptionMessage)');
+ buffer.write(
+ "which doesn't exist "
+ '($exceptionMessage)',
+ );
} else {
buffer.write('which is forbidden');
}
@@ -463,10 +484,10 @@
}
/// Returns a terse representation of [term]'s package ref.
- String _terseRef(Term term, Map<String, PackageDetail>? details) =>
- term.package
- .toRef()
- .toString(details == null ? null : details[term.package.name]);
+ String _terseRef(Term term, Map<String, PackageDetail>? details) => term
+ .package
+ .toRef()
+ .toString(details == null ? null : details[term.package.name]);
/// Returns a terse representation of [term]'s package.
///
@@ -480,8 +501,9 @@
if (allowEvery && term!.constraint.isAny) {
return 'every version of ${_terseRef(term, details)}';
} else {
- return term!.package
- .toString(details == null ? null : details[term.package.name]);
+ return term!.package.toString(
+ details == null ? null : details[term.package.name],
+ );
}
}
}
diff --git a/lib/src/solver/package_lister.dart b/lib/src/solver/package_lister.dart
index 0a97132..56aa960 100644
--- a/lib/src/solver/package_lister.dart
+++ b/lib/src/solver/package_lister.dart
@@ -85,17 +85,16 @@
/// All versions of the package, sorted by [Version.compareTo].
Future<List<PackageId>> get _versions => _versionsMemo.runOnce(() async {
- final cachedVersions = _ref.isRoot
- ? [
+ final cachedVersions =
+ _ref.isRoot
+ ? [
PackageId(
_ref.name,
_rootPackage!.pubspec.version,
- ResolvedRootDescription(
- _ref.description as RootDescription,
- ),
+ ResolvedRootDescription(_ref.description as RootDescription),
),
]
- : (await withDependencyType(
+ : (await withDependencyType(
_dependencyType,
() => _systemCache.getVersions(
_ref,
@@ -103,9 +102,9 @@
),
))
..sort((id1, id2) => id1.version.compareTo(id2.version));
- _cachedVersions = cachedVersions;
- return cachedVersions;
- });
+ _cachedVersions = cachedVersions;
+ return cachedVersions;
+ });
final _versionsMemo = AsyncMemoizer<List<PackageId>>();
/// The most recent version of this package (or the oldest, if we're
@@ -124,8 +123,8 @@
this._allowedRetractedVersion, {
bool downgrade = false,
this.sdkOverrides = const {},
- }) : _isDowngrade = downgrade,
- _rootPackage = null;
+ }) : _isDowngrade = downgrade,
+ _rootPackage = null;
/// Creates a package lister for the root [package].
PackageLister.root(
@@ -133,17 +132,17 @@
this._systemCache, {
required Set<String> overriddenPackages,
required Map<String, Version>? sdkOverrides,
- }) : _ref = PackageRef.root(package),
- // Treat the package as locked so we avoid the logic for finding the
- // boundaries of various constraints, which is useless for the root
- // package.
- _locked = PackageId.root(package),
- _dependencyType = DependencyType.none,
- _overriddenPackages = overriddenPackages,
- _isDowngrade = false,
- _allowedRetractedVersion = null,
- sdkOverrides = sdkOverrides ?? {},
- _rootPackage = package;
+ }) : _ref = PackageRef.root(package),
+ // Treat the package as locked so we avoid the logic for finding the
+ // boundaries of various constraints, which is useless for the root
+ // package.
+ _locked = PackageId.root(package),
+ _dependencyType = DependencyType.none,
+ _overriddenPackages = overriddenPackages,
+ _isDowngrade = false,
+ _allowedRetractedVersion = null,
+ sdkOverrides = sdkOverrides ?? {},
+ _rootPackage = package;
/// Returns the number of versions of this package that match [constraint].
Future<int> countVersions(VersionConstraint constraint) async {
@@ -228,20 +227,18 @@
log.fine('Failed to parse pubspec for $id:\n$error');
_knownInvalidVersions = _knownInvalidVersions.union(id.version);
return [
- Incompatibility(
- [Term(id.toRange(), true)],
- NoVersionsIncompatibilityCause(),
- ),
+ Incompatibility([
+ Term(id.toRange(), true),
+ ], NoVersionsIncompatibilityCause()),
];
} on PackageNotFoundException {
// We can only get here if the lockfile refers to a specific package
// version that doesn't exist (probably because it was yanked).
_knownInvalidVersions = _knownInvalidVersions.union(id.version);
return [
- Incompatibility(
- [Term(id.toRange(), true)],
- NoVersionsIncompatibilityCause(),
- ),
+ Incompatibility([
+ Term(id.toRange(), true),
+ ], NoVersionsIncompatibilityCause()),
];
}
}
@@ -268,11 +265,13 @@
}
final entries = [
- ...pubspec.dependencies.values
- .where((range) => !_overriddenPackages.contains(range.name)),
+ ...pubspec.dependencies.values.where(
+ (range) => !_overriddenPackages.contains(range.name),
+ ),
if (id.isRoot)
- ...pubspec.devDependencies.values
- .where((range) => !_overriddenPackages.contains(range.name)),
+ ...pubspec.devDependencies.values.where(
+ (range) => !_overriddenPackages.contains(range.name),
+ ),
if (id.isRoot) ...[
..._rootPackage!.workspaceChildren.map((p) {
return PackageRange(
@@ -290,8 +289,8 @@
final index = lowerBound(
versions,
id,
- compare: (PackageId id1, PackageId id2) =>
- id1.version.compareTo(id2.version),
+ compare:
+ (PackageId id1, PackageId id2) => id1.version.compareTo(id2.version),
);
assert(index < versions.length);
assert(versions[index].version == id.version);
@@ -340,10 +339,10 @@
/// Returns an [Incompatibility] that represents a dependency from [depender]
/// onto [target].
Incompatibility _dependency(PackageRange depender, PackageRange target) {
- return Incompatibility(
- [Term(depender, true), Term(target, false)],
- DependencyIncompatibilityCause(depender, target),
- );
+ return Incompatibility([
+ Term(depender, true),
+ Term(target, false),
+ ], DependencyIncompatibilityCause(depender, target));
}
/// If the version at [index] in [_versions] isn't compatible with the current
@@ -357,32 +356,36 @@
if (allowsSdk(await _describeSafe(versions[index]))) return null;
- final (boundsFirstIndex, boundsLastIndex) =
- await _findBounds(index, (pubspec) => !allowsSdk(pubspec));
+ final (boundsFirstIndex, boundsLastIndex) = await _findBounds(
+ index,
+ (pubspec) => !allowsSdk(pubspec),
+ );
final incompatibleVersions = VersionRange(
min: boundsFirstIndex == 0 ? null : versions[boundsFirstIndex].version,
includeMin: true,
- max: boundsLastIndex == versions.length - 1
- ? null
- : versions[boundsLastIndex + 1].version,
+ max:
+ boundsLastIndex == versions.length - 1
+ ? null
+ : versions[boundsLastIndex + 1].version,
alwaysIncludeMaxPreRelease: true,
);
_knownInvalidVersions = incompatibleVersions.union(_knownInvalidVersions);
final sdkConstraint = await foldAsync<VersionConstraint, PackageId>(
- slice(versions, boundsFirstIndex, boundsLastIndex + 1),
- VersionConstraint.empty, (previous, version) async {
- final pubspec = await _describeSafe(version);
- return previous.union(
- pubspec.sdkConstraints[sdk.identifier]?.effectiveConstraint ??
- VersionConstraint.any,
- );
- });
-
- return Incompatibility(
- [Term(_ref.withConstraint(incompatibleVersions), true)],
- SdkIncompatibilityCause(sdkConstraint, sdk),
+ slice(versions, boundsFirstIndex, boundsLastIndex + 1),
+ VersionConstraint.empty,
+ (previous, version) async {
+ final pubspec = await _describeSafe(version);
+ return previous.union(
+ pubspec.sdkConstraints[sdk.identifier]?.effectiveConstraint ??
+ VersionConstraint.any,
+ );
+ },
);
+
+ return Incompatibility([
+ Term(_ref.withConstraint(incompatibleVersions), true),
+ ], SdkIncompatibilityCause(sdkConstraint, sdk));
}
/// Returns the first and last indices in [_versions] of the contiguous set of
@@ -427,9 +430,10 @@
final bounds = <String, Version>{};
var previous = versions[index];
outer:
- for (var id in upper
- ? versions.skip(index + 1)
- : versions.reversed.skip(versions.length - index)) {
+ for (var id
+ in upper
+ ? versions.skip(index + 1)
+ : versions.reversed.skip(versions.length - index)) {
final pubspec = await _describeSafe(id);
// The upper bound is exclusive and so is the first package with a
@@ -486,7 +490,8 @@
if (constraint == null) return true;
return sdk.isAvailable &&
- constraint.effectiveConstraint
- .allows(sdkOverrides[sdk.identifier] ?? sdk.version!);
+ constraint.effectiveConstraint.allows(
+ sdkOverrides[sdk.identifier] ?? sdk.version!,
+ );
}
}
diff --git a/lib/src/solver/partial_solution.dart b/lib/src/solver/partial_solution.dart
index 8b0fe25..ac9d52a 100644
--- a/lib/src/solver/partial_solution.dart
+++ b/lib/src/solver/partial_solution.dart
@@ -154,9 +154,10 @@
return assignment;
}
- assignedTerm = assignedTerm == null
- ? assignment
- : assignedTerm.intersect(assignment);
+ assignedTerm =
+ assignedTerm == null
+ ? assignment
+ : assignedTerm.intersect(assignment);
// As soon as we have enough assignments to satisfy [term], return them.
if (assignedTerm!.satisfies(term)) return assignment;
diff --git a/lib/src/solver/reformat_ranges.dart b/lib/src/solver/reformat_ranges.dart
index f8110e1..7158c80 100644
--- a/lib/src/solver/reformat_ranges.dart
+++ b/lib/src/solver/reformat_ranges.dart
@@ -30,13 +30,12 @@
Incompatibility reformatRanges(
Map<PackageRef, PackageLister> packageListers,
Incompatibility incompatibility,
-) =>
- Incompatibility(
- incompatibility.terms
- .map((term) => _reformatTerm(packageListers, term))
- .toList(),
- _reformatCause(packageListers, incompatibility.cause),
- );
+) => Incompatibility(
+ incompatibility.terms
+ .map((term) => _reformatTerm(packageListers, term))
+ .toList(),
+ _reformatCause(packageListers, incompatibility.cause),
+);
/// Returns [term] with the upper and lower bounds of its package range
/// reformatted if necessary.
@@ -150,7 +149,7 @@
) =>
cause is ConflictCause
? ConflictCause(
- reformatRanges(packageListers, cause.conflict),
- reformatRanges(packageListers, cause.other),
- )
+ reformatRanges(packageListers, cause.conflict),
+ reformatRanges(packageListers, cause.other),
+ )
: cause;
diff --git a/lib/src/solver/report.dart b/lib/src/solver/report.dart
index f7570ab..bfc426d 100644
--- a/lib/src/solver/report.dart
+++ b/lib/src/solver/report.dart
@@ -62,9 +62,9 @@
required bool dryRun,
required bool enforceLockfile,
required bool quiet,
- }) : _dryRun = dryRun,
- _quiet = quiet,
- _enforceLockfile = enforceLockfile;
+ }) : _dryRun = dryRun,
+ _quiet = quiet,
+ _enforceLockfile = enforceLockfile;
/// Displays a report of the results of the version resolution in
/// [_newLockFile] relative to the [_previousLockFile] file.
@@ -266,17 +266,19 @@
/// Displays a two-line message, number of outdated packages and an
/// instruction to run `pub outdated` if outdated packages are detected.
void reportOutdated() {
- final outdatedPackagesCount = _newLockFile.packages.values.where((id) {
- final versions = _availableVersions[id.name]!;
- // A version is counted:
- // - if there is a newer version which is not a pre-release and current
- // version is also not a pre-release or,
- // - if the current version is pre-release then any upgraded version is
- // considered.
- return versions.any(
- (v) => v > id.version && (id.version.isPreRelease || !v.isPreRelease),
- );
- }).length;
+ final outdatedPackagesCount =
+ _newLockFile.packages.values.where((id) {
+ final versions = _availableVersions[id.name]!;
+ // A version is counted:
+ // - if there is a newer version which is not a pre-release and
+ // current version is also not a pre-release or,
+ // - if the current version is pre-release then any upgraded version
+ // is considered.
+ return versions.any(
+ (v) =>
+ v > id.version && (id.version.isPreRelease || !v.isPreRelease),
+ );
+ }).length;
if (outdatedPackagesCount > 0) {
String packageCountString;
@@ -285,21 +287,23 @@
} else {
packageCountString = '$outdatedPackagesCount packages have';
}
- message('$packageCountString newer versions incompatible with '
- 'dependency constraints.\n'
- 'Try `$topLevelProgram pub outdated` for more information.');
+ message(
+ '$packageCountString newer versions incompatible with '
+ 'dependency constraints.\n'
+ 'Try `$topLevelProgram pub outdated` for more information.',
+ );
}
}
void reportAdvisories() {
if (advisoryDisplayHandles.isNotEmpty) {
message('Dependencies are affected by security advisories:');
- for (var footnote = 0;
- footnote < advisoryDisplayHandles.length;
- footnote++) {
- message(
- ' [^$footnote]: ${advisoryDisplayHandles[footnote]}',
- );
+ for (
+ var footnote = 0;
+ footnote < advisoryDisplayHandles.length;
+ footnote++
+ ) {
+ message(' [^$footnote]: ${advisoryDisplayHandles[footnote]}');
}
}
}
@@ -308,8 +312,8 @@
lockFile.mainDependencies.contains(name)
? DependencyType.direct
: lockFile.devDependencies.contains(name)
- ? DependencyType.dev
- : DependencyType.none;
+ ? DependencyType.dev
+ : DependencyType.none;
String? _constructAdvisoriesMessage(
List<int> footnotes,
@@ -428,10 +432,11 @@
final advisoryFootnotes = <int>[];
final reportedAdvisories = advisories
.where(
- (adv) => _rootPubspec.ignoredAdvisories.intersection({
- ...adv.aliases,
- adv.id,
- }).isEmpty,
+ (adv) =>
+ _rootPubspec.ignoredAdvisories.intersection({
+ ...adv.aliases,
+ adv.id,
+ }).isEmpty,
)
.take(maxAdvisoryFootnotesPerLine);
for (final adv in reportedAdvisories) {
@@ -454,21 +459,17 @@
'retracted, ${maxAll(versions, Version.prioritize)} available',
);
} else if (newId.version.isPreRelease && newerUnstable) {
- notes.add(
- 'retracted, ${maxAll(versions)} available',
- );
+ notes.add('retracted, ${maxAll(versions)} available');
} else {
- notes.add(
- 'retracted',
- );
+ notes.add('retracted');
}
} else if (status.isDiscontinued &&
- [DependencyType.direct, DependencyType.dev]
- .contains(_rootPubspec.dependencyType(name))) {
+ [
+ DependencyType.direct,
+ DependencyType.dev,
+ ].contains(_rootPubspec.dependencyType(name))) {
if (status.discontinuedReplacedBy == null) {
- notes.add(
- 'discontinued',
- );
+ notes.add('discontinued');
} else {
notes.add(
'discontinued replaced by ${status.discontinuedReplacedBy}',
@@ -476,16 +477,12 @@
}
} else if (newerStable) {
// If there are newer stable versions, only show those.
- notes.add(
- '${maxAll(versions, Version.prioritize)} available',
- );
+ notes.add('${maxAll(versions, Version.prioritize)} available');
} else if (
- // Only show newer prereleases for versions where a prerelease is
- // already chosen.
- newId.version.isPreRelease && newerUnstable) {
- notes.add(
- '${maxAll(versions)} available',
- );
+ // Only show newer prereleases for versions where a prerelease is
+ // already chosen.
+ newId.version.isPreRelease && newerUnstable) {
+ notes.add('${maxAll(versions)} available');
}
message = notes.isEmpty ? null : '(${notes.join(', ')})';
@@ -494,7 +491,8 @@
final oldDependencyType = dependencyType(_previousLockFile, name);
final newDependencyType = dependencyType(_newLockFile, name);
- final dependencyTypeChanged = oldId != null &&
+ final dependencyTypeChanged =
+ oldId != null &&
newId != null &&
oldDependencyType != newDependencyType;
@@ -559,13 +557,11 @@
void _writeDependencyType(DependencyType t, StringBuffer output) {
output.write(
- log.bold(
- switch (t) {
- DependencyType.direct => 'direct',
- DependencyType.dev => 'dev',
- DependencyType.none => 'transitive',
- },
- ),
+ log.bold(switch (t) {
+ DependencyType.direct => 'direct',
+ DependencyType.dev => 'dev',
+ DependencyType.none => 'transitive',
+ }),
);
}
diff --git a/lib/src/solver/result.dart b/lib/src/solver/result.dart
index 2d3ea06..7c88666 100644
--- a/lib/src/solver/result.dart
+++ b/lib/src/solver/result.dart
@@ -67,12 +67,11 @@
packages.map((id) async {
if (id.source is CachedSource) {
return await withDependencyType(
- _root.pubspec.dependencyType(id.name), () async {
- return (await cache.downloadPackage(
- id,
- ))
- .packageId;
- });
+ _root.pubspec.dependencyType(id.name),
+ () async {
+ return (await cache.downloadPackage(id)).packageId;
+ },
+ );
}
return id;
}),
@@ -83,15 +82,17 @@
// Don't factor in overridden dependencies' SDK constraints, because we'll
// accept those packages even if their constraints don't match.
- final nonOverrides = pubspecs.values
- .where((pubspec) => !_overriddenPackages.contains(pubspec.name))
- .toList();
+ final nonOverrides =
+ pubspecs.values
+ .where((pubspec) => !_overriddenPackages.contains(pubspec.name))
+ .toList();
final sdkConstraints = <String, VersionConstraint>{};
for (var pubspec in nonOverrides) {
pubspec.sdkConstraints.forEach((identifier, constraint) {
- sdkConstraints[identifier] = constraint.effectiveConstraint
- .intersect(sdkConstraints[identifier] ?? VersionConstraint.any);
+ sdkConstraints[identifier] = constraint.effectiveConstraint.intersect(
+ sdkConstraints[identifier] ?? VersionConstraint.any,
+ );
});
}
return LockFile(
@@ -112,10 +113,11 @@
///
/// This includes packages that were added or removed.
Set<String> get changedPackages {
- final changed = packages
- .where((id) => _previousLockFile.packages[id.name] != id)
- .map((id) => id.name)
- .toSet();
+ final changed =
+ packages
+ .where((id) => _previousLockFile.packages[id.name] != id)
+ .map((id) => id.name)
+ .toSet();
return changed.union(
_previousLockFile.packages.keys
@@ -136,6 +138,7 @@
);
@override
- String toString() => 'Took $attemptedSolutions tries to resolve to\n'
+ String toString() =>
+ 'Took $attemptedSolutions tries to resolve to\n'
'- ${packages.join("\n- ")}';
}
diff --git a/lib/src/solver/solve_suggestions.dart b/lib/src/solver/solve_suggestions.dart
index ab92339..36408ec 100644
--- a/lib/src/solver/solve_suggestions.dart
+++ b/lib/src/solver/solve_suggestions.dart
@@ -80,10 +80,11 @@
}
if (suggestions.isEmpty) return null;
- final tryOne = suggestions.length == 1
- ? 'You can try the following suggestion to make the pubspec resolve:'
- : 'You can try one of the following suggestions '
- 'to make the pubspec resolve:';
+ final tryOne =
+ suggestions.length == 1
+ ? 'You can try the following suggestion to make the pubspec resolve:'
+ : 'You can try one of the following suggestions '
+ 'to make the pubspec resolve:';
suggestions.sort((a, b) => a.priority.compareTo(b.priority));
@@ -99,16 +100,18 @@
String packageAddDescription(Entrypoint entrypoint, PackageId id) {
final name = id.name;
- final isDev =
- entrypoint.workspaceRoot.pubspec.devDependencies.containsKey(name);
+ final isDev = entrypoint.workspaceRoot.pubspec.devDependencies.containsKey(
+ name,
+ );
final resolvedDescription = id.description;
final String descriptor;
final d = resolvedDescription.description.serializeForPubspec(
- containingDir: Directory.current
- .path // The add command will resolve file names relative to CWD.
+ containingDir:
+ Directory
+ .current
+ .path, // The add command will resolve file names relative to CWD.
// This currently should have no implications as we don't create suggestions
// for path-packages.
- ,
languageVersion: entrypoint.workspaceRoot.pubspec.languageVersion,
);
if (d == null) {
@@ -151,8 +154,9 @@
if (constraint == null) return null;
// Find the most relevant Flutter release fulfilling the constraint.
- final bestRelease =
- await inferBestFlutterRelease({cause.sdk.identifier: constraint});
+ final bestRelease = await inferBestFlutterRelease({
+ cause.sdk.identifier: constraint,
+ });
if (bestRelease == null) return null;
final result = await _tryResolve(
entrypoint.workspaceRoot,
@@ -180,7 +184,8 @@
Future<_ResolutionSuggestion?> suggestSinglePackageUpdate(String name) async {
// TODO(https://github.com/dart-lang/pub/issues/4127): This should
// operate on all packages in workspace.
- final originalRange = entrypoint.workspaceRoot.dependencies[name] ??
+ final originalRange =
+ entrypoint.workspaceRoot.dependencies[name] ??
entrypoint.workspaceRoot.devDependencies[name];
if (originalRange == null ||
originalRange.description is! HostedDescription) {
@@ -209,18 +214,21 @@
final addDescription = packageAddDescription(entrypoint, resolvingPackage);
var priority = 1;
- var suggestion = '* Try updating your constraint on $name: '
+ var suggestion =
+ '* Try updating your constraint on $name: '
'$topLevelProgram pub add $addDescription';
if (originalConstraint is VersionRange) {
final min = originalConstraint.min;
if (min != null) {
if (resolvingPackage.version < min) {
priority = 3;
- suggestion = '* Consider downgrading your constraint on $name: '
+ suggestion =
+ '* Consider downgrading your constraint on $name: '
'$topLevelProgram pub add $addDescription';
} else {
priority = 2;
- suggestion = '* Try upgrading your constraint on $name: '
+ suggestion =
+ '* Try upgrading your constraint on $name: '
'$topLevelProgram pub add $addDescription';
}
}
@@ -235,8 +243,10 @@
required bool stripLowerBound,
}) async {
final originalPubspec = entrypoint.workspaceRoot.pubspec;
- final relaxedPubspec =
- stripVersionBounds(originalPubspec, stripLowerBound: stripLowerBound);
+ final relaxedPubspec = stripVersionBounds(
+ originalPubspec,
+ stripLowerBound: stripLowerBound,
+ );
final result = await _tryResolve(
Package(
@@ -250,9 +260,10 @@
}
final updatedPackageVersions = <PackageId>[];
for (final id in result.packages) {
- final originalConstraint = (originalPubspec.dependencies[id.name] ??
- originalPubspec.devDependencies[id.name])
- ?.constraint;
+ final originalConstraint =
+ (originalPubspec.dependencies[id.name] ??
+ originalPubspec.devDependencies[id.name])
+ ?.constraint;
if (originalConstraint != null) {
updatedPackageVersions.add(id);
}
diff --git a/lib/src/solver/term.dart b/lib/src/solver/term.dart
index a646d57..d856b18 100644
--- a/lib/src/solver/term.dart
+++ b/lib/src/solver/term.dart
@@ -26,7 +26,7 @@
Term get inverse => Term(package, !isPositive);
Term(PackageRange package, this.isPositive)
- : package = package.withTerseConstraint();
+ : package = package.withTerseConstraint();
VersionConstraint get constraint => package.constraint;
diff --git a/lib/src/solver/version_solver.dart b/lib/src/solver/version_solver.dart
index a77d2c1..165da9b 100644
--- a/lib/src/solver/version_solver.dart
+++ b/lib/src/solver/version_solver.dart
@@ -80,8 +80,9 @@
/// Names of packages that are overridden in this resolution as a [Set] for
/// convenience.
- late final Set<String> _overriddenPackages =
- MapKeySet(_root.allOverridesInWorkspace);
+ late final Set<String> _overriddenPackages = MapKeySet(
+ _root.allOverridesInWorkspace,
+ );
/// The set of packages for which the lockfile should be ignored.
final Set<String> _unlock;
@@ -99,18 +100,17 @@
this._lockFile,
Iterable<String> unlock, {
Map<String, Version> sdkOverrides = const {},
- }) : _sdkOverrides = sdkOverrides,
- _dependencyOverrides = _root.allOverridesInWorkspace,
- _unlock = {...unlock};
+ }) : _sdkOverrides = sdkOverrides,
+ _dependencyOverrides = _root.allOverridesInWorkspace,
+ _unlock = {...unlock};
/// Prime the solver with [constraints].
void addConstraints(Iterable<ConstraintAndCause> constraints) {
for (final constraint in constraints) {
_addIncompatibility(
- Incompatibility(
- [Term(constraint.range, false)],
- PackageVersionForbiddenCause(reason: constraint.cause),
- ),
+ Incompatibility([
+ Term(constraint.range, false),
+ ], PackageVersionForbiddenCause(reason: constraint.cause)),
);
}
}
@@ -120,10 +120,9 @@
Future<SolveResult> solve() async {
_stopwatch.start();
_addIncompatibility(
- Incompatibility(
- [Term(PackageRange.root(_root), false)],
- RootIncompatibilityCause(),
- ),
+ Incompatibility([
+ Term(PackageRange.root(_root), false),
+ ], RootIncompatibilityCause()),
);
try {
@@ -138,8 +137,10 @@
});
} finally {
// Gather some solving metrics.
- log.solver('Version solving took ${_stopwatch.elapsed} seconds.\n'
- 'Tried ${_solution.attemptedSolutions} solutions.');
+ log.solver(
+ 'Version solving took ${_stopwatch.elapsed} seconds.\n'
+ 'Tried ${_solution.attemptedSolutions} solutions.',
+ );
}
}
@@ -222,8 +223,10 @@
// [incompatibility] is satisfied and we have a conflict.
if (unsatisfied == null) return #conflict;
- _log("derived:${unsatisfied.isPositive ? ' not' : ''} "
- '${unsatisfied.package}');
+ _log(
+ "derived:${unsatisfied.isPositive ? ' not' : ''} "
+ '${unsatisfied.package}',
+ );
_solution.derive(
unsatisfied.package,
!unsatisfied.isPositive,
@@ -285,8 +288,10 @@
mostRecentSatisfier = satisfier;
difference = null;
} else {
- previousSatisfierLevel =
- math.max(previousSatisfierLevel, satisfier.decisionLevel);
+ previousSatisfierLevel = math.max(
+ previousSatisfierLevel,
+ satisfier.decisionLevel,
+ );
}
if (mostRecentTerm == term) {
@@ -350,8 +355,10 @@
final partially = difference == null ? '' : ' partially';
final bang = log.red('!');
- _log('$bang $mostRecentTerm is$partially satisfied by '
- '$mostRecentSatisfier');
+ _log(
+ '$bang $mostRecentTerm is$partially satisfied by '
+ '$mostRecentSatisfier',
+ );
_log('$bang which is caused by "${mostRecentSatisfier.cause}"');
_log('$bang thus: $incompatibility');
}
@@ -373,10 +380,9 @@
for (var candidate in unsatisfied) {
if (candidate.source is! UnknownSource) continue;
_addIncompatibility(
- Incompatibility(
- [Term(candidate.toRef().withConstraint(VersionConstraint.any), true)],
- UnknownSourceIncompatibilityCause(),
- ),
+ Incompatibility([
+ Term(candidate.toRef().withConstraint(VersionConstraint.any), true),
+ ], UnknownSourceIncompatibilityCause()),
);
return candidate.name;
}
@@ -395,10 +401,9 @@
version = await _packageLister(package).bestVersion(package.constraint);
} on PackageNotFoundException catch (error) {
_addIncompatibility(
- Incompatibility(
- [Term(package.toRef().withConstraint(VersionConstraint.any), true)],
- PackageNotFoundIncompatibilityCause(error),
- ),
+ Incompatibility([
+ Term(package.toRef().withConstraint(VersionConstraint.any), true),
+ ], PackageNotFoundIncompatibilityCause(error)),
);
return package.name;
}
@@ -409,31 +414,33 @@
// any version instead so that the lister gives us more general
// incompatibilities. This makes error reporting much nicer.
if (_excludesSingleVersion(package.constraint)) {
- version =
- await _packageLister(package).bestVersion(VersionConstraint.any);
+ version = await _packageLister(
+ package,
+ ).bestVersion(VersionConstraint.any);
} else {
// If there are no versions that satisfy [package.constraint], add an
// incompatibility that indicates that.
_addIncompatibility(
- Incompatibility(
- [Term(package, true)],
- NoVersionsIncompatibilityCause(),
- ),
+ Incompatibility([
+ Term(package, true),
+ ], NoVersionsIncompatibilityCause()),
);
return package.name;
}
}
var conflict = false;
- for (var incompatibility
- in await _packageLister(package).incompatibilitiesFor(version!)) {
+ for (var incompatibility in await _packageLister(
+ package,
+ ).incompatibilitiesFor(version!)) {
_addIncompatibility(incompatibility);
// If an incompatibility is already satisfied, then selecting [version]
// would cause a conflict. We'll continue adding its dependencies, then go
// back to unit propagation which will guide us to choose a better
// version.
- conflict = conflict ||
+ conflict =
+ conflict ||
incompatibility.terms.every(
(term) =>
term.package.name == package.name || _solution.satisfies(term),
@@ -507,12 +514,13 @@
// way that doesn't fetch.
List<PackageId> ids;
try {
- ids = package.source is HostedSource
- ? await _systemCache.getVersions(
- package.toRef(),
- maxAge: const Duration(days: 3),
- )
- : [package];
+ ids =
+ package.source is HostedSource
+ ? await _systemCache.getVersions(
+ package.toRef(),
+ maxAge: const Duration(days: 3),
+ )
+ : [package];
} on Exception {
ids = <PackageId>[package];
}
diff --git a/lib/src/source/git.dart b/lib/src/source/git.dart
index b0fda4b..7bed62f 100644
--- a/lib/src/source/git.dart
+++ b/lib/src/source/git.dart
@@ -46,8 +46,10 @@
if (description is String) {
url = description;
} else if (description is! Map) {
- throw const FormatException('The description must be a Git URL or a map '
- "with a 'url' key.");
+ throw const FormatException(
+ 'The description must be a Git URL or a map '
+ "with a 'url' key.",
+ );
} else {
final descriptionUrl = description['url'];
if (descriptionUrl is! String) {
@@ -60,25 +62,25 @@
final descriptionRef = description['ref'];
if (descriptionRef is! String?) {
throw const FormatException(
- "The 'ref' field of the description must be a "
- 'string.');
+ "The 'ref' field of the description must be a "
+ 'string.',
+ );
}
ref = descriptionRef;
final descriptionPath = description['path'];
if (descriptionPath is! String?) {
throw const FormatException(
- "The 'path' field of the description must be a "
- 'string.');
+ "The 'path' field of the description must be a "
+ 'string.',
+ );
}
path = descriptionPath;
if (languageVersion.forbidsUnknownDescriptionKeys) {
for (final key in description.keys) {
if (!['url', 'ref', 'path'].contains(key)) {
- throw FormatException(
- 'Unknown key "$key" in description.',
- );
+ throw FormatException('Unknown key "$key" in description.');
}
}
}
@@ -109,27 +111,34 @@
String? containingDir,
}) {
if (description is! Map) {
- throw const FormatException("The description must be a map with a 'url' "
- 'key.');
+ throw const FormatException(
+ "The description must be a map with a 'url' "
+ 'key.',
+ );
}
final ref = description['ref'];
if (ref is! String?) {
throw const FormatException(
- "The 'ref' field of the description must be a "
- 'string.');
+ "The 'ref' field of the description must be a "
+ 'string.',
+ );
}
final resolvedRef = description['resolved-ref'];
if (resolvedRef is! String) {
- throw const FormatException("The 'resolved-ref' field of the description "
- 'must be a string.');
+ throw const FormatException(
+ "The 'resolved-ref' field of the description "
+ 'must be a string.',
+ );
}
final url = description['url'];
if (url is! String) {
- throw const FormatException("The 'url' field of the description "
- 'must be a string.');
+ throw const FormatException(
+ "The 'url' field of the description "
+ 'must be a string.',
+ );
}
return PackageId(
name,
@@ -138,9 +147,7 @@
GitDescription(
url: url,
ref: ref,
- path: _validatedPath(
- description['path'],
- ),
+ path: _validatedPath(description['path']),
containingDir: containingDir,
),
resolvedRef,
@@ -161,8 +168,10 @@
// system aren't allowed. This can happen if a hosted or git dependency
// has a git dependency.
if (containingDir == null) {
- throw FormatException('"$url" is a relative path, but this '
- 'isn\'t a local pubspec.');
+ throw FormatException(
+ '"$url" is a relative path, but this '
+ 'isn\'t a local pubspec.',
+ );
}
// A relative path is stored internally as absolute resolved relative to
// [containingPath].
@@ -191,8 +200,9 @@
path ??= '.';
if (path is! String) {
throw const FormatException(
- "The 'path' field of the description must be a "
- 'string.');
+ "The 'path' field of the description must be a "
+ 'string.',
+ );
}
// Use Dart's URL parser to validate the URL.
@@ -208,8 +218,9 @@
}
if (!p.url.isWithin('.', path) && !p.url.equals('.', path)) {
throw const FormatException(
- "The 'path' field of the description must not reach outside the "
- 'repository.');
+ "The 'path' field of the description must not reach outside the "
+ 'repository.',
+ );
}
return p.url.normalize(parsed.toString());
}
@@ -274,8 +285,9 @@
final description = resolvedDescription.description;
// Normalize the path because Git treats "./" at the beginning of a path
// specially.
- var pathInCache =
- p.normalize(p.join(p.fromUri(description.path), pathInProject));
+ var pathInCache = p.normalize(
+ p.join(p.fromUri(description.path), pathInProject),
+ );
// Git doesn't recognize backslashes in paths, even on Windows.
if (Platform.isWindows) pathInCache = pathInCache.replaceAll('\\', '/');
@@ -284,13 +296,16 @@
final revision = resolvedDescription.resolvedRef;
try {
- return await git.run(
- [_gitDirArg(repoPath), 'show', '$revision:$pathInCache'],
- workingDir: repoPath,
- );
+ return await git.run([
+ _gitDirArg(repoPath),
+ 'show',
+ '$revision:$pathInCache',
+ ], workingDir: repoPath);
} on git.GitException catch (_) {
- fail('Could not find a file named "$pathInCache" in '
- '${GitDescription.prettyUri(description.url)} $revision.');
+ fail(
+ 'Could not find a file named "$pathInCache" in '
+ '${GitDescription.prettyUri(description.url)} $revision.',
+ );
}
}
@@ -329,11 +344,7 @@
throw StateError('Called with wrong ref');
}
return _pool.withResource(
- () => _describeUncached(
- id.toRef(),
- description.resolvedRef,
- cache,
- ),
+ () => _describeUncached(id.toRef(), description.resolvedRef, cache),
);
}
@@ -388,8 +399,10 @@
throw ArgumentError('Wrong source');
}
if (!git.isInstalled) {
- fail('Cannot get ${id.name} from Git (${description.url}).\n'
- 'Please ensure Git is correctly installed.');
+ fail(
+ 'Cannot get ${id.name} from Git (${description.url}).\n'
+ 'Please ensure Git is correctly installed.',
+ );
}
ensureDir(p.join(cache.rootDirForSource(this), 'cache'));
@@ -445,33 +458,34 @@
final result = <RepairResult>[];
- final packages = listDir(rootDir)
- .where((entry) => dirExists(p.join(entry, '.git')))
- .expand((revisionCachePath) {
- return _readPackageList(revisionCachePath).map((relative) {
- // If we've already failed to load another package from this
- // repository, ignore it.
- if (!dirExists(revisionCachePath)) return null;
+ final packages =
+ listDir(rootDir)
+ .where((entry) => dirExists(p.join(entry, '.git')))
+ .expand((revisionCachePath) {
+ return _readPackageList(revisionCachePath).map((relative) {
+ // If we've already failed to load another package from this
+ // repository, ignore it.
+ if (!dirExists(revisionCachePath)) return null;
- final packageDir = p.join(revisionCachePath, relative);
- try {
- return Package.load(
- packageDir,
- loadPubspec: Pubspec.loadRootWithSources(cache.sources),
- );
- } catch (error, stackTrace) {
- log.error('Failed to load package', error, stackTrace);
- final name = p.basename(revisionCachePath).split('-').first;
- result.add(
- RepairResult(name, Version.none, this, success: false),
- );
- tryDeleteEntry(revisionCachePath);
- return null;
- }
- });
- })
- .nonNulls
- .toList();
+ final packageDir = p.join(revisionCachePath, relative);
+ try {
+ return Package.load(
+ packageDir,
+ loadPubspec: Pubspec.loadRootWithSources(cache.sources),
+ );
+ } catch (error, stackTrace) {
+ log.error('Failed to load package', error, stackTrace);
+ final name = p.basename(revisionCachePath).split('-').first;
+ result.add(
+ RepairResult(name, Version.none, this, success: false),
+ );
+ tryDeleteEntry(revisionCachePath);
+ return null;
+ }
+ });
+ })
+ .nonNulls
+ .toList();
// Note that there may be multiple packages with the same name and version
// (pinned to different commits). The sort order of those is unspecified.
@@ -482,13 +496,19 @@
// ignore it.
if (!dirExists(package.dir)) continue;
- log.message('Resetting Git repository for '
- '${log.bold(package.name)} ${package.version}...');
+ log.message(
+ 'Resetting Git repository for '
+ '${log.bold(package.name)} ${package.version}...',
+ );
try {
// Remove all untracked files.
- await git
- .run(['clean', '-d', '--force', '-x'], workingDir: package.dir);
+ await git.run([
+ 'clean',
+ '-d',
+ '--force',
+ '-x',
+ ], workingDir: package.dir);
// Discard all changes to tracked files.
await git.run(['reset', '--hard', 'HEAD'], workingDir: package.dir);
@@ -497,8 +517,10 @@
RepairResult(package.name, package.version, this, success: true),
);
} on git.GitException catch (error, stackTrace) {
- log.error('Failed to reset ${log.bold(package.name)} '
- '${package.version}. Error:\n$error');
+ log.error(
+ 'Failed to reset ${log.bold(package.name)} '
+ '${package.version}. Error:\n$error',
+ );
log.fine(stackTrace.toString());
result.add(
RepairResult(package.name, package.version, this, success: false),
@@ -607,10 +629,11 @@
}
var isValid = true;
try {
- final result = await git.run(
- [_gitDirArg(dirPath), 'rev-parse', '--is-inside-git-dir'],
- workingDir: dirPath,
- );
+ final result = await git.run([
+ _gitDirArg(dirPath),
+ 'rev-parse',
+ '--is-inside-git-dir',
+ ], workingDir: dirPath);
if (result.trim() != 'true') {
isValid = false;
}
@@ -663,11 +686,13 @@
Future<String> _firstRevision(String path, String reference) async {
final String output;
try {
- output = (await git.run(
- [_gitDirArg(path), 'rev-list', '--max-count=1', reference],
- workingDir: path,
- ))
- .trim();
+ output =
+ (await git.run([
+ _gitDirArg(path),
+ 'rev-list',
+ '--max-count=1',
+ reference,
+ ], workingDir: path)).trim();
} on git.GitException catch (e) {
throw PackageNotFoundException(
"Could not find git ref '$reference' (${e.stderr})",
@@ -686,11 +711,7 @@
/// out the working tree, but instead makes the repository a local mirror of
/// the remote repository. See the manpage for `git clone` for more
/// information.
- Future<void> _clone(
- String from,
- String to, {
- bool mirror = false,
- }) async {
+ Future<void> _clone(String from, String to, {bool mirror = false}) async {
// Git on Windows does not seem to automatically create the destination
// directory.
ensureDir(to);
@@ -726,14 +747,15 @@
/// Checks out the reference [ref] in [repoPath].
Future<void> _checkOut(String repoPath, String ref) {
return git
- .run(['checkout', ref], workingDir: repoPath).then((result) => null);
+ .run(['checkout', ref], workingDir: repoPath)
+ .then((result) => null);
}
String _revisionCachePath(PackageId id, SystemCache cache) => p.join(
- cache.rootDirForSource(this),
- '${_repoName(id.description.description as GitDescription)}-'
- '${(id.description as ResolvedGitDescription).resolvedRef}',
- );
+ cache.rootDirForSource(this),
+ '${_repoName(id.description.description as GitDescription)}-'
+ '${(id.description as ResolvedGitDescription).resolvedRef}',
+ );
/// Returns the path to the canonical clone of the repository referred to by
/// [description] (the one in `<system cache>/git/cache`).
@@ -787,8 +809,8 @@
required this.relative,
required String? ref,
required String? path,
- }) : ref = ref ?? 'HEAD',
- path = path ?? '.';
+ }) : ref = ref ?? 'HEAD',
+ path = path ?? '.';
factory GitDescription({
required String url,
@@ -807,7 +829,8 @@
@override
String format() {
- var result = '${prettyUri(url)} at '
+ var result =
+ '${prettyUri(url)} at '
'$ref';
if (path != '.') result += ' in $path';
return result;
@@ -818,12 +841,13 @@
required String? containingDir,
required LanguageVersion languageVersion,
}) {
- final relativeUrl = containingDir != null && relative
- ? p.url.relative(
- url,
- from: p.toUri(p.normalize(p.absolute(containingDir))).toString(),
- )
- : url;
+ final relativeUrl =
+ containingDir != null && relative
+ ? p.url.relative(
+ url,
+ from: p.toUri(p.normalize(p.absolute(containingDir))).toString(),
+ )
+ : url;
if (ref == 'HEAD' && path == '.') return relativeUrl;
return {
'url': relativeUrl,
@@ -843,12 +867,8 @@
other.path == path;
}
- GitDescription withRef(String newRef) => GitDescription.raw(
- url: url,
- relative: relative,
- ref: newRef,
- path: path,
- );
+ GitDescription withRef(String newRef) =>
+ GitDescription.raw(url: url, relative: relative, ref: newRef, path: path);
@override
int get hashCode => Object.hash(url, ref, path);
@@ -876,7 +896,8 @@
@override
String format() {
- var result = '${GitDescription.prettyUri(description.url)} at '
+ var result =
+ '${GitDescription.prettyUri(description.url)} at '
'${resolvedRef.substring(0, 6)}';
if (description.path != '.') result += ' in ${description.path}';
return result;
@@ -884,12 +905,13 @@
@override
Object? serializeForLockfile({required String? containingDir}) {
- final url = description.relative && containingDir != null
- ? p.url.relative(
- description.url,
- from: Uri.file(p.absolute(containingDir)).toString(),
- )
- : description.url;
+ final url =
+ description.relative && containingDir != null
+ ? p.url.relative(
+ description.url,
+ from: Uri.file(p.absolute(containingDir)).toString(),
+ )
+ : description.url;
return {
'url': url,
'ref': description.ref,
diff --git a/lib/src/source/hosted.dart b/lib/src/source/hosted.dart
index dd0fe9e..1956d9a 100644
--- a/lib/src/source/hosted.dart
+++ b/lib/src/source/hosted.dart
@@ -59,11 +59,7 @@
try {
u = Uri.parse(hostedUrl);
} on FormatException catch (e) {
- throw FormatException(
- 'invalid url: ${e.message}',
- e.source,
- e.offset,
- );
+ throw FormatException('invalid url: ${e.message}', e.source, e.offset);
}
if (!u.hasScheme || (u.scheme != 'http' && u.scheme != 'https')) {
throw FormatException('url scheme must be https:// or http://', hostedUrl);
@@ -177,7 +173,7 @@
if (runningFromTest) {
defaultHostedUrl =
io.Platform.environment['_PUB_TEST_DEFAULT_HOSTED_URL'] ??
- defaultHostedUrl;
+ defaultHostedUrl;
}
return validateAndNormalizeHostedUrl(
io.Platform.environment['PUB_HOSTED_URL'] ?? defaultHostedUrl,
@@ -384,23 +380,19 @@
if (languageVersion.forbidsUnknownDescriptionKeys) {
for (final key in description.keys) {
if (!['url', 'name'].contains(key)) {
- throw FormatException(
- 'Unknown key "$key" in description.',
- );
+ throw FormatException('Unknown key "$key" in description.');
}
}
}
return HostedDescription(name, url as String);
}
- static final RegExp _looksLikePackageName =
- RegExp(r'^[a-zA-Z_]+[a-zA-Z0-9_]*$');
+ static final RegExp _looksLikePackageName = RegExp(
+ r'^[a-zA-Z_]+[a-zA-Z0-9_]*$',
+ );
late final RateLimitedScheduler<_RefAndCache, List<_VersionInfo>> _scheduler =
- RateLimitedScheduler(
- _fetchVersions,
- maxConcurrentOperations: 10,
- );
+ RateLimitedScheduler(_fetchVersions, maxConcurrentOperations: 10);
List<_VersionInfo> _versionInfoFromPackageListing(
Map body,
@@ -496,16 +488,19 @@
try {
// TODO(sigurdm): Implement cancellation of requests. This probably
// requires resolution of: https://github.com/dart-lang/http/issues/424.
- bodyText = await withAuthenticatedClient(cache, Uri.parse(hostedUrl),
- (client) async {
+ bodyText = await withAuthenticatedClient(cache, Uri.parse(hostedUrl), (
+ client,
+ ) async {
return await retryForHttp(
- 'fetching versions for "$packageName" from "$url"', () async {
- final request = http.Request('GET', url);
- request.attachPubApiHeaders();
- request.attachMetadataHeaders();
- final response = await client.fetch(request);
- return response.body;
- });
+ 'fetching versions for "$packageName" from "$url"',
+ () async {
+ final request = http.Request('GET', url);
+ request.attachPubApiHeaders();
+ request.attachMetadataHeaders();
+ final response = await client.fetch(request);
+ return response.body;
+ },
+ );
});
final decoded = jsonDecode(bodyText);
if (decoded is! Map<String, dynamic>) {
@@ -598,16 +593,19 @@
final Map<String, dynamic> body;
final List<Advisory>? result;
try {
- bodyText = await withAuthenticatedClient(cache, Uri.parse(hostedUrl),
- (client) async {
+ bodyText = await withAuthenticatedClient(cache, Uri.parse(hostedUrl), (
+ client,
+ ) async {
return await retryForHttp(
- 'fetching advisories for "$packageName" from "$url"', () async {
- final request = http.Request('GET', url);
- request.attachPubApiHeaders();
- request.attachMetadataHeaders();
- final response = await client.fetch(request);
- return response.body;
- });
+ 'fetching advisories for "$packageName" from "$url"',
+ () async {
+ final request = http.Request('GET', url);
+ request.attachPubApiHeaders();
+ request.attachMetadataHeaders();
+ final response = await client.fetch(request);
+ return response.body;
+ },
+ );
});
final decoded = jsonDecode(bodyText);
if (decoded is! Map<String, dynamic>) {
@@ -617,9 +615,10 @@
result = _extractAdvisoryDetailsForPackage(decoded, ref.name);
} on FormatException catch (error, stackTrace) {
log.warning(
- 'Failed to decode advisories for $packageName from $hostedUrl.\n'
- '$error\n'
- '${Chain.forTrace(stackTrace)}');
+ 'Failed to decode advisories for $packageName from $hostedUrl.\n'
+ '$error\n'
+ '${Chain.forTrace(stackTrace)}',
+ );
return null;
} on PubHttpResponseException catch (error, stackTrace) {
if (isPubDevUrl(hostedUrl)) {
@@ -775,8 +774,12 @@
Duration? maxAge,
) async {
final advisoriesUpdated =
- (await status(id.toRef(), id.version, cache, maxAge: maxAge))
- .advisoriesUpdated;
+ (await status(
+ id.toRef(),
+ id.version,
+ cache,
+ maxAge: maxAge,
+ )).advisoriesUpdated;
if (advisoriesUpdated == null) return null;
Future<List<Advisory>?> readAdvisoriesFromCache() async {
@@ -799,16 +802,17 @@
if (cachedAdvisoriesUpdated is! String) {
throw const FormatException('Broken cached advisories response');
}
- final parsedCacheAdvisoriesUpdated =
- DateTime.parse(cachedAdvisoriesUpdated);
+ final parsedCacheAdvisoriesUpdated = DateTime.parse(
+ cachedAdvisoriesUpdated,
+ );
final advisoriesUpdated =
(await status(id.toRef(), id.version, cache)).advisoriesUpdated;
if (
- // We could not obtain the timestamp of latest advisory update.
- advisoriesUpdated == null ||
- // The cached entry is too old.
- advisoriesUpdated.isAfter(parsedCacheAdvisoriesUpdated)) {
+ // We could not obtain the timestamp of latest advisory update.
+ advisoriesUpdated == null ||
+ // The cached entry is too old.
+ advisoriesUpdated.isAfter(parsedCacheAdvisoriesUpdated)) {
tryDeleteEntry(advisoriesCachePath);
} else {
return _extractAdvisoryDetailsForPackage(doc, id.toRef().name);
@@ -914,14 +918,7 @@
try {
ensureDir(p.dirname(path));
- writeTextFile(
- path,
- jsonEncode(
- <String, dynamic>{
- ...body,
- },
- ),
- );
+ writeTextFile(path, jsonEncode(<String, dynamic>{...body}));
} on io.IOException catch (e) {
// Not being able to write this cache is not fatal. Just move on...
log.fine('Failed writing cache file. $e');
@@ -939,12 +936,10 @@
ensureDir(p.dirname(path));
await writeTextFileAsync(
path,
- jsonEncode(
- <String, dynamic>{
- ...body,
- '_fetchedAt': DateTime.now().toIso8601String(),
- },
- ),
+ jsonEncode(<String, dynamic>{
+ ...body,
+ '_fetchedAt': DateTime.now().toIso8601String(),
+ }),
);
// Delete the entry in the in-memory cache to maintain the invariant that
// cached information in memory is the same as that on the disk.
@@ -993,8 +988,11 @@
var versionListing = _scheduler.peek(_RefAndCache(ref, cache));
if (maxAge != null) {
// Do we have a cached version response on disk?
- versionListing ??=
- await _cachedVersionListingResponse(ref, cache, maxAge: maxAge);
+ versionListing ??= await _cachedVersionListingResponse(
+ ref,
+ cache,
+ maxAge: maxAge,
+ );
}
// Otherwise retrieve the info from the host.
versionListing ??= await _scheduler
@@ -1017,29 +1015,19 @@
final dir = _urlToDirectory(description.url);
// Use a dot-dir because older versions of pub won't choke on that
// name when iterating the cache (it is not listed by [listDir]).
- return p.join(
- cache.rootDirForSource(this),
- dir,
- _versionListingDirectory,
- );
+ return p.join(cache.rootDirForSource(this), dir, _versionListingDirectory);
}
static const _versionListingDirectory = '.cache';
// The path where the response from the package-listing api is cached.
String _versionListingCachePath(PackageRef ref, SystemCache cache) {
- return p.join(
- _cacheDirPath(ref, cache),
- '${ref.name}-versions.json',
- );
+ return p.join(_cacheDirPath(ref, cache), '${ref.name}-versions.json');
}
// The path where the response from the advisories api is cached.
String _advisoriesCachePath(PackageRef ref, SystemCache cache) {
- return p.join(
- _cacheDirPath(ref, cache),
- '${ref.name}-advisories.json',
- );
+ return p.join(_cacheDirPath(ref, cache), '${ref.name}-advisories.json');
}
/// Downloads a list of all versions of a package that are available from the
@@ -1061,11 +1049,14 @@
log.io('Finding versions of ${ref.name} in $dir');
List<PackageId> offlineVersions;
if (dirExists(dir)) {
- offlineVersions = listDir(dir)
- .where(_looksLikePackageDir)
- .map((entry) => _idForBasename(p.basename(entry), url))
- .where((id) => id.name == ref.name && id.version != Version.none)
- .toList();
+ offlineVersions =
+ listDir(dir)
+ .where(_looksLikePackageDir)
+ .map((entry) => _idForBasename(p.basename(entry), url))
+ .where(
+ (id) => id.name == ref.name && id.version != Version.none,
+ )
+ .toList();
} else {
offlineVersions = [];
}
@@ -1083,8 +1074,11 @@
var versionListing = _scheduler.peek(_RefAndCache(ref, cache));
if (maxAge != null) {
// Do we have a cached version response on disk?
- versionListing ??=
- await _cachedVersionListingResponse(ref, cache, maxAge: maxAge);
+ versionListing ??= await _cachedVersionListingResponse(
+ ref,
+ cache,
+ maxAge: maxAge,
+ );
}
versionListing ??= await _scheduler.schedule(_RefAndCache(ref, cache));
return versionListing
@@ -1121,8 +1115,9 @@
return advisories
.where(
- (advisory) => advisory.affectedVersions
- .contains(id.version.canonicalizedVersion),
+ (advisory) => advisory.affectedVersions.contains(
+ id.version.canonicalizedVersion,
+ ),
)
.toList();
}
@@ -1148,8 +1143,9 @@
/// a given package.
Uri _listAdvisoriesUrl(PackageRef ref) {
final (description, package) = _parseRef(ref);
- return Uri.parse(description.url)
- .resolve('api/packages/$package/advisories');
+ return Uri.parse(
+ description.url,
+ ).resolve('api/packages/$package/advisories');
}
/// Retrieves the pubspec for a specific version of a package that is
@@ -1196,7 +1192,8 @@
maxAge: const Duration(days: 3),
);
- final expectedContentHash = versionInfo?.archiveSha256 ??
+ final expectedContentHash =
+ versionInfo?.archiveSha256 ??
// Handling of legacy server - we use the hash from the id (typically
// from the lockfile) to compare to the existing download.
(id.description as ResolvedHostedDescription).sha256;
@@ -1336,17 +1333,9 @@
);
} catch (error, stackTrace) {
log.error('Failed to load package', error, stackTrace);
- final id = _idForBasename(
- p.basename(entry),
- url,
- );
+ final id = _idForBasename(p.basename(entry), url);
results.add(
- RepairResult(
- id.name,
- id.version,
- this,
- success: false,
- ),
+ RepairResult(id.name, id.version, this, success: false),
);
tryDeleteEntry(entry);
}
@@ -1357,43 +1346,37 @@
packages.sort(Package.orderByNameAndVersion);
- return results
- ..addAll(
- await Future.wait(
- packages.map((package) async {
- final id = PackageId(
- package.name,
- package.version,
- ResolvedHostedDescription(
- HostedDescription._(package.name, url),
- sha256: null,
- ),
- );
- try {
- deleteEntry(package.dir);
- await _download(id, package.dir, cache);
- return RepairResult(id.name, id.version, this, success: true);
- } catch (error, stackTrace) {
- var message = 'Failed to repair ${log.bold(package.name)} '
- '${package.version}';
- if (url != defaultUrl) message += ' from $url';
- log.error('$message. Error:\n$error');
- log.fine(stackTrace.toString());
+ return results..addAll(
+ await Future.wait(
+ packages.map((package) async {
+ final id = PackageId(
+ package.name,
+ package.version,
+ ResolvedHostedDescription(
+ HostedDescription._(package.name, url),
+ sha256: null,
+ ),
+ );
+ try {
+ deleteEntry(package.dir);
+ await _download(id, package.dir, cache);
+ return RepairResult(id.name, id.version, this, success: true);
+ } catch (error, stackTrace) {
+ var message =
+ 'Failed to repair ${log.bold(package.name)} '
+ '${package.version}';
+ if (url != defaultUrl) message += ' from $url';
+ log.error('$message. Error:\n$error');
+ log.fine(stackTrace.toString());
- tryDeleteEntry(package.dir);
- return RepairResult(
- id.name,
- id.version,
- this,
- success: false,
- );
- }
- }),
- ),
- );
+ tryDeleteEntry(package.dir);
+ return RepairResult(id.name, id.version, this, success: false);
+ }
+ }),
+ ),
+ );
}),
- ))
- .expand((x) => x);
+ )).expand((x) => x);
}
/// Returns the best-guess package ID for [basename], which should be a
@@ -1432,8 +1415,10 @@
@override
List<Package> getCachedPackages(SystemCache cache) {
final root = cache.rootDirForSource(HostedSource.instance);
- final cacheDir =
- p.join(root, _urlToDirectory(HostedSource.instance.defaultUrl));
+ final cacheDir = p.join(
+ root,
+ _urlToDirectory(HostedSource.instance.defaultUrl),
+ );
if (!dirExists(cacheDir)) return [];
return listDir(cacheDir)
@@ -1445,9 +1430,11 @@
loadPubspec: Pubspec.loadRootWithSources(cache.sources),
);
} catch (error, stackTrace) {
- log.fine('Failed to load package from $entry:\n'
- '$error\n'
- '${Chain.forTrace(stackTrace)}');
+ log.fine(
+ 'Failed to load package from $entry:\n'
+ '$error\n'
+ '${Chain.forTrace(stackTrace)}',
+ );
return null;
}
})
@@ -1455,11 +1442,7 @@
.toList();
}
- Future<void> downloadInto(
- PackageId id,
- String destPath,
- SystemCache cache,
- ) =>
+ Future<void> downloadInto(PackageId id, String destPath, SystemCache cache) =>
_download(id, destPath, cache);
/// Downloads package [id] from the archive_url and unpacks it into
@@ -1489,8 +1472,9 @@
// query-string as is the case with signed S3 URLs. And we wish to allow for
// such URLs to be used.
final versions = await _scheduler.schedule(_RefAndCache(id.toRef(), cache));
- final versionInfo =
- versions.firstWhereOrNull((i) => i.version == id.version);
+ final versionInfo = versions.firstWhereOrNull(
+ (i) => i.version == id.version,
+ );
final packageName = id.name;
final version = id.version;
if (versionInfo == null) {
@@ -1545,8 +1529,9 @@
final expectedSha256 = versionInfo.archiveSha256;
try {
- await withAuthenticatedClient(cache, Uri.parse(description.url),
- (client) async {
+ await withAuthenticatedClient(cache, Uri.parse(description.url), (
+ client,
+ ) async {
// In addition to HTTP errors, this will retry crc32c/sha256 errors as
// well because [PackageIntegrityException] subclasses
// [PubHttpException].
@@ -1609,10 +1594,7 @@
void writeHash(PackageId id, SystemCache cache, List<int> bytes) {
final path = hashPath(id, cache);
ensureDir(p.dirname(path));
- writeTextFile(
- path,
- hexEncode(bytes),
- );
+ writeTextFile(path, hexEncode(bytes));
}
/// Installs a tar.gz file in [archivePath] as if it was downloaded from a
@@ -1656,9 +1638,9 @@
tempDir,
cache.sources,
containingDescription:
- // Dummy description. As we never use the dependencies, they don't
- // need to be resolved.
- RootDescription('.'),
+ // Dummy description. As we never use the dependencies, they don't
+ // need to be resolved.
+ RootDescription('.'),
);
final errors = pubspec.dependencyErrors;
if (errors.isNotEmpty) {
@@ -1736,12 +1718,14 @@
assert(error.statusCode == 401 || error.statusCode == 403);
if (error.statusCode == 401) {
- hint = '$hostedUrl package repository requested authentication!\n'
+ hint =
+ '$hostedUrl package repository requested authentication!\n'
'You can provide credentials using:\n'
' dart pub token add $hostedUrl';
}
if (error.statusCode == 403) {
- hint = 'Insufficient permissions to the resource at the $hostedUrl '
+ hint =
+ 'Insufficient permissions to the resource at the $hostedUrl '
'package repository.\nYou can modify credentials using:\n'
' dart pub token add $hostedUrl';
message = 'authorization failed';
@@ -1936,15 +1920,17 @@
String _urlToDirectory(String hostedUrl) {
// Normalize all loopback URLs to "localhost".
final url = hostedUrl.replaceAllMapped(
- RegExp(r'^(https?://)(127\.0\.0\.1|\[::1\]|localhost)?'), (match) {
- // Don't include the scheme for HTTPS URLs. This makes the directory names
- // nice for the default and most recommended scheme. We also don't include
- // it for localhost URLs, since they're always known to be HTTP.
- final localhost = match[2] == null ? '' : 'localhost';
- final scheme =
- match[1] == 'https://' || localhost.isNotEmpty ? '' : match[1];
- return '$scheme$localhost';
- });
+ RegExp(r'^(https?://)(127\.0\.0\.1|\[::1\]|localhost)?'),
+ (match) {
+ // Don't include the scheme for HTTPS URLs. This makes the directory names
+ // nice for the default and most recommended scheme. We also don't include
+ // it for localhost URLs, since they're always known to be HTTP.
+ final localhost = match[2] == null ? '' : 'localhost';
+ final scheme =
+ match[1] == 'https://' || localhost.isNotEmpty ? '' : match[1];
+ return '$scheme$localhost';
+ },
+ );
return replace(
url,
RegExp(r'[<>:"\\/|?*%]'),
@@ -2031,14 +2017,16 @@
final actualChecksum = crc32c.finalize();
log.fine(
- 'Computed checksum $actualChecksum for ${id.name} ${id.version} with '
- 'expected CRC32C of $expectedChecksum.');
+ 'Computed checksum $actualChecksum for ${id.name} ${id.version} with '
+ 'expected CRC32C of $expectedChecksum.',
+ );
if (actualChecksum != expectedChecksum) {
throw PackageIntegrityException(
- 'Package archive for ${id.name} ${id.version} downloaded from '
- '"$archiveUrl" has "x-goog-hash: crc32c=$expectedChecksum", which '
- 'doesn\'t match the checksum of the archive downloaded.');
+ 'Package archive for ${id.name} ${id.version} downloaded from '
+ '"$archiveUrl" has "x-goog-hash: crc32c=$expectedChecksum", which '
+ 'doesn\'t match the checksum of the archive downloaded.',
+ );
}
}
@@ -2079,8 +2067,9 @@
} on FormatException catch (e, s) {
log.exception(e, s);
throw PackageIntegrityException(
- 'Package archive "$fileName" has a malformed CRC32C checksum in '
- 'its response headers');
+ 'Package archive "$fileName" has a malformed CRC32C checksum in '
+ 'its response headers',
+ );
}
}
}
diff --git a/lib/src/source/path.dart b/lib/src/source/path.dart
index a33eba9..0125d85 100644
--- a/lib/src/source/path.dart
+++ b/lib/src/source/path.dart
@@ -80,10 +80,7 @@
name,
PathDescription(
p.normalize(
- p.join(
- p.absolute(containingDescription.path),
- description,
- ),
+ p.join(p.absolute(containingDescription.path), description),
),
isRelative,
),
@@ -96,10 +93,7 @@
);
}
final resolvedPath = p.url.normalize(
- p.url.joinAll([
- containingDescription.path,
- ...p.posix.split(dir),
- ]),
+ p.url.joinAll([containingDescription.path, ...p.posix.split(dir)]),
);
if (!(p.isWithin('.', resolvedPath) || p.equals('.', resolvedPath))) {
throw FormatException(
@@ -118,13 +112,17 @@
);
} else if (containingDescription is HostedDescription) {
if (isRelative) {
- throw FormatException('"$description" is a relative path, but this '
- 'isn\'t a local pubspec.');
+ throw FormatException(
+ '"$description" is a relative path, but this '
+ 'isn\'t a local pubspec.',
+ );
}
return PackageRef(name, PathDescription(dir, false));
} else {
- throw FormatException('"$description" is a path, but this '
- 'isn\'t a local pubspec.');
+ throw FormatException(
+ '"$description" is a path, but this '
+ 'isn\'t a local pubspec.',
+ );
}
}
@@ -140,13 +138,17 @@
}
var path = description['path'];
if (path is! String) {
- throw const FormatException("The 'path' field of the description must "
- 'be a string.');
+ throw const FormatException(
+ "The 'path' field of the description must "
+ 'be a string.',
+ );
}
final relative = description['relative'];
if (relative is! bool) {
- throw const FormatException("The 'relative' field of the description "
- 'must be a boolean.');
+ throw const FormatException(
+ "The 'relative' field of the description "
+ 'must be a boolean.',
+ );
}
// Resolve the path relative to the containing file path.
@@ -154,13 +156,13 @@
// Relative paths coming from lockfiles that are not on the local file
// system aren't allowed.
if (containingDir == null) {
- throw FormatException('"$description" is a relative path, but this '
- 'isn\'t a local pubspec.');
+ throw FormatException(
+ '"$description" is a relative path, but this '
+ 'isn\'t a local pubspec.',
+ );
}
- path = p.normalize(
- p.absolute(p.join(containingDir, path)),
- );
+ path = p.normalize(p.absolute(p.join(containingDir, path)));
}
return PackageId(
@@ -286,8 +288,8 @@
}) {
return relative
? PathSource.relativePathWithPosixSeparators(
- p.relative(path, from: containingDir),
- )
+ p.relative(path, from: containingDir),
+ )
: path;
}
diff --git a/lib/src/source/root.dart b/lib/src/source/root.dart
index 2513b4d..d6f971a 100644
--- a/lib/src/source/root.dart
+++ b/lib/src/source/root.dart
@@ -29,10 +29,7 @@
String get name => 'root';
@override
- Future<Pubspec> doDescribe(
- PackageId id,
- SystemCache cache,
- ) async {
+ Future<Pubspec> doDescribe(PackageId id, SystemCache cache) async {
throw UnsupportedError('Cannot describe the root');
}
diff --git a/lib/src/source/sdk.dart b/lib/src/source/sdk.dart
index 3dfa037..daaca09 100644
--- a/lib/src/source/sdk.dart
+++ b/lib/src/source/sdk.dart
@@ -79,10 +79,7 @@
}
@override
- Future<Pubspec> doDescribe(
- PackageId id,
- SystemCache cache,
- ) async =>
+ Future<Pubspec> doDescribe(PackageId id, SystemCache cache) async =>
_loadPubspec(id.toRef(), cache);
/// Loads the pubspec for the SDK package named [ref].
@@ -100,8 +97,9 @@
/// Validate that there are no non-sdk dependencies if the SDK does not
/// allow them.
if (ref.description case final SdkDescription description) {
- if (sdks[description.sdk]
- case Sdk(allowsNonSdkDepsInSdkPackages: false)) {
+ if (sdks[description.sdk] case Sdk(
+ allowsNonSdkDepsInSdkPackages: false,
+ )) {
for (var dep in pubspec.dependencies.entries) {
if (dep.value.source is! SdkSource) {
throw UnsupportedError(
diff --git a/lib/src/source/unknown.dart b/lib/src/source/unknown.dart
index 3ba7221..50102ea 100644
--- a/lib/src/source/unknown.dart
+++ b/lib/src/source/unknown.dart
@@ -39,8 +39,7 @@
Object? description, {
required Description containingDescription,
LanguageVersion? languageVersion,
- }) =>
- PackageRef(name, UnknownDescription(description, this));
+ }) => PackageRef(name, UnknownDescription(description, this));
@override
PackageId parseId(
@@ -48,12 +47,11 @@
Version version,
Object? description, {
String? containingDir,
- }) =>
- PackageId(
- name,
- version,
- ResolvedUnknownDescription(UnknownDescription(description, this)),
- );
+ }) => PackageId(
+ name,
+ version,
+ ResolvedUnknownDescription(UnknownDescription(description, this)),
+ );
@override
Future<List<PackageId>> doGetVersions(
diff --git a/lib/src/system_cache.dart b/lib/src/system_cache.dart
index ad6bc03..bf52180 100644
--- a/lib/src/system_cache.dart
+++ b/lib/src/system_cache.dart
@@ -38,33 +38,34 @@
String get tempDir => p.join(rootDir, '_temp');
- static String defaultDir = (() {
- final envCache = Platform.environment['PUB_CACHE'];
- if (envCache != null) {
- return envCache;
- } else if (Platform.isWindows) {
- // %LOCALAPPDATA% is used as the cache location over %APPDATA%, because
- // the latter is synchronised between devices when the user roams between
- // them, whereas the former is not.
- final localAppData = Platform.environment['LOCALAPPDATA'];
- if (localAppData == null) {
- dataError('''
+ static String defaultDir =
+ (() {
+ final envCache = Platform.environment['PUB_CACHE'];
+ if (envCache != null) {
+ return envCache;
+ } else if (Platform.isWindows) {
+ // %LOCALAPPDATA% is used as the cache location over %APPDATA%,
+ // because the latter is synchronised between devices when the user
+ // roams between them, whereas the former is not.
+ final localAppData = Platform.environment['LOCALAPPDATA'];
+ if (localAppData == null) {
+ dataError('''
Could not find the pub cache. No `LOCALAPPDATA` environment variable exists.
Consider setting the `PUB_CACHE` variable manually.
''');
- }
- return p.join(localAppData, 'Pub', 'Cache');
- } else {
- final home = Platform.environment['HOME'];
- if (home == null) {
- dataError('''
+ }
+ return p.join(localAppData, 'Pub', 'Cache');
+ } else {
+ final home = Platform.environment['HOME'];
+ if (home == null) {
+ dataError('''
Could not find the pub cache. No `HOME` environment variable exists.
Consider setting the `PUB_CACHE` variable manually.
''');
- }
- return p.join(home, '.pub-cache');
- }
- })();
+ }
+ return p.join(home, '.pub-cache');
+ }
+ })();
/// The available sources.
late final _sources = {
@@ -104,8 +105,8 @@
/// If [isOffline] is `true`, then the offline hosted source will be used.
/// Defaults to `false`.
SystemCache({String? rootDir, this.isOffline = false})
- : _rootDir = rootDir,
- tokenStore = TokenStore(dartConfigDir);
+ : _rootDir = rootDir,
+ tokenStore = TokenStore(dartConfigDir);
/// Loads the package identified by [id].
///
@@ -178,23 +179,22 @@
}) async {
var versions = await ref.source.doGetVersions(ref, maxAge, this);
- versions = (await Future.wait(
- versions.map((id) async {
- final packageStatus = await ref.source.status(
- id.toRef(),
- id.version,
- this,
- maxAge: maxAge,
- );
- if (!packageStatus.isRetracted ||
- id.version == allowedRetractedVersion) {
- return id;
- }
- return null;
- }),
- ))
- .nonNulls
- .toList();
+ versions =
+ (await Future.wait(
+ versions.map((id) async {
+ final packageStatus = await ref.source.status(
+ id.toRef(),
+ id.version,
+ this,
+ maxAge: maxAge,
+ );
+ if (!packageStatus.isRetracted ||
+ id.version == allowedRetractedVersion) {
+ return id;
+ }
+ return null;
+ }),
+ )).nonNulls.toList();
return versions;
}
@@ -336,8 +336,10 @@
final appData = Platform.environment['APPDATA'];
if (appData == null) return;
final legacyCacheLocation = p.join(appData, 'Pub', 'Cache');
- final legacyCacheDeprecatedFile =
- p.join(legacyCacheLocation, 'DEPRECATED.md');
+ final legacyCacheDeprecatedFile = p.join(
+ legacyCacheLocation,
+ 'DEPRECATED.md',
+ );
final stat = tryStatFile(legacyCacheDeprecatedFile);
if ((stat == null ||
DateTime.now().difference(stat.changed) >
diff --git a/lib/src/utils.dart b/lib/src/utils.dart
index fd7b45f..2a62a94 100644
--- a/lib/src/utils.dart
+++ b/lib/src/utils.dart
@@ -135,9 +135,10 @@
}) {
final completer = Completer<T>();
void wrappedCallback() {
- Future.sync(callback)
- .then(completer.complete)
- .catchError((Object e, StackTrace? stackTrace) {
+ Future.sync(callback).then(completer.complete).catchError((
+ Object e,
+ StackTrace? stackTrace,
+ ) {
// [stackTrace] can be null if we're running without [captureStackChains],
// since dart:io will often throw errors without stack traces.
if (stackTrace != null) {
@@ -295,8 +296,7 @@
T maxAll<T extends Comparable>(
Iterable<T> iter, [
int Function(T, T) compare = Comparable.compare,
-]) =>
- iter.reduce((max, element) => compare(element, max) > 0 ? element : max);
+]) => iter.reduce((max, element) => compare(element, max) > 0 ? element : max);
/// Returns the element of [values] for which [orderBy] returns the smallest
/// value.
@@ -345,12 +345,11 @@
Iterable<T> values,
S initialValue,
Future<S> Function(S previous, T element) combine,
-) =>
- values.fold(
- Future.value(initialValue),
- (previousFuture, element) =>
- previousFuture.then((previous) => combine(previous, element)),
- );
+) => values.fold(
+ Future.value(initialValue),
+ (previousFuture, element) =>
+ previousFuture.then((previous) => combine(previous, element)),
+);
/// Replace each instance of [matcher] in [source] with the return value of
/// [fn].
@@ -412,9 +411,10 @@
// If we're using verbose logging, be more verbose but more accurate when
// reporting timing information.
- final msString = log.verbosity.isLevelVisible(log.Level.fine)
- ? _padLeft(ms.toString(), 3, '0')
- : (ms ~/ 100).toString();
+ final msString =
+ log.verbosity.isLevelVisible(log.Level.fine)
+ ? _padLeft(ms.toString(), 3, '0')
+ : (ms ~/ 100).toString();
return "$result${hasMinutes ? _padLeft(s.toString(), 2, '0') : s}"
'.${msString}s';
@@ -426,11 +426,7 @@
String _urlDecode(String encoded) =>
Uri.decodeComponent(encoded.replaceAll('+', ' '));
-enum ForceColorOption {
- always,
- never,
- auto,
-}
+enum ForceColorOption { always, never, auto }
/// Change to decide if ANSI colors should be output regardless of terminalD.
ForceColorOption forceColors = ForceColorOption.auto;
@@ -581,10 +577,11 @@
bytes[6] = (bytes[6] & 0x0F) | 0x40;
bytes[8] = (bytes[8] & 0x3f) | 0x80;
- final chars = bytes
- .map((b) => b.toRadixString(16).padLeft(2, '0'))
- .join()
- .toUpperCase();
+ final chars =
+ bytes
+ .map((b) => b.toRadixString(16).padLeft(2, '0'))
+ .join()
+ .toUpperCase();
return ''
'${chars.substring(0, 8)}-'
@@ -609,34 +606,37 @@
return text;
}
- return text.split('\n').map((originalLine) {
- final buffer = StringBuffer();
- var lengthSoFar = 0;
- var firstLine = true;
- for (var word in originalLine.split(' ')) {
- final wordLength = _withoutColors(word).length;
- if (wordLength > lineLength) {
- if (lengthSoFar != 0) buffer.writeln();
- if (!firstLine) buffer.write(prefix);
- buffer.writeln(word);
- firstLine = false;
- } else if (lengthSoFar == 0) {
- if (!firstLine) buffer.write(prefix);
- buffer.write(word);
- lengthSoFar = wordLength + prefix.length;
- } else if (lengthSoFar + 1 + wordLength > lineLength) {
- buffer.writeln();
- buffer.write(prefix);
- buffer.write(word);
- lengthSoFar = wordLength + prefix.length;
- firstLine = false;
- } else {
- buffer.write(' $word');
- lengthSoFar += 1 + wordLength;
- }
- }
- return buffer.toString();
- }).join('\n');
+ return text
+ .split('\n')
+ .map((originalLine) {
+ final buffer = StringBuffer();
+ var lengthSoFar = 0;
+ var firstLine = true;
+ for (var word in originalLine.split(' ')) {
+ final wordLength = _withoutColors(word).length;
+ if (wordLength > lineLength) {
+ if (lengthSoFar != 0) buffer.writeln();
+ if (!firstLine) buffer.write(prefix);
+ buffer.writeln(word);
+ firstLine = false;
+ } else if (lengthSoFar == 0) {
+ if (!firstLine) buffer.write(prefix);
+ buffer.write(word);
+ lengthSoFar = wordLength + prefix.length;
+ } else if (lengthSoFar + 1 + wordLength > lineLength) {
+ buffer.writeln();
+ buffer.write(prefix);
+ buffer.write(word);
+ lengthSoFar = wordLength + prefix.length;
+ firstLine = false;
+ } else {
+ buffer.write(' $word');
+ lengthSoFar += 1 + wordLength;
+ }
+ }
+ return buffer.toString();
+ })
+ .join('\n');
}
/// A regular expression matching terminal color codes.
@@ -766,8 +766,8 @@
/// Only allowed printable ASCII, map anything else to whitespace, take at most
/// 1024 characters.
String sanitizeForTerminal(String input) => String.fromCharCodes(
- input.runes.map((r) => 32 <= r && r <= 127 ? r : 32).take(1024),
- );
+ input.runes.map((r) => 32 <= r && r <= 127 ? r : 32).take(1024),
+);
extension ExpectField on YamlMap {
/// Looks up the [key] in this map, and validates that it is of type [T],
@@ -815,13 +815,13 @@
/// Throws a [SourceSpanApplicationException] for the first entry that does
/// not have a value of type [T].
List<T> expectElements<T extends Object?>() => [
- for (var node in nodes)
- if (node.value case final T value)
- value
- else
- throw SourceSpanApplicationException(
- 'Elements must be of type $T.',
- node.span,
- ),
- ];
+ for (var node in nodes)
+ if (node.value case final T value)
+ value
+ else
+ throw SourceSpanApplicationException(
+ 'Elements must be of type $T.',
+ node.span,
+ ),
+ ];
}
diff --git a/lib/src/validator.dart b/lib/src/validator.dart
index ee69ee6..8fa64f9 100644
--- a/lib/src/validator.dart
+++ b/lib/src/validator.dart
@@ -96,20 +96,23 @@
final allowedSdks = VersionRange(
min: firstSdkVersion,
includeMin: true,
- max: firstSdkVersion.isPreRelease
- ? firstSdkVersion.nextPatch
- : firstSdkVersion.nextBreaking,
+ max:
+ firstSdkVersion.isPreRelease
+ ? firstSdkVersion.nextPatch
+ : firstSdkVersion.nextBreaking,
);
var newSdkConstraint = package.pubspec.dartSdkConstraint.originalConstraint
.intersect(allowedSdks);
if (newSdkConstraint.isEmpty) newSdkConstraint = allowedSdks;
- errors.add('$message\n'
- 'Make sure your SDK constraint excludes old versions:\n'
- '\n'
- 'environment:\n'
- ' sdk: "${newSdkConstraint.asCompatibleWithIfPossible()}"');
+ errors.add(
+ '$message\n'
+ 'Make sure your SDK constraint excludes old versions:\n'
+ '\n'
+ 'environment:\n'
+ ' sdk: "${newSdkConstraint.asCompatibleWithIfPossible()}"',
+ );
}
/// Returns whether [version1] and [version2] are pre-releases of the same
@@ -181,8 +184,9 @@
}),
).then((_) {
hints.addAll([for (final validator in validators) ...validator.hints]);
- warnings
- .addAll([for (final validator in validators) ...validator.warnings]);
+ warnings.addAll([
+ for (final validator in validators) ...validator.warnings,
+ ]);
errors.addAll([for (final validator in validators) ...validator.errors]);
String presentDiagnostics(List<String> diagnostics) => diagnostics
diff --git a/lib/src/validator/analyze.dart b/lib/src/validator/analyze.dart
index 3e1cb00..a806cb9 100644
--- a/lib/src/validator/analyze.dart
+++ b/lib/src/validator/analyze.dart
@@ -27,14 +27,16 @@
final entries = _entriesToAnalyze
.map((dir) => p.join(package.dir, dir))
.where(entryExists);
- final result = await runProcess(
- Platform.resolvedExecutable,
- ['analyze', ...entries, p.join(package.dir, 'pubspec.yaml')],
- );
+ final result = await runProcess(Platform.resolvedExecutable, [
+ 'analyze',
+ ...entries,
+ p.join(package.dir, 'pubspec.yaml'),
+ ]);
if (result.exitCode != 0) {
final limitedOutput = limitLength(result.stdout, 1000);
- warnings
- .add('`dart analyze` found the following issue(s):\n$limitedOutput');
+ warnings.add(
+ '`dart analyze` found the following issue(s):\n$limitedOutput',
+ );
}
}
}
diff --git a/lib/src/validator/changelog.dart b/lib/src/validator/changelog.dart
index 129c1fd..21c3753 100644
--- a/lib/src/validator/changelog.dart
+++ b/lib/src/validator/changelog.dart
@@ -17,19 +17,24 @@
class ChangelogValidator extends Validator {
@override
Future<void> validate() async {
- final changelog = filesBeneath('.', recursive: false).firstWhereOrNull(
- (entry) => p.basename(entry).contains(_changelogRegexp),
- );
+ final changelog = filesBeneath(
+ '.',
+ recursive: false,
+ ).firstWhereOrNull((entry) => p.basename(entry).contains(_changelogRegexp));
if (changelog == null) {
- warnings.add('Please add a `CHANGELOG.md` to your package. '
- 'See https://dart.dev/tools/pub/publishing#important-files.');
+ warnings.add(
+ 'Please add a `CHANGELOG.md` to your package. '
+ 'See https://dart.dev/tools/pub/publishing#important-files.',
+ );
return;
}
if (p.basename(changelog) != 'CHANGELOG.md') {
- warnings.add('Please consider renaming $changelog to `CHANGELOG.md`. '
- 'See https://dart.dev/tools/pub/publishing#important-files.');
+ warnings.add(
+ 'Please consider renaming $changelog to `CHANGELOG.md`. '
+ 'See https://dart.dev/tools/pub/publishing#important-files.',
+ );
}
final bytes = readBinaryFile(changelog);
@@ -39,9 +44,11 @@
// utf8.decode doesn't allow invalid UTF-8.
contents = utf8.decode(bytes);
} on FormatException catch (_) {
- warnings.add('$changelog contains invalid UTF-8.\n'
- 'This will cause it to be displayed incorrectly on '
- 'the Pub site (https://pub.dev).');
+ warnings.add(
+ '$changelog contains invalid UTF-8.\n'
+ 'This will cause it to be displayed incorrectly on '
+ 'the Pub site (https://pub.dev).',
+ );
// Failed to decode contents, so there's nothing else to check.
return;
}
@@ -49,9 +56,11 @@
final version = package.pubspec.version.toString();
if (!contents.contains(version)) {
- warnings.add("$changelog doesn't mention current version ($version).\n"
- 'Consider updating it with notes on this version prior to '
- 'publication.');
+ warnings.add(
+ "$changelog doesn't mention current version ($version).\n"
+ 'Consider updating it with notes on this version prior to '
+ 'publication.',
+ );
}
}
}
diff --git a/lib/src/validator/compiled_dartdoc.dart b/lib/src/validator/compiled_dartdoc.dart
index e74628e..9b031e2 100644
--- a/lib/src/validator/compiled_dartdoc.dart
+++ b/lib/src/validator/compiled_dartdoc.dart
@@ -28,10 +28,12 @@
];
if (files.every(fileExists)) {
- warnings.add('Avoid putting generated documentation in '
- '${p.relative(dir)}.\n'
- 'Generated documentation bloats the package with redundant '
- 'data.');
+ warnings.add(
+ 'Avoid putting generated documentation in '
+ '${p.relative(dir)}.\n'
+ 'Generated documentation bloats the package with redundant '
+ 'data.',
+ );
}
}
});
diff --git a/lib/src/validator/dependency.dart b/lib/src/validator/dependency.dart
index 3133e16..b945a97 100644
--- a/lib/src/validator/dependency.dart
+++ b/lib/src/validator/dependency.dart
@@ -56,14 +56,16 @@
// Path sources are errors. Other sources are just warnings.
final messages = dep.source is PathSource ? errors : warnings;
- messages.add('Don\'t depend on "${dep.name}" from the ${dep.source} '
- 'source. Use the hosted source instead. For example:\n'
- '\n'
- 'dependencies:\n'
- ' ${dep.name}: $constraint\n'
- '\n'
- 'Using the hosted source ensures that everyone can download your '
- 'package\'s dependencies along with your package.');
+ messages.add(
+ 'Don\'t depend on "${dep.name}" from the ${dep.source} '
+ 'source. Use the hosted source instead. For example:\n'
+ '\n'
+ 'dependencies:\n'
+ ' ${dep.name}: $constraint\n'
+ '\n'
+ 'Using the hosted source ensures that everyone can download your '
+ 'package\'s dependencies along with your package.',
+ );
}
/// Warn about improper dependencies on Flutter.
@@ -73,44 +75,52 @@
return;
}
- errors.add('Don\'t depend on "${dep.name}" from the ${dep.source} '
- 'source. Use the SDK source instead. For example:\n'
- '\n'
- 'dependencies:\n'
- ' ${dep.name}:\n'
- ' sdk: ${dep.constraint}\n'
- '\n'
- 'The Flutter SDK is downloaded and managed outside of pub.');
+ errors.add(
+ 'Don\'t depend on "${dep.name}" from the ${dep.source} '
+ 'source. Use the SDK source instead. For example:\n'
+ '\n'
+ 'dependencies:\n'
+ ' ${dep.name}:\n'
+ ' sdk: ${dep.constraint}\n'
+ '\n'
+ 'The Flutter SDK is downloaded and managed outside of pub.',
+ );
}
/// Warn that dependencies should have version constraints.
void warnAboutNoConstraint(PackageRange dep) {
- var message = 'Your dependency on "${dep.name}" should have a version '
+ var message =
+ 'Your dependency on "${dep.name}" should have a version '
'constraint.';
final locked = context.entrypoint.lockFile.packages[dep.name];
if (locked != null) {
- message = '$message For example:\n'
+ message =
+ '$message For example:\n'
'\n'
'dependencies:\n'
' ${dep.name}: ^${locked.version}\n';
}
- warnings.add('$message\n'
- 'Without a constraint, you\'re promising to support '
- '${log.bold("all")} future versions of "${dep.name}".');
+ warnings.add(
+ '$message\n'
+ 'Without a constraint, you\'re promising to support '
+ '${log.bold("all")} future versions of "${dep.name}".',
+ );
}
/// Warn that dependencies should allow more than a single version.
void warnAboutSingleVersionConstraint(PackageRange dep) {
- warnings.add('Your dependency on "${dep.name}" '
- 'should allow more than one version. '
- 'For example:\n'
- '\n'
- 'dependencies:\n'
- ' ${dep.name}: ^${dep.constraint}\n'
- '\n'
- 'Constraints that are too tight will make it difficult for people to '
- 'use your package\n'
- 'along with other packages that also depend on "${dep.name}".');
+ warnings.add(
+ 'Your dependency on "${dep.name}" '
+ 'should allow more than one version. '
+ 'For example:\n'
+ '\n'
+ 'dependencies:\n'
+ ' ${dep.name}: ^${dep.constraint}\n'
+ '\n'
+ 'Constraints that are too tight will make it difficult for people to '
+ 'use your package\n'
+ 'along with other packages that also depend on "${dep.name}".',
+ );
}
/// Warn that dependencies should have lower bounds on their constraints.
@@ -126,14 +136,17 @@
constraint = '">=${locked.version} ${dep.constraint}"';
}
- message = '$message For example:\n'
+ message =
+ '$message For example:\n'
'\n'
'dependencies:\n'
' ${dep.name}: $constraint\n';
}
- warnings.add('$message\n'
- 'Without a constraint, you\'re promising to support '
- '${log.bold("all")} previous versions of "${dep.name}".');
+ warnings.add(
+ '$message\n'
+ 'Without a constraint, you\'re promising to support '
+ '${log.bold("all")} previous versions of "${dep.name}".',
+ );
}
/// Warn that dependencies should have upper bounds on their constraints.
@@ -142,20 +155,22 @@
if ((dep.constraint as VersionRange).includeMin) {
constraint = '^${(dep.constraint as VersionRange).min}';
} else {
- constraint = '"${dep.constraint} '
+ constraint =
+ '"${dep.constraint} '
'<${(dep.constraint as VersionRange).min!.nextBreaking}"';
}
// TODO: Handle the case where `dep.constraint.min` is null.
warnings.add(
- 'Your dependency on "${dep.name}" should have an upper bound. For '
- 'example:\n'
- '\n'
- 'dependencies:\n'
- ' ${dep.name}: $constraint\n'
- '\n'
- 'Without an upper bound, you\'re promising to support '
- '${log.bold("all")} future versions of ${dep.name}.');
+ 'Your dependency on "${dep.name}" should have an upper bound. For '
+ 'example:\n'
+ '\n'
+ 'dependencies:\n'
+ ' ${dep.name}: $constraint\n'
+ '\n'
+ 'Without an upper bound, you\'re promising to support '
+ '${log.bold("all")} future versions of ${dep.name}.',
+ );
}
void warnAboutPrerelease(String dependencyName, VersionRange constraint) {
@@ -163,12 +178,14 @@
if (constraint.min != null &&
constraint.min!.isPreRelease &&
!packageVersion.isPreRelease) {
- warnings.add('Packages dependent on a pre-release of another package '
- 'should themselves be published as a pre-release version. '
- 'If this package needs $dependencyName version ${constraint.min}, '
- 'consider publishing the package as a pre-release instead.\n'
- 'See https://dart.dev/tools/pub/publishing#publishing-prereleases '
- 'For more information on pre-releases.');
+ warnings.add(
+ 'Packages dependent on a pre-release of another package '
+ 'should themselves be published as a pre-release version. '
+ 'If this package needs $dependencyName version ${constraint.min}, '
+ 'consider publishing the package as a pre-release instead.\n'
+ 'See https://dart.dev/tools/pub/publishing#publishing-prereleases '
+ 'for more information on pre-releases.',
+ );
}
}
diff --git a/lib/src/validator/dependency_override.dart b/lib/src/validator/dependency_override.dart
index feeea6e..8f88631 100644
--- a/lib/src/validator/dependency_override.dart
+++ b/lib/src/validator/dependency_override.dart
@@ -13,13 +13,15 @@
class DependencyOverrideValidator extends Validator {
@override
Future<void> validate() async {
- final overridden =
- MapKeySet(context.entrypoint.workspaceRoot.allOverridesInWorkspace);
+ final overridden = MapKeySet(
+ context.entrypoint.workspaceRoot.allOverridesInWorkspace,
+ );
final dev = MapKeySet(package.devDependencies);
if (overridden.difference(dev).isNotEmpty) {
- final overridesFile = package.pubspec.dependencyOverridesFromOverridesFile
- ? package.pubspecOverridesPath
- : package.pubspecPath;
+ final overridesFile =
+ package.pubspec.dependencyOverridesFromOverridesFile
+ ? package.pubspecOverridesPath
+ : package.pubspecPath;
hints.add('''
Non-dev dependencies are overridden in $overridesFile.
diff --git a/lib/src/validator/deprecated_fields.dart b/lib/src/validator/deprecated_fields.dart
index 788996c..733fa15 100644
--- a/lib/src/validator/deprecated_fields.dart
+++ b/lib/src/validator/deprecated_fields.dart
@@ -12,20 +12,28 @@
@override
Future validate() async {
if (package.pubspec.fields.containsKey('transformers')) {
- warnings.add('Your pubspec.yaml includes a "transformers" section which'
- ' is no longer used and may be removed.');
+ warnings.add(
+ 'Your pubspec.yaml includes a "transformers" section which'
+ ' is no longer used and may be removed.',
+ );
}
if (package.pubspec.fields.containsKey('web')) {
- warnings.add('Your pubspec.yaml includes a "web" section which'
- ' is no longer used and may be removed.');
+ warnings.add(
+ 'Your pubspec.yaml includes a "web" section which'
+ ' is no longer used and may be removed.',
+ );
}
if (package.pubspec.fields.containsKey('author')) {
- warnings.add('Your pubspec.yaml includes an "author" section which'
- ' is no longer used and may be removed.');
+ warnings.add(
+ 'Your pubspec.yaml includes an "author" section which'
+ ' is no longer used and may be removed.',
+ );
}
if (package.pubspec.fields.containsKey('authors')) {
- warnings.add('Your pubspec.yaml includes an "authors" section which'
- ' is no longer used and may be removed.');
+ warnings.add(
+ 'Your pubspec.yaml includes an "authors" section which'
+ ' is no longer used and may be removed.',
+ );
}
}
}
diff --git a/lib/src/validator/devtools_extension.dart b/lib/src/validator/devtools_extension.dart
index 9776c9a..9d2887c 100644
--- a/lib/src/validator/devtools_extension.dart
+++ b/lib/src/validator/devtools_extension.dart
@@ -19,12 +19,7 @@
if (!files.any(
(f) => p.equals(
f,
- p.join(
- package.dir,
- 'extension',
- 'devtools',
- 'config.yaml',
- ),
+ p.join(package.dir, 'extension', 'devtools', 'config.yaml'),
),
) ||
!files.any(
diff --git a/lib/src/validator/directory.dart b/lib/src/validator/directory.dart
index 71f5598..db254b4 100644
--- a/lib/src/validator/directory.dart
+++ b/lib/src/validator/directory.dart
@@ -37,18 +37,22 @@
if (_pluralNames.contains(dirName)) {
// Cut off the "s"
final singularName = dirName.substring(0, dirName.length - 1);
- warnings.add('Rename the top-level "$dirName" directory to '
- '"$singularName".\n'
- 'The Pub layout convention is to use singular directory '
- 'names.\n'
- 'Plural names won\'t be correctly identified by Pub and other '
- 'tools.\n$docRef');
+ warnings.add(
+ 'Rename the top-level "$dirName" directory to '
+ '"$singularName".\n'
+ 'The Pub layout convention is to use singular directory '
+ 'names.\n'
+ 'Plural names won\'t be correctly identified by Pub and other '
+ 'tools.\n$docRef',
+ );
}
if (dirName.contains(RegExp(r'^samples?$'))) {
- warnings.add('Rename the top-level "$dirName" directory to "example".\n'
- 'This allows Pub to find your examples and create "packages" '
- 'directories for them.\n$docRef');
+ warnings.add(
+ 'Rename the top-level "$dirName" directory to "example".\n'
+ 'This allows Pub to find your examples and create "packages" '
+ 'directories for them.\n$docRef',
+ );
}
}
}
diff --git a/lib/src/validator/executable.dart b/lib/src/validator/executable.dart
index 2f29cc2..c5d5282 100644
--- a/lib/src/validator/executable.dart
+++ b/lib/src/validator/executable.dart
@@ -13,15 +13,19 @@
class ExecutableValidator extends Validator {
@override
Future validate() async {
- final binFiles =
- filesBeneath('bin', recursive: false).map(package.relative);
+ final binFiles = filesBeneath(
+ 'bin',
+ recursive: false,
+ ).map(package.relative);
package.pubspec.executables.forEach((executable, script) {
final scriptPath = p.join('bin', '$script.dart');
if (binFiles.contains(scriptPath)) return;
- warnings.add('Your pubspec.yaml lists an executable "$executable" that '
- 'points to a script "$scriptPath" that does not exist.');
+ warnings.add(
+ 'Your pubspec.yaml lists an executable "$executable" that '
+ 'points to a script "$scriptPath" that does not exist.',
+ );
});
}
}
diff --git a/lib/src/validator/flutter_constraint.dart b/lib/src/validator/flutter_constraint.dart
index 7b0527b..8b25424 100644
--- a/lib/src/validator/flutter_constraint.dart
+++ b/lib/src/validator/flutter_constraint.dart
@@ -21,10 +21,11 @@
if (flutterConstraint is String) {
final constraint = VersionConstraint.parse(flutterConstraint);
if (constraint is VersionRange && constraint.max != null) {
- final replacement = constraint.min == null
- ? 'You can replace the constraint with `any`.'
- : 'You can replace that with '
- 'just the lower bound: `>=${constraint.min}`.';
+ final replacement =
+ constraint.min == null
+ ? 'You can replace the constraint with `any`.'
+ : 'You can replace that with '
+ 'just the lower bound: `>=${constraint.min}`.';
warnings.add('''
The Flutter constraint should not have an upper bound.
diff --git a/lib/src/validator/flutter_plugin_format.dart b/lib/src/validator/flutter_plugin_format.dart
index 70f4459..3ac173f 100644
--- a/lib/src/validator/flutter_plugin_format.dart
+++ b/lib/src/validator/flutter_plugin_format.dart
@@ -46,18 +46,22 @@
includeMin: true,
),
))) {
- errors.add('pubspec.yaml allows Flutter SDK version 1.9.x, which does '
- 'not support the flutter.plugin.platforms key.\n'
- 'Please consider increasing the Flutter SDK requirement to '
- '^1.10.0 (environment.sdk.flutter)\n\nSee $_pluginDocsUrl');
+ errors.add(
+ 'pubspec.yaml allows Flutter SDK version 1.9.x, which does '
+ 'not support the flutter.plugin.platforms key.\n'
+ 'Please consider increasing the Flutter SDK requirement to '
+ '^1.10.0 (environment.sdk.flutter)\n\nSee $_pluginDocsUrl',
+ );
return;
}
if (usesOldPluginFormat) {
- errors.add('In pubspec.yaml the '
- 'flutter.plugin.{androidPackage,iosPrefix,pluginClass} keys are '
- 'deprecated. Instead use the flutter.plugin.platforms key '
- 'introduced in Flutter 1.10.0\n\nSee $_pluginDocsUrl');
+ errors.add(
+ 'In pubspec.yaml the '
+ 'flutter.plugin.{androidPackage,iosPrefix,pluginClass} keys are '
+ 'deprecated. Instead use the flutter.plugin.platforms key '
+ 'introduced in Flutter 1.10.0\n\nSee $_pluginDocsUrl',
+ );
}
} else {
// Ignore all packages that do not have the `flutter.plugin` property.
diff --git a/lib/src/validator/git_status.dart b/lib/src/validator/git_status.dart
index c060925..a4d2585 100644
--- a/lib/src/validator/git_status.dart
+++ b/lib/src/validator/git_status.dart
@@ -35,16 +35,13 @@
return;
}
reporoot = maybeReporoot;
- output = git.runSyncBytes(
- [
- 'status',
- '-z', // Machine parsable
- '--no-renames', // We don't care about renames.
+ output = git.runSyncBytes([
+ 'status',
+ '-z', // Machine parsable
+ '--no-renames', // We don't care about renames.
- '--untracked-files=no', // Don't show untracked files.
- ],
- workingDir: package.dir,
- );
+ '--untracked-files=no', // Don't show untracked files.
+ ], workingDir: package.dir);
} on git.GitException catch (e) {
log.fine('Could not run `git status` files in repo (${e.message}).');
// This validation is only a warning.
@@ -53,25 +50,26 @@
}
final List<String> modifiedFiles;
try {
- modifiedFiles = git
- .splitZeroTerminated(output, skipPrefix: 3)
- .map((bytes) {
- try {
- final filename = utf8.decode(bytes);
- final fullPath = p.join(reporoot, filename);
- if (!files.any((f) => p.equals(fullPath, f))) {
- // File is not in the published set - ignore.
- return null;
- }
- return p.relative(fullPath);
- } on FormatException catch (e) {
- // Filename is not utf8 - ignore.
- log.fine('Cannot decode file name: $e');
- return null;
- }
- })
- .nonNulls
- .toList();
+ modifiedFiles =
+ git
+ .splitZeroTerminated(output, skipPrefix: 3)
+ .map((bytes) {
+ try {
+ final filename = utf8.decode(bytes);
+ final fullPath = p.join(reporoot, filename);
+ if (!files.any((f) => p.equals(fullPath, f))) {
+ // File is not in the published set - ignore.
+ return null;
+ }
+ return p.relative(fullPath);
+ } on FormatException catch (e) {
+ // Filename is not utf8 - ignore.
+ log.fine('Cannot decode file name: $e');
+ return null;
+ }
+ })
+ .nonNulls
+ .toList();
} on FormatException catch (e) {
// Malformed output from `git status`. Skip this validation.
log.fine('Malformed output from `git status -z`: $e');
diff --git a/lib/src/validator/gitignore.dart b/lib/src/validator/gitignore.dart
index 884528b..4afea11 100644
--- a/lib/src/validator/gitignore.dart
+++ b/lib/src/validator/gitignore.dart
@@ -25,18 +25,15 @@
if (package.inGitRepo) {
final Uint8List output;
try {
- output = git.runSyncBytes(
- [
- '-c',
- 'core.quotePath=false',
- 'ls-files',
- '-z',
- '--cached',
- '--exclude-standard',
- '--recurse-submodules',
- ],
- workingDir: package.dir,
- );
+ output = git.runSyncBytes([
+ '-c',
+ 'core.quotePath=false',
+ 'ls-files',
+ '-z',
+ '--cached',
+ '--exclude-standard',
+ '--recurse-submodules',
+ ], workingDir: package.dir);
} on git.GitException catch (e) {
log.fine('Could not run `git ls-files` files in repo (${e.message}).');
// This validation is only a warning.
@@ -47,9 +44,10 @@
final List<String> checkedIntoGit;
try {
- checkedIntoGit = git.splitZeroTerminated(output).map((b) {
- return utf8.decode(b);
- }).toList();
+ checkedIntoGit =
+ git.splitZeroTerminated(output).map((b) {
+ return utf8.decode(b);
+ }).toList();
} on FormatException catch (e) {
log.fine('Failed decoding git output. Skipping validation. $e.');
return;
@@ -68,35 +66,40 @@
return p.join(root, path);
}
- final unignoredByGitignore = Ignore.listFiles(
- beneath: beneath,
- listDir: (dir) {
- final contents = Directory(resolve(dir)).listSync(followLinks: false);
- return contents.map(
- (entity) =>
- p.posix.joinAll(p.split(p.relative(entity.path, from: root))),
- );
- },
- ignoreForDir: (dir) {
- final gitIgnore = resolve('$dir/.gitignore');
- final rules = [
- if (fileExists(gitIgnore)) readTextFile(gitIgnore),
- ];
- return rules.isEmpty ? null : Ignore(rules);
- },
- isDir: (dir) {
- final resolved = resolve(dir);
- return dirExists(resolved) && !linkExists(resolved);
- },
- ).map((file) {
- final relative = p.relative(resolve(file), from: package.dir);
- return Platform.isWindows
- ? p.posix.joinAll(p.split(relative))
- : relative;
- }).toSet();
- final ignoredFilesCheckedIn = checkedIntoGit
- .where((file) => !unignoredByGitignore.contains(file))
- .toList();
+ final unignoredByGitignore =
+ Ignore.listFiles(
+ beneath: beneath,
+ listDir: (dir) {
+ final contents = Directory(
+ resolve(dir),
+ ).listSync(followLinks: false);
+ return contents.map(
+ (entity) => p.posix.joinAll(
+ p.split(p.relative(entity.path, from: root)),
+ ),
+ );
+ },
+ ignoreForDir: (dir) {
+ final gitIgnore = resolve('$dir/.gitignore');
+ final rules = [
+ if (fileExists(gitIgnore)) readTextFile(gitIgnore),
+ ];
+ return rules.isEmpty ? null : Ignore(rules);
+ },
+ isDir: (dir) {
+ final resolved = resolve(dir);
+ return dirExists(resolved) && !linkExists(resolved);
+ },
+ ).map((file) {
+ final relative = p.relative(resolve(file), from: package.dir);
+ return Platform.isWindows
+ ? p.posix.joinAll(p.split(relative))
+ : relative;
+ }).toSet();
+ final ignoredFilesCheckedIn =
+ checkedIntoGit
+ .where((file) => !unignoredByGitignore.contains(file))
+ .toList();
if (ignoredFilesCheckedIn.isNotEmpty) {
warnings.add('''
diff --git a/lib/src/validator/leak_detection.dart b/lib/src/validator/leak_detection.dart
index 0f25cf6..3da1359 100644
--- a/lib/src/validator/leak_detection.dart
+++ b/lib/src/validator/leak_detection.dart
@@ -73,8 +73,10 @@
if (leaks.length > 3) {
errors.addAll(leaks.take(2).map((leak) => leak.describe()));
- final files =
- leaks.map((leak) => leak.url).toSet().toList(growable: false)..sort();
+ final files = leaks
+ .map((leak) => leak.url)
+ .toSet()
+ .toList(growable: false)..sort();
final s = files.length > 1 ? 's' : '';
errors.add(
@@ -128,9 +130,10 @@
'at offset $start:$end.\n\n'
'```\n${content.substring(start, end)}\n```\n';
}
- return SourceFile.fromString(content, url: url)
- .span(start, end)
- .message('Potential leak of ${pattern.kind} detected.');
+ return SourceFile.fromString(
+ content,
+ url: url,
+ ).span(start, end).message('Potential leak of ${pattern.kind} detected.');
}
}
@@ -176,11 +179,11 @@
Map<int, double> entropyThresholds = const <int, double>{},
Iterable<String> testsWithLeaks = const <String>[],
Iterable<String> testsWithNoLeaks = const <String>[],
- }) : _pattern = RegExp(pattern),
- _allowed = List.unmodifiable(allowed),
- _entropyThresholds = Map.unmodifiable(entropyThresholds),
- testsWithLeaks = List.unmodifiable(testsWithLeaks),
- testsWithNoLeaks = List.unmodifiable(testsWithNoLeaks);
+ }) : _pattern = RegExp(pattern),
+ _allowed = List.unmodifiable(allowed),
+ _entropyThresholds = Map.unmodifiable(entropyThresholds),
+ testsWithLeaks = List.unmodifiable(testsWithLeaks),
+ testsWithNoLeaks = List.unmodifiable(testsWithNoLeaks);
/// Find possible leaks using this [LeakPattern].
///
@@ -194,8 +197,9 @@
if (_allowed.any((s) => m.group(0)!.contains(s))) {
continue;
}
- if (_entropyThresholds.entries
- .any((entry) => _entropy(m.group(entry.key)!) < entry.value)) {
+ if (_entropyThresholds.entries.any(
+ (entry) => _entropy(m.group(entry.key)!) < entry.value,
+ )) {
continue;
}
@@ -246,7 +250,8 @@
//
// Maximum length of an access key is specified as 128 here:
// https://docs.aws.amazon.com/IAM/latest/APIReference/API_AccessKey.html#API_AccessKey_Contents
- pattern: r'[^A-Z0-9]'
+ pattern:
+ r'[^A-Z0-9]'
r'('
r'(?:A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)'
r'[A-Z0-9]{12,128}'
@@ -398,7 +403,7 @@
final id = "191919191919-onesonesonesonesonesonesonesones.apps.googleusercontent.com";
// This will count as being leaked
final superSecret = '204799038523-t6juuc8cvsvn7bdq0chhihkejuru0bkj.apps.googleusercontent.com';
- '''
+ ''',
],
testsWithNoLeaks: [
// Not enough entropy:
@@ -564,18 +569,19 @@
),
LeakPattern._(
kind: 'PGP Private Key',
- pattern: [
- _pemBegin('PGP PRIVATE KEY BLOCK'),
- // Allow "Armor Headers" from:
- // https://www.rfc-editor.org/rfc/rfc4880.html#section-6.2
- '(?:\\w+: [^\\n]{1,1024}$_pemRequireLineBreak$_pemWSP)*',
- _pemBase64Block(),
- // Require a line break, and a 24-bit base64 encoded checksum prefixed '='
- // https://www.rfc-editor.org/rfc/rfc4880.html#section-6
- '$_pemRequireLineBreak$_pemWSP',
- '=(?:(?:[a-zA-Z0-9+/]$_pemWSP){4})',
- _pemEnd('PGP PRIVATE KEY BLOCK'),
- ].join(),
+ pattern:
+ [
+ _pemBegin('PGP PRIVATE KEY BLOCK'),
+ // Allow "Armor Headers" from:
+ // https://www.rfc-editor.org/rfc/rfc4880.html#section-6.2
+ '(?:\\w+: [^\\n]{1,1024}$_pemRequireLineBreak$_pemWSP)*',
+ _pemBase64Block(),
+ // Require a line break, and a 24-bit base64 encoded checksum prefixed '='
+ // https://www.rfc-editor.org/rfc/rfc4880.html#section-6
+ '$_pemRequireLineBreak$_pemWSP',
+ '=(?:(?:[a-zA-Z0-9+/]$_pemWSP){4})',
+ _pemEnd('PGP PRIVATE KEY BLOCK'),
+ ].join(),
testsWithLeaks: [
'''
-----BEGIN PGP PRIVATE KEY BLOCK-----
@@ -620,7 +626,8 @@
// is in a JSON string. We just require something to indicate line break.
String _pemRequireLineBreak = r'\s*(?:\\r|\\n|\r|\n)\s*';
-String _pemBegin(String label) => [
+String _pemBegin(String label) =>
+ [
// Require a boundary
'-----BEGIN $label-----',
// Require \n, \r, \\r, or \\n, backslash escaping is allowed if the key
@@ -630,7 +637,8 @@
_pemWSP,
].join();
-String _pemBase64Block() => [
+String _pemBase64Block() =>
+ [
// Require base64 characters in blocks of 4, allow arbitrary whitespace
// and escaped line breaks in between.
'(?:(?:[a-zA-Z0-9+/]$_pemWSP){4})+',
@@ -655,7 +663,8 @@
'))?',
].join();
-String _pemEnd(String label) => [
+String _pemEnd(String label) =>
+ [
// Require \n, \r, \\r, or \\n, backslash escaping is allowed if the key
// is in a JSON string. We just require something to indicate line break.
_pemRequireLineBreak,
@@ -664,8 +673,5 @@
'-----END $label-----',
].join();
-String _pemKeyFormat(String label) => [
- _pemBegin(label),
- _pemBase64Block(),
- _pemEnd(label),
- ].join();
+String _pemKeyFormat(String label) =>
+ [_pemBegin(label), _pemBase64Block(), _pemEnd(label)].join();
diff --git a/lib/src/validator/license.dart b/lib/src/validator/license.dart
index 993a48f..41bfe64 100644
--- a/lib/src/validator/license.dart
+++ b/lib/src/validator/license.dart
@@ -8,29 +8,37 @@
import '../validator.dart';
-final licenseLike =
- RegExp(r'^(([a-zA-Z0-9]+[-_])?(LICENSE|COPYING)|UNLICENSE)(\..*)?$');
+final licenseLike = RegExp(
+ r'^(([a-zA-Z0-9]+[-_])?(LICENSE|COPYING)|UNLICENSE)(\..*)?$',
+);
/// A validator that checks that a LICENSE-like file exists.
class LicenseValidator extends Validator {
@override
Future validate() {
return Future.sync(() {
- final candidates = filesBeneath('.', recursive: false)
- .where((file) => licenseLike.hasMatch(p.basename(file)));
+ final candidates = filesBeneath(
+ '.',
+ recursive: false,
+ ).where((file) => licenseLike.hasMatch(p.basename(file)));
if (candidates.isNotEmpty) {
- if (!candidates
- .any((candidate) => p.basename(candidate) == 'LICENSE')) {
+ if (!candidates.any(
+ (candidate) => p.basename(candidate) == 'LICENSE',
+ )) {
final firstCandidate = candidates.first;
- warnings.add('Please consider renaming $firstCandidate to `LICENSE`. '
- 'See https://dart.dev/tools/pub/publishing#important-files.');
+ warnings.add(
+ 'Please consider renaming $firstCandidate to `LICENSE`. '
+ 'See https://dart.dev/tools/pub/publishing#important-files.',
+ );
}
return;
}
- errors.add('You must have a LICENSE file in the root directory.\n'
- 'An open-source license helps ensure people can legally use your '
- 'code.');
+ errors.add(
+ 'You must have a LICENSE file in the root directory.\n'
+ 'An open-source license helps ensure people can legally use your '
+ 'code.',
+ );
});
}
}
diff --git a/lib/src/validator/name.dart b/lib/src/validator/name.dart
index 20c2a80..1988f6a 100644
--- a/lib/src/validator/name.dart
+++ b/lib/src/validator/name.dart
@@ -22,9 +22,11 @@
if (libraries.length == 1) {
final libName = p.basenameWithoutExtension(libraries[0]);
if (libName == package.name) return;
- warnings.add('The name of "${libraries[0]}", "$libName", should match '
- 'the name of the package, "${package.name}".\n'
- 'This helps users know what library to import.');
+ warnings.add(
+ 'The name of "${libraries[0]}", "$libName", should match '
+ 'the name of the package, "${package.name}".\n'
+ 'This helps users know what library to import.',
+ );
}
});
}
@@ -47,18 +49,26 @@
if (name == '') {
errors.add('$description may not be empty.');
} else if (!RegExp(r'^[a-zA-Z0-9_]*$').hasMatch(name)) {
- errors.add('$description may only contain letters, numbers, and '
- 'underscores.\n'
- 'Using a valid Dart identifier makes the name usable in Dart code.');
+ errors.add(
+ '$description may only contain letters, numbers, and '
+ 'underscores.\n'
+ 'Using a valid Dart identifier makes the name usable in Dart code.',
+ );
} else if (!RegExp(r'^[a-zA-Z_]').hasMatch(name)) {
- errors.add('$description must begin with a letter or underscore.\n'
- 'Using a valid Dart identifier makes the name usable in Dart code.');
+ errors.add(
+ '$description must begin with a letter or underscore.\n'
+ 'Using a valid Dart identifier makes the name usable in Dart code.',
+ );
} else if (reservedWords.contains(name.toLowerCase())) {
- errors.add('$description may not be a reserved word in Dart.\n'
- 'Using a valid Dart identifier makes the name usable in Dart code.');
+ errors.add(
+ '$description may not be a reserved word in Dart.\n'
+ 'Using a valid Dart identifier makes the name usable in Dart code.',
+ );
} else if (RegExp(r'[A-Z]').hasMatch(name)) {
- warnings.add('$description should be lower-case. Maybe use '
- '"${_unCamelCase(name)}"?');
+ warnings.add(
+ '$description should be lower-case. Maybe use '
+ '"${_unCamelCase(name)}"?',
+ );
}
}
diff --git a/lib/src/validator/pubspec.dart b/lib/src/validator/pubspec.dart
index ef9cff8..f04ecaf 100644
--- a/lib/src/validator/pubspec.dart
+++ b/lib/src/validator/pubspec.dart
@@ -15,8 +15,10 @@
class PubspecValidator extends Validator {
@override
Future validate() async {
- if (!filesBeneath('.', recursive: false)
- .any((file) => p.basename(file) == 'pubspec.yaml')) {
+ if (!filesBeneath(
+ '.',
+ recursive: false,
+ ).any((file) => p.basename(file) == 'pubspec.yaml')) {
errors.add('The pubspec is hidden, probably by .gitignore or pubignore.');
}
}
diff --git a/lib/src/validator/pubspec_field.dart b/lib/src/validator/pubspec_field.dart
index 1bab65d..138c4e3 100644
--- a/lib/src/validator/pubspec_field.dart
+++ b/lib/src/validator/pubspec_field.dart
@@ -44,8 +44,10 @@
if (value == null) {
errors.add('Your pubspec.yaml is missing a "$field" field.');
} else if (value is! String) {
- errors.add('Your pubspec.yaml\'s "$field" field must be a string, but '
- 'it was "$value".');
+ errors.add(
+ 'Your pubspec.yaml\'s "$field" field must be a string, but '
+ 'it was "$value".',
+ );
}
}
@@ -55,15 +57,19 @@
if (url == null) return;
if (url is! String) {
- errors.add('Your pubspec.yaml\'s "$field" field must be a string, but '
- 'it was "$url".');
+ errors.add(
+ 'Your pubspec.yaml\'s "$field" field must be a string, but '
+ 'it was "$url".',
+ );
return;
}
final goodScheme = RegExp(r'^https?:');
if (!goodScheme.hasMatch(url)) {
- errors.add('Your pubspec.yaml\'s "$field" field must be an "http:" or '
- '"https:" URL, but it was "$url".');
+ errors.add(
+ 'Your pubspec.yaml\'s "$field" field must be an "http:" or '
+ '"https:" URL, but it was "$url".',
+ );
}
}
}
diff --git a/lib/src/validator/pubspec_typo.dart b/lib/src/validator/pubspec_typo.dart
index 4ca08fa..e872071 100644
--- a/lib/src/validator/pubspec_typo.dart
+++ b/lib/src/validator/pubspec_typo.dart
@@ -38,8 +38,10 @@
/// 0.21 is a magic value determined by looking at the most common typos
/// in all the pubspecs on pub.dev.
if (bestLevenshteinRatio > 0 && bestLevenshteinRatio < 0.21) {
- warnings.add('"$key" is not a key recognized by pub - '
- 'did you mean "$closestKey"?');
+ warnings.add(
+ '"$key" is not a key recognized by pub - '
+ 'did you mean "$closestKey"?',
+ );
warningCount++;
if (warningCount == 3) break;
diff --git a/lib/src/validator/readme.dart b/lib/src/validator/readme.dart
index fbcc7c5..815c96b 100644
--- a/lib/src/validator/readme.dart
+++ b/lib/src/validator/readme.dart
@@ -21,8 +21,10 @@
// If multiple READMEs are found, this uses the same conventions as
// pub.dev for choosing the primary one: the README with the fewest
// extensions that is lexically ordered first is chosen.
- final readmes = filesBeneath('.', recursive: false)
- .where((file) => p.basename(file).contains(_readmeRegexp));
+ final readmes = filesBeneath(
+ '.',
+ recursive: false,
+ ).where((file) => p.basename(file).contains(_readmeRegexp));
if (readmes.isEmpty) {
warnings.add('Please add a README.md file that describes your package.');
@@ -38,8 +40,10 @@
});
if (p.basename(readme) != 'README.md') {
- warnings.add('Please consider renaming $readme to `README.md`. '
- 'See https://dart.dev/tools/pub/publishing#important-files.');
+ warnings.add(
+ 'Please consider renaming $readme to `README.md`. '
+ 'See https://dart.dev/tools/pub/publishing#important-files.',
+ );
}
final bytes = readBinaryFile(readme);
@@ -47,9 +51,11 @@
// utf8.decode doesn't allow invalid UTF-8.
utf8.decode(bytes);
} on FormatException catch (_) {
- warnings.add('$readme contains invalid UTF-8.\n'
- 'This will cause it to be displayed incorrectly on '
- 'the Pub site (https://pub.dev).');
+ warnings.add(
+ '$readme contains invalid UTF-8.\n'
+ 'This will cause it to be displayed incorrectly on '
+ 'the Pub site (https://pub.dev).',
+ );
}
}
}
diff --git a/lib/src/validator/relative_version_numbering.dart b/lib/src/validator/relative_version_numbering.dart
index dac2422..f9c18be 100644
--- a/lib/src/validator/relative_version_numbering.dart
+++ b/lib/src/validator/relative_version_numbering.dart
@@ -29,10 +29,7 @@
List<PackageId> existingVersions;
try {
existingVersions = await cache.getVersions(
- hostedSource.refFor(
- package.name,
- url: serverUrl.toString(),
- ),
+ hostedSource.refFor(package.name, url: serverUrl.toString()),
);
} on PackageNotFoundException {
existingVersions = [];
@@ -49,8 +46,9 @@
Your version $currentVersion is earlier than that.''');
}
- final previousRelease =
- existingVersions.lastWhereOrNull((id) => id.version < package.version);
+ final previousRelease = existingVersions.lastWhereOrNull(
+ (id) => id.version < package.version,
+ );
if (previousRelease == null) return;
@@ -110,9 +108,9 @@
extension on Version {
Version withoutBuild() => Version(
- major,
- minor,
- patch,
- pre: preRelease.isEmpty ? null : preRelease.join('.'),
- );
+ major,
+ minor,
+ patch,
+ pre: preRelease.isEmpty ? null : preRelease.join('.'),
+ );
}
diff --git a/lib/src/validator/sdk_constraint.dart b/lib/src/validator/sdk_constraint.dart
index b5a0adb..d9068cf 100644
--- a/lib/src/validator/sdk_constraint.dart
+++ b/lib/src/validator/sdk_constraint.dart
@@ -23,11 +23,12 @@
if (originalConstraint is VersionRange) {
if (originalConstraint.max == null) {
errors.add(
- 'Published packages should have an upper bound constraint on the '
- 'Dart SDK (typically this should restrict to less than the next '
- 'major version to guard against breaking changes).\n'
- 'See https://dart.dev/tools/pub/pubspec#sdk-constraints for '
- 'instructions on setting an sdk version constraint.');
+ 'Published packages should have an upper bound constraint on the '
+ 'Dart SDK (typically this should restrict to less than the next '
+ 'major version to guard against breaking changes).\n'
+ 'See https://dart.dev/tools/pub/pubspec#sdk-constraints for '
+ 'instructions on setting an sdk version constraint.',
+ );
}
final constraintMin = originalConstraint.min;
@@ -37,19 +38,20 @@
constraintMin.isPreRelease &&
!packageVersion.isPreRelease) {
warnings.add(
- 'Packages with an SDK constraint on a pre-release of the Dart SDK '
- 'should themselves be published as a pre-release version. '
- 'If this package needs Dart version $constraintMin, consider '
- 'publishing the package as a pre-release instead.\n'
- 'See https://dart.dev/tools/pub/publishing#publishing-prereleases '
- 'For more information on pre-releases.');
+ 'Packages with an SDK constraint on a pre-release of the Dart SDK '
+ 'should themselves be published as a pre-release version. '
+ 'If this package needs Dart version $constraintMin, consider '
+ 'publishing the package as a pre-release instead.\n'
+ 'See https://dart.dev/tools/pub/publishing#publishing-prereleases '
+ 'for more information on pre-releases.',
+ );
}
if (
- // We only want to give this hint if there was no other problems with
- // the sdk constraint.
- warnings.isEmpty &&
- errors.isEmpty &&
- originalConstraint != effectiveConstraint) {
+ // We only want to give this hint if there were no other problems with
+ // the sdk constraint.
+ warnings.isEmpty &&
+ errors.isEmpty &&
+ originalConstraint != effectiveConstraint) {
hints.add('''
The declared SDK constraint is '$originalConstraint', this is interpreted as '$effectiveConstraint'.
diff --git a/lib/src/validator/size.dart b/lib/src/validator/size.dart
index b25a842..54dd143 100644
--- a/lib/src/validator/size.dart
+++ b/lib/src/validator/size.dart
@@ -25,11 +25,15 @@
Consider the impact large downloads can have on the package consumer.''');
if (ignoreExists && !package.inGitRepo) {
- hint.write('\nYour .gitignore has no effect since your project '
- 'does not appear to be in version control.');
+ hint.write(
+ '\nYour .gitignore has no effect since your project '
+ 'does not appear to be in version control.',
+ );
} else if (!ignoreExists && package.inGitRepo) {
- hint.write('\nConsider adding a .gitignore to avoid including '
- 'temporary files.');
+ hint.write(
+ '\nConsider adding a .gitignore to avoid including '
+ 'temporary files.',
+ );
}
hints.add(hint.toString());
diff --git a/lib/src/validator/strict_dependencies.dart b/lib/src/validator/strict_dependencies.dart
index 362fcb9..966e03e 100644
--- a/lib/src/validator/strict_dependencies.dart
+++ b/lib/src/validator/strict_dependencies.dart
@@ -104,12 +104,12 @@
Iterable<_Usage> _usagesBeneath(List<String> paths) {
return _findPackages(
- paths.expand(
- (path) {
- return filesBeneath(path, recursive: true)
- .where((file) => p.extension(file) == '.dart');
- },
- ),
+ paths.expand((path) {
+ return filesBeneath(
+ path,
+ recursive: true,
+ ).where((file) => p.extension(file) == '.dart');
+ }),
);
}
}
@@ -155,22 +155,25 @@
/// Returns an error message saying the package is not listed in
/// `dependencies`.
- String dependencyMissingMessage() =>
- _toMessage('This package does not have $package in the `dependencies` '
- 'section of `pubspec.yaml`.');
+ String dependencyMissingMessage() => _toMessage(
+ 'This package does not have $package in the `dependencies` '
+ 'section of `pubspec.yaml`.',
+ );
/// Returns an error message saying the package is not listed in
/// `dependencies` or `dev_dependencies`.
- String dependenciesMissingMessage() =>
- _toMessage('This package does not have $package in the `dependencies` '
- 'or `dev_dependencies` section of `pubspec.yaml`.');
+ String dependenciesMissingMessage() => _toMessage(
+ 'This package does not have $package in the `dependencies` '
+ 'or `dev_dependencies` section of `pubspec.yaml`.',
+ );
/// Returns an error message saying the package should be in `dependencies`.
String dependencyMisplaceMessage() {
final shortFile = p.split(p.relative(_file)).first;
return _toMessage(
- '$package is in the `dev_dependencies` section of `pubspec.yaml`. '
- 'Packages used in $shortFile/ must be declared in the `dependencies` '
- 'section.');
+ '$package is in the `dev_dependencies` section of `pubspec.yaml`. '
+ 'Packages used in $shortFile/ must be declared in the `dependencies` '
+ 'section.',
+ );
}
}
diff --git a/pubspec.lock b/pubspec.lock
index cc88a70..ac0dd37 100644
--- a/pubspec.lock
+++ b/pubspec.lock
@@ -34,10 +34,10 @@
dependency: "direct main"
description:
name: async
- sha256: d2872f9c19731c2e5f10444b14686eb7cc85c76274bd6c16e1816bff9a3bab63
+ sha256: "758e6d74e971c3e5aceb4110bfd6698efc7f501675bcfe0c775459a8140750eb"
url: "https://pub.dev"
source: hosted
- version: "2.12.0"
+ version: "2.13.0"
boolean_selector:
dependency: transitive
description:
@@ -122,10 +122,10 @@
dependency: transitive
description:
name: glob
- sha256: "0e7014b3b7d4dac1ca4d6114f82bf1782ee86745b9b42a92c9289c23d8a0ab63"
+ sha256: c3f1ee72c96f8f78935e18aa8cecced9ab132419e8625dc187e1c2408efc20de
url: "https://pub.dev"
source: hosted
- version: "2.1.2"
+ version: "2.1.3"
graphs:
dependency: "direct main"
description:
@@ -138,10 +138,10 @@
dependency: "direct main"
description:
name: http
- sha256: b9c29a161230ee03d3ccf545097fccd9b87a5264228c5d348202e0f0c28f9010
+ sha256: fe7ab022b76f3034adc518fb6ea04a82387620e19977665ea18d30a1cf43442f
url: "https://pub.dev"
source: hosted
- version: "1.2.2"
+ version: "1.3.0"
http_multi_server:
dependency: "direct main"
description:
@@ -170,10 +170,10 @@
dependency: transitive
description:
name: js
- sha256: c1b2e9b5ea78c45e1a0788d29606ba27dc5f71f019f32ca5140f61ef071838cf
+ sha256: "53385261521cc4a0c4658fd0ad07a7d14591cf8fc33abbceae306ddb974888dc"
url: "https://pub.dev"
source: hosted
- version: "0.7.1"
+ version: "0.7.2"
lints:
dependency: transitive
description:
@@ -298,10 +298,10 @@
dependency: transitive
description:
name: shelf_web_socket
- sha256: cc36c297b52866d203dbf9332263c94becc2fe0ceaa9681d07b6ef9807023b67
+ sha256: "3632775c8e90d6c9712f883e633716432a27758216dfb61bd86a8321c0580925"
url: "https://pub.dev"
source: hosted
- version: "2.0.1"
+ version: "3.0.0"
source_map_stack_trace:
dependency: transitive
description:
@@ -370,10 +370,10 @@
dependency: "direct dev"
description:
name: test
- sha256: "8391fbe68d520daf2314121764d38e37f934c02fd7301ad18307bd93bd6b725d"
+ sha256: "301b213cd241ca982e9ba50266bd3f5bd1ea33f1455554c5abb85d1be0e2d87e"
url: "https://pub.dev"
source: hosted
- version: "1.25.14"
+ version: "1.25.15"
test_api:
dependency: transitive
description:
@@ -450,10 +450,10 @@
dependency: transitive
description:
name: web_socket_channel
- sha256: "9f187088ed104edd8662ca07af4b124465893caf063ba29758f97af57e61da8f"
+ sha256: "0b8e2457400d8a859b7b2030786835a28a8e80836ef64402abef392ff4f1d0e5"
url: "https://pub.dev"
source: hosted
- version: "3.0.1"
+ version: "3.0.2"
webkit_inspection_protocol:
dependency: transitive
description:
@@ -479,4 +479,4 @@
source: hosted
version: "2.2.2"
sdks:
- dart: ">=3.7.0-0 <4.0.0"
+ dart: ">=3.7.0 <4.0.0"
diff --git a/pubspec.yaml b/pubspec.yaml
index 54f6649..91acead 100644
--- a/pubspec.yaml
+++ b/pubspec.yaml
@@ -1,7 +1,7 @@
name: pub
environment:
- sdk: ^3.5.0
+ sdk: ^3.7.0
dependencies:
analyzer: 7.1.0
diff --git a/test/add/common/add_test.dart b/test/add/common/add_test.dart
index 645877b..15e8be6 100644
--- a/test/add/common/add_test.dart
+++ b/test/add/common/add_test.dart
@@ -42,11 +42,13 @@
await d.appPackageConfigFile([
d.packageConfigEntry(name: 'fo_o1.a', path: '../foo'),
]).validate();
- await d.appDir(
- dependencies: {
- 'fo_o1.a': {'path': '../foo'},
- },
- ).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'fo_o1.a': {'path': '../foo'},
+ },
+ )
+ .validate();
});
group('normally', () {
@@ -75,21 +77,24 @@
await pubAdd(args: ['foo:1.2.3', 'bar:1.1.0', 'baz:2.5.3']);
- await d.cacheDir(
- {'foo': '1.2.3', 'bar': '1.1.0', 'baz': '2.5.3'},
- ).validate();
+ await d.cacheDir({
+ 'foo': '1.2.3',
+ 'bar': '1.1.0',
+ 'baz': '2.5.3',
+ }).validate();
await d.appPackageConfigFile([
d.packageConfigEntry(name: 'foo', version: '1.2.3'),
d.packageConfigEntry(name: 'bar', version: '1.1.0'),
d.packageConfigEntry(name: 'baz', version: '2.5.3'),
]).validate();
- await d.appDir(
- dependencies: {'foo': '1.2.3', 'bar': '1.1.0', 'baz': '2.5.3'},
- ).validate();
+ await d
+ .appDir(
+ dependencies: {'foo': '1.2.3', 'bar': '1.1.0', 'baz': '2.5.3'},
+ )
+ .validate();
});
- test(
- 'does not remove empty dev_dependencies '
+ test('does not remove empty dev_dependencies '
'while adding to normal dependencies', () async {
await servePackages()
..serve('foo', '1.2.3')
@@ -123,29 +128,31 @@
]).validate();
});
- test('dry run does not actually add the package or modify the pubspec',
- () async {
- final server = await servePackages();
- server.serve('foo', '1.2.3');
-
- await d.appDir(dependencies: {}).create();
-
- await pubAdd(
- args: ['foo:1.2.3', '--dry-run'],
- output: allOf(
- [contains('Would change 1 dependency'), contains('+ foo 1.2.3')],
- ),
- );
-
- await d.appDir(dependencies: {}).validate();
- await d.dir(appPath, [
- d.nothing('.dart_tool/package_config.json'),
- d.nothing('pubspec.lock'),
- ]).validate();
- });
-
test(
- 'adds a package from a pub server '
+ 'dry run does not actually add the package or modify the pubspec',
+ () async {
+ final server = await servePackages();
+ server.serve('foo', '1.2.3');
+
+ await d.appDir(dependencies: {}).create();
+
+ await pubAdd(
+ args: ['foo:1.2.3', '--dry-run'],
+ output: allOf([
+ contains('Would change 1 dependency'),
+ contains('+ foo 1.2.3'),
+ ]),
+ );
+
+ await d.appDir(dependencies: {}).validate();
+ await d.dir(appPath, [
+ d.nothing('.dart_tool/package_config.json'),
+ d.nothing('pubspec.lock'),
+ ]).validate();
+ },
+ );
+
+ test('adds a package from a pub server '
'even when dependencies key does not exist', () async {
final server = await servePackages();
server.serve('foo', '1.2.3');
@@ -175,36 +182,38 @@
await d.appDir(dependencies: {'foo': '1.2.3'}).validate();
});
- test('Inserts correctly when the pubspec is flow-style at top-level',
- () async {
- final server = await servePackages();
- server.serve('foo', '1.2.3');
+ test(
+ 'Inserts correctly when the pubspec is flow-style at top-level',
+ () async {
+ final server = await servePackages();
+ server.serve('foo', '1.2.3');
- await d.dir(appPath, [
- d.file(
- 'pubspec.yaml',
- '{"name":"myapp", "environment": {"sdk": "$defaultSdkConstraint"}}',
- ),
- ]).create();
+ await d.dir(appPath, [
+ d.file(
+ 'pubspec.yaml',
+ '{"name":"myapp", "environment": {"sdk": "$defaultSdkConstraint"}}',
+ ),
+ ]).create();
- await pubAdd(args: ['foo:1.2.3']);
+ await pubAdd(args: ['foo:1.2.3']);
- final yaml = loadYaml(
- File(p.join(d.sandbox, appPath, 'pubspec.yaml')).readAsStringSync(),
- );
+ final yaml = loadYaml(
+ File(p.join(d.sandbox, appPath, 'pubspec.yaml')).readAsStringSync(),
+ );
- expect(
- ((yaml as YamlMap).nodes['dependencies'] as YamlMap).style,
- CollectionStyle.FLOW,
- reason: 'Should not break a pubspec in flow-style',
- );
+ expect(
+ ((yaml as YamlMap).nodes['dependencies'] as YamlMap).style,
+ CollectionStyle.FLOW,
+ reason: 'Should not break a pubspec in flow-style',
+ );
- await d.cacheDir({'foo': '1.2.3'}).validate();
- await d.appPackageConfigFile([
- d.packageConfigEntry(name: 'foo', version: '1.2.3'),
- ]).validate();
- await d.appDir(dependencies: {'foo': '1.2.3'}).validate();
- });
+ await d.cacheDir({'foo': '1.2.3'}).validate();
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(name: 'foo', version: '1.2.3'),
+ ]).validate();
+ await d.appDir(dependencies: {'foo': '1.2.3'}).validate();
+ },
+ );
group('notifies user about existing constraint', () {
test('if package is added without a version constraint', () async {
@@ -282,8 +291,10 @@
await pubAdd(
args: ['foo:1.2.3'],
output: allOf(
- contains('"foo" was found in dev_dependencies. Removing "foo" and '
- 'adding it to dependencies instead.'),
+ contains(
+ '"foo" was found in dev_dependencies. Removing "foo" and '
+ 'adding it to dependencies instead.',
+ ),
contains(
'> foo 1.2.3 (was 1.2.2) '
'(from dev dependency to direct dependency)',
@@ -304,12 +315,13 @@
]).validate();
});
- test('changing from a dev to non-dev_dependency is considered a change',
- () async {
- (await servePackages()).serve('foo', '1.2.3');
+ test(
+ 'changing from a dev to non-dev_dependency is considered a change',
+ () async {
+ (await servePackages()).serve('foo', '1.2.3');
- await d.dir(appPath, [
- d.file('pubspec.yaml', '''
+ await d.dir(appPath, [
+ d.file('pubspec.yaml', '''
name: myapp
dependencies:
@@ -318,33 +330,34 @@
environment:
sdk: '$defaultSdkConstraint'
'''),
- ]).create();
- await pubGet();
+ ]).create();
+ await pubGet();
- await pubAdd(
- args: ['foo:1.2.3'],
- output: allOf(
- contains('"foo" was found in dev_dependencies. Removing "foo" and '
- 'adding it to dependencies instead.'),
- contains(
- ' foo 1.2.3 (from dev dependency to direct dependency)',
+ await pubAdd(
+ args: ['foo:1.2.3'],
+ output: allOf(
+ contains(
+ '"foo" was found in dev_dependencies. Removing "foo" and '
+ 'adding it to dependencies instead.',
+ ),
+ contains(' foo 1.2.3 (from dev dependency to direct dependency)'),
+ contains('Changed 1 dependency!'),
),
- contains('Changed 1 dependency!'),
- ),
- );
+ );
- await d.cacheDir({'foo': '1.2.3'}).validate();
- await d.appPackageConfigFile([
- d.packageConfigEntry(name: 'foo', version: '1.2.3'),
- ]).validate();
+ await d.cacheDir({'foo': '1.2.3'}).validate();
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(name: 'foo', version: '1.2.3'),
+ ]).validate();
- await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- 'dependencies': {'foo': '1.2.3'},
- }),
- ]).validate();
- });
+ await d.dir(appPath, [
+ d.pubspec({
+ 'name': 'myapp',
+ 'dependencies': {'foo': '1.2.3'},
+ }),
+ ]).validate();
+ },
+ );
group('dependency override', () {
test('passes if package does not specify a range', () async {
@@ -379,10 +392,10 @@
final server = await servePackages();
server.serve('foo', '1.2.3');
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.2.3')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.2.3'),
+ ]).create();
await d.dir(appPath, [
d.pubspec({
@@ -410,10 +423,10 @@
test('passes if constraint matches path dependency override', () async {
final server = await servePackages();
server.serve('foo', '1.2.2');
- await d.dir(
- 'foo',
- [d.libDir('foo'), d.libPubspec('foo', '1.2.2')],
- ).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.2.2'),
+ ]).create();
await d.dir(appPath, [
d.pubspec({
@@ -449,8 +462,10 @@
await pubAdd(
args: ['foo:one-two-three'],
exitCode: exit_codes.DATA,
- error: contains('Invalid version constraint: Could '
- 'not parse version "one-two-three".'),
+ error: contains(
+ 'Invalid version constraint: Could '
+ 'not parse version "one-two-three".',
+ ),
);
await d.dir(appPath, [
@@ -477,8 +492,9 @@
args: ['foo:1.2.3'],
exitCode: exit_codes.DATA,
error: contains(
- '"foo" resolved to "1.2.2" which does not satisfy constraint '
- '"1.2.3". This could be caused by "dependency_overrides".'),
+ '"foo" resolved to "1.2.2" which does not satisfy constraint '
+ '"1.2.3". This could be caused by "dependency_overrides".',
+ ),
);
await d.dir(appPath, [
@@ -496,10 +512,10 @@
final server = await servePackages();
server.serve('foo', '1.2.3');
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
await d.dir(appPath, [
d.pubspec({
@@ -515,8 +531,9 @@
args: ['foo:1.2.3'],
exitCode: exit_codes.DATA,
error: contains(
- '"foo" resolved to "1.0.0" which does not satisfy constraint '
- '"1.2.3". This could be caused by "dependency_overrides".'),
+ '"foo" resolved to "1.0.0" which does not satisfy constraint '
+ '"1.2.3". This could be caused by "dependency_overrides".',
+ ),
);
await d.dir(appPath, [
@@ -532,45 +549,48 @@
]).validate();
});
- test('fails if constraint does not match path dependency override',
- () async {
- final server = await servePackages();
- server.serve('foo', '1.2.2');
- await d.dir(
- 'foo',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ test(
+ 'fails if constraint does not match path dependency override',
+ () async {
+ final server = await servePackages();
+ server.serve('foo', '1.2.2');
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
- await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- 'dependencies': {},
- 'dependency_overrides': {
- 'foo': {'path': '../foo'},
- },
- }),
- ]).create();
+ await d.dir(appPath, [
+ d.pubspec({
+ 'name': 'myapp',
+ 'dependencies': {},
+ 'dependency_overrides': {
+ 'foo': {'path': '../foo'},
+ },
+ }),
+ ]).create();
- await pubAdd(
- args: ['foo:1.2.2'],
- exitCode: exit_codes.DATA,
- error: contains(
+ await pubAdd(
+ args: ['foo:1.2.2'],
+ exitCode: exit_codes.DATA,
+ error: contains(
'"foo" resolved to "1.0.0" which does not satisfy constraint '
- '"1.2.2". This could be caused by "dependency_overrides".'),
- );
+ '"1.2.2". This could be caused by "dependency_overrides".',
+ ),
+ );
- await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- 'dependencies': {},
- 'dependency_overrides': {
- 'foo': {'path': '../foo'},
- },
- }),
- d.nothing('.dart_tool/package_config.json'),
- d.nothing('pubspec.lock'),
- ]).validate();
- });
+ await d.dir(appPath, [
+ d.pubspec({
+ 'name': 'myapp',
+ 'dependencies': {},
+ 'dependency_overrides': {
+ 'foo': {'path': '../foo'},
+ },
+ }),
+ d.nothing('.dart_tool/package_config.json'),
+ d.nothing('pubspec.lock'),
+ ]).validate();
+ },
+ );
});
});
@@ -627,28 +647,30 @@
);
});
- test('dev: adds packages to dev_dependencies instead without a descriptor',
- () async {
- final server = await servePackages();
- server.serve('foo', '1.2.3');
+ test(
+ 'dev: adds packages to dev_dependencies instead without a descriptor',
+ () async {
+ final server = await servePackages();
+ server.serve('foo', '1.2.3');
- await d.dir(appPath, [
- d.pubspec({'name': 'myapp', 'dev_dependencies': {}}),
- ]).create();
+ await d.dir(appPath, [
+ d.pubspec({'name': 'myapp', 'dev_dependencies': {}}),
+ ]).create();
- await pubAdd(args: ['dev:foo:1.2.3']);
+ await pubAdd(args: ['dev:foo:1.2.3']);
- await d.appPackageConfigFile([
- d.packageConfigEntry(name: 'foo', version: '1.2.3'),
- ]).validate();
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(name: 'foo', version: '1.2.3'),
+ ]).validate();
- await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- 'dev_dependencies': {'foo': '1.2.3'},
- }),
- ]).validate();
- });
+ await d.dir(appPath, [
+ d.pubspec({
+ 'name': 'myapp',
+ 'dev_dependencies': {'foo': '1.2.3'},
+ }),
+ ]).validate();
+ },
+ );
test('Cannot combine --dev with :dev', () async {
await d.dir('foo', [d.libPubspec('foo', '1.2.3')]).create();
@@ -807,10 +829,10 @@
test('passes if constraint is git dependency', () async {
final server = await servePackages();
server.serve('foo', '1.2.3');
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.2.3')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.2.3'),
+ ]).create();
await d.dir(appPath, [
d.pubspec({
@@ -838,10 +860,10 @@
test('passes if constraint matches path dependency override', () async {
final server = await servePackages();
server.serve('foo', '1.2.2');
- await d.dir(
- 'foo',
- [d.libDir('foo'), d.libPubspec('foo', '1.2.2')],
- ).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.2.2'),
+ ]).create();
await d.dir(appPath, [
d.pubspec({
@@ -883,8 +905,9 @@
args: ['foo:1.2.3', '--dev'],
exitCode: exit_codes.DATA,
error: contains(
- '"foo" resolved to "1.2.2" which does not satisfy constraint '
- '"1.2.3". This could be caused by "dependency_overrides".'),
+ '"foo" resolved to "1.2.2" which does not satisfy constraint '
+ '"1.2.3". This could be caused by "dependency_overrides".',
+ ),
);
await d.dir(appPath, [
@@ -902,10 +925,10 @@
final server = await servePackages();
server.serve('foo', '1.2.3');
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
await d.dir(appPath, [
d.pubspec({
@@ -921,8 +944,9 @@
args: ['foo:1.2.3'],
exitCode: exit_codes.DATA,
error: contains(
- '"foo" resolved to "1.0.0" which does not satisfy constraint '
- '"1.2.3". This could be caused by "dependency_overrides".'),
+ '"foo" resolved to "1.0.0" which does not satisfy constraint '
+ '"1.2.3". This could be caused by "dependency_overrides".',
+ ),
);
await d.dir(appPath, [
@@ -938,50 +962,52 @@
]).validate();
});
- test('fails if constraint does not match path dependency override',
- () async {
- final server = await servePackages();
- server.serve('foo', '1.2.2');
+ test(
+ 'fails if constraint does not match path dependency override',
+ () async {
+ final server = await servePackages();
+ server.serve('foo', '1.2.2');
- await d.dir(
- 'foo',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
- await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- 'dev_dependencies': {},
- 'dependency_overrides': {
- 'foo': {'path': '../foo'},
- },
- }),
- ]).create();
+ await d.dir(appPath, [
+ d.pubspec({
+ 'name': 'myapp',
+ 'dev_dependencies': {},
+ 'dependency_overrides': {
+ 'foo': {'path': '../foo'},
+ },
+ }),
+ ]).create();
- await pubAdd(
- args: ['foo:1.2.2', '--dev'],
- exitCode: exit_codes.DATA,
- error: contains(
+ await pubAdd(
+ args: ['foo:1.2.2', '--dev'],
+ exitCode: exit_codes.DATA,
+ error: contains(
'"foo" resolved to "1.0.0" which does not satisfy constraint '
- '"1.2.2". This could be caused by "dependency_overrides".'),
- );
+ '"1.2.2". This could be caused by "dependency_overrides".',
+ ),
+ );
- await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- 'dev_dependencies': {},
- 'dependency_overrides': {
- 'foo': {'path': '../foo'},
- },
- }),
- d.nothing('.dart_tool/package_config.json'),
- d.nothing('pubspec.lock'),
- ]).validate();
- });
+ await d.dir(appPath, [
+ d.pubspec({
+ 'name': 'myapp',
+ 'dev_dependencies': {},
+ 'dependency_overrides': {
+ 'foo': {'path': '../foo'},
+ },
+ }),
+ d.nothing('.dart_tool/package_config.json'),
+ d.nothing('pubspec.lock'),
+ ]).validate();
+ },
+ );
});
- test(
- 'prints information saying that package is already a dependency if it '
+ test('prints information saying that package is already a dependency if it '
'already exists and exits with a usage exception', () async {
await servePackages()
..serve('foo', '1.2.3')
@@ -997,9 +1023,11 @@
await pubAdd(
args: ['foo:1.2.3', '--dev'],
- error: contains('"foo" is already in "dependencies". Use '
- '"pub remove foo" to remove it before adding it to '
- '"dev_dependencies"'),
+ error: contains(
+ '"foo" is already in "dependencies". Use '
+ '"pub remove foo" to remove it before adding it to '
+ '"dev_dependencies"',
+ ),
exitCode: exit_codes.DATA,
);
@@ -1131,11 +1159,7 @@
final server = await servePackages();
server.serve('foo', '1.0.0');
await d.dir('bar', [d.libPubspec('bar', '1.0.0')]).create();
- await d.appDir(
- dependencies: {
- 'bar': '^1.0.0',
- },
- ).create();
+ await d.appDir(dependencies: {'bar': '^1.0.0'}).create();
await d.dir(appPath, [
d.pubspecOverrides({
'dependency_overrides': {
diff --git a/test/add/common/invalid_options.dart b/test/add/common/invalid_options.dart
index b97044c..ae834fa 100644
--- a/test/add/common/invalid_options.dart
+++ b/test/add/common/invalid_options.dart
@@ -12,20 +12,24 @@
test('cannot use both --path and --git-<option> flags', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
- await d
- .dir('bar', [d.libDir('bar'), d.libPubspec('foo', '0.0.1')]).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
+ await d.dir('bar', [
+ d.libDir('bar'),
+ d.libPubspec('foo', '0.0.1'),
+ ]).create();
await d.appDir(dependencies: {}).create();
await pubAdd(
args: ['foo', '--git-url', '../foo.git', '--path', '../bar'],
error: allOf([
- contains('Packages can only have one source, pub add flags '
- '"--git-url" and "--path" are'),
+ contains(
+ 'Packages can only have one source, pub add flags '
+ '"--git-url" and "--path" are',
+ ),
contains('conflicting.'),
]),
exitCode: exit_codes.USAGE,
@@ -46,8 +50,10 @@
final server = await startPackageServer();
server.serve('foo', '1.2.3');
- await d
- .dir('bar', [d.libDir('bar'), d.libPubspec('foo', '0.0.1')]).create();
+ await d.dir('bar', [
+ d.libDir('bar'),
+ d.libPubspec('foo', '0.0.1'),
+ ]).create();
await d.appDir(dependencies: {}).create();
await pubAdd(
@@ -59,8 +65,10 @@
'../bar',
],
error: allOf([
- contains('Packages can only have one source, pub add flags '
- '"--hosted-url" and "--path" are'),
+ contains(
+ 'Packages can only have one source, pub add flags '
+ '"--hosted-url" and "--path" are',
+ ),
contains('conflicting.'),
]),
exitCode: exit_codes.USAGE,
@@ -83,10 +91,10 @@
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
await d.appDir(dependencies: {}).create();
await pubAdd(
@@ -98,8 +106,10 @@
'../foo.git',
],
error: allOf([
- contains('Packages can only have one source, pub add flags '
- '"--git-url" and "--hosted-url"'),
+ contains(
+ 'Packages can only have one source, pub add flags '
+ '"--git-url" and "--hosted-url"',
+ ),
contains('are conflicting.'),
]),
exitCode: exit_codes.USAGE,
diff --git a/test/add/common/version_constraint_test.dart b/test/add/common/version_constraint_test.dart
index 6441a70..5e030fa 100644
--- a/test/add/common/version_constraint_test.dart
+++ b/test/add/common/version_constraint_test.dart
@@ -93,26 +93,29 @@
});
test(
- 'empty constraint allows it to choose the latest version not in conflict',
- () async {
- await servePackages()
- ..serve('foo', '0.1.0')
- ..serve('foo', '1.2.3', deps: {'bar': '2.0.4'})
- ..serve('bar', '2.0.3')
- ..serve('bar', '2.0.4');
+ 'empty constraint allows it to choose the latest version not in conflict',
+ () async {
+ await servePackages()
+ ..serve('foo', '0.1.0')
+ ..serve('foo', '1.2.3', deps: {'bar': '2.0.4'})
+ ..serve('bar', '2.0.3')
+ ..serve('bar', '2.0.4');
- await d.appDir(dependencies: {'bar': '2.0.3'}).create();
+ await d.appDir(dependencies: {'bar': '2.0.3'}).create();
- await pubAdd(args: ['foo']);
+ await pubAdd(args: ['foo']);
- await d.appDir(dependencies: {'foo': '^0.1.0', 'bar': '2.0.3'}).validate();
+ await d
+ .appDir(dependencies: {'foo': '^0.1.0', 'bar': '2.0.3'})
+ .validate();
- await d.cacheDir({'foo': '0.1.0', 'bar': '2.0.3'}).validate();
- await d.appPackageConfigFile([
- d.packageConfigEntry(name: 'foo', version: '0.1.0'),
- d.packageConfigEntry(name: 'bar', version: '2.0.3'),
- ]).validate();
- });
+ await d.cacheDir({'foo': '0.1.0', 'bar': '2.0.3'}).validate();
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(name: 'foo', version: '0.1.0'),
+ d.packageConfigEntry(name: 'bar', version: '2.0.3'),
+ ]).validate();
+ },
+ );
group('does not update pubspec if no available version found', () {
test('simple', () async {
@@ -123,9 +126,10 @@
await pubAdd(
args: ['foo:>1.2.0 <2.0.0'],
- error:
- contains("Because myapp depends on foo >1.2.0 <2.0.0 which doesn't "
- 'match any versions, version solving failed.'),
+ error: contains(
+ "Because myapp depends on foo >1.2.0 <2.0.0 which doesn't "
+ 'match any versions, version solving failed.',
+ ),
exitCode: exit_codes.DATA,
);
@@ -147,8 +151,9 @@
await pubAdd(
args: ['foo:1.2.3'],
error: contains(
- 'Because every version of foo depends on bar 2.0.4 and myapp '
- 'depends on bar 2.0.3, foo is forbidden.'),
+ 'Because every version of foo depends on bar 2.0.4 and myapp '
+ 'depends on bar 2.0.3, foo is forbidden.',
+ ),
exitCode: exit_codes.DATA,
);
diff --git a/test/add/common/version_resolution_test.dart b/test/add/common/version_resolution_test.dart
index 5e73a86..1b3f9f0 100644
--- a/test/add/common/version_resolution_test.dart
+++ b/test/add/common/version_resolution_test.dart
@@ -30,9 +30,7 @@
await pubAdd(
args: ['foo', '--dry-run'],
- output: allOf(
- contains('> foo 3.5.0 (was 3.2.1)'),
- ),
+ output: allOf(contains('> foo 3.5.0 (was 3.2.1)')),
);
await pubAdd(args: ['foo']);
@@ -44,55 +42,63 @@
]).validate();
});
- test('chooses the appropriate version to not break other dependencies',
- () async {
- /// The server used to only have the foo v3.2.1 as the latest,
- /// so pub get will create a pubspec.lock to foo 3.2.1
- final server = await servePackages();
+ test(
+ 'chooses the appropriate version to not break other dependencies',
+ () async {
+ /// The server used to only have the foo v3.2.1 as the latest,
+ /// so pub get will create a pubspec.lock to foo 3.2.1
+ final server = await servePackages();
- server.serve('foo', '3.2.1');
- server.serve('bar', '1.0.0', deps: {'foo': '^3.2.1'});
+ server.serve('foo', '3.2.1');
+ server.serve('bar', '1.0.0', deps: {'foo': '^3.2.1'});
- await d.appDir(dependencies: {'bar': '1.0.0'}).create();
- await pubGet();
+ await d.appDir(dependencies: {'bar': '1.0.0'}).create();
+ await pubGet();
- server.serve('foo', '4.0.0');
- server.serve('foo', '2.0.0');
+ server.serve('foo', '4.0.0');
+ server.serve('foo', '2.0.0');
- await pubAdd(args: ['foo']);
+ await pubAdd(args: ['foo']);
- await d.appDir(dependencies: {'foo': '^3.2.1', 'bar': '1.0.0'}).validate();
- await d.cacheDir({'foo': '3.2.1', 'bar': '1.0.0'}).validate();
- await d.appPackageConfigFile([
- d.packageConfigEntry(name: 'foo', version: '3.2.1'),
- d.packageConfigEntry(name: 'bar', version: '1.0.0'),
- ]).validate();
- });
+ await d
+ .appDir(dependencies: {'foo': '^3.2.1', 'bar': '1.0.0'})
+ .validate();
+ await d.cacheDir({'foo': '3.2.1', 'bar': '1.0.0'}).validate();
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(name: 'foo', version: '3.2.1'),
+ d.packageConfigEntry(name: 'bar', version: '1.0.0'),
+ ]).validate();
+ },
+ );
- test('may upgrade other packages if they allow a later version to be chosen',
- () async {
- /// The server used to only have the foo v3.2.1 as the latest,
- /// so pub get will create a pubspec.lock to foo 3.2.1
- final server = await servePackages();
+ test(
+ 'may upgrade other packages if they allow a later version to be chosen',
+ () async {
+ /// The server used to only have the foo v3.2.1 as the latest,
+ /// so pub get will create a pubspec.lock to foo 3.2.1
+ final server = await servePackages();
- server.serve('foo', '3.2.1');
- server.serve('bar', '1.0.0', deps: {'foo': '^3.2.1'});
+ server.serve('foo', '3.2.1');
+ server.serve('bar', '1.0.0', deps: {'foo': '^3.2.1'});
- await d.appDir(dependencies: {'bar': '^1.0.0'}).create();
- await pubGet();
+ await d.appDir(dependencies: {'bar': '^1.0.0'}).create();
+ await pubGet();
- server.serve('foo', '5.0.0');
- server.serve('foo', '4.0.0');
- server.serve('foo', '2.0.0');
- server.serve('bar', '1.5.0', deps: {'foo': '^4.0.0'});
+ server.serve('foo', '5.0.0');
+ server.serve('foo', '4.0.0');
+ server.serve('foo', '2.0.0');
+ server.serve('bar', '1.5.0', deps: {'foo': '^4.0.0'});
- await pubAdd(args: ['foo']);
+ await pubAdd(args: ['foo']);
- await d.appDir(dependencies: {'foo': '^4.0.0', 'bar': '^1.0.0'}).validate();
- await d.cacheDir({'foo': '4.0.0', 'bar': '1.5.0'}).validate();
- await d.appPackageConfigFile([
- d.packageConfigEntry(name: 'foo', version: '4.0.0'),
- d.packageConfigEntry(name: 'bar', version: '1.5.0'),
- ]).validate();
- });
+ await d
+ .appDir(dependencies: {'foo': '^4.0.0', 'bar': '^1.0.0'})
+ .validate();
+ await d.cacheDir({'foo': '4.0.0', 'bar': '1.5.0'}).validate();
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(name: 'foo', version: '4.0.0'),
+ d.packageConfigEntry(name: 'bar', version: '1.5.0'),
+ ]).validate();
+ },
+ );
}
diff --git a/test/add/git/git_test.dart b/test/add/git/git_test.dart
index 19938e3..90d8f39 100644
--- a/test/add/git/git_test.dart
+++ b/test/add/git/git_test.dart
@@ -11,10 +11,10 @@
test('adds a package from git', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
await d.appDir(dependencies: {}).create();
@@ -27,20 +27,22 @@
]),
]).validate();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .validate();
});
test('adds a package from git with relative url and --directory', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
await d.appDir(dependencies: {}).create();
@@ -57,27 +59,30 @@
]),
]).validate();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .validate();
});
test('fails with invalid --git-url', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
await d.appDir(dependencies: {}).create();
await pubAdd(
args: ['foo', '--git-url', ':'],
- error:
- contains('The --git-url must be a valid url: Invalid empty scheme.'),
+ error: contains(
+ 'The --git-url must be a valid url: Invalid empty scheme.',
+ ),
exitCode: exit_codes.USAGE,
);
});
@@ -85,10 +90,10 @@
test('adds a package from git with version constraint', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
await d.appDir(dependencies: {}).create();
@@ -101,28 +106,31 @@
]),
]).validate();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git', 'version': '1.0.0'},
- },
- ).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git', 'version': '1.0.0'},
+ },
+ )
+ .validate();
});
test('fails when adding with an invalid version constraint', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
await d.appDir(dependencies: {}).create();
await pubAdd(
args: ['foo:2.0.0', '--git-url', '../foo.git'],
error: equalsIgnoringWhitespace(
- 'Because myapp depends on foo 2.0.0 from git which doesn\'t match '
- 'any versions, version solving failed.'),
+ 'Because myapp depends on foo 2.0.0 from git which doesn\'t match '
+ 'any versions, version solving failed.',
+ ),
exitCode: exit_codes.DATA,
);
@@ -140,8 +148,10 @@
await pubAdd(
args: ['foo', '--git-url', '../foo.git'],
- error: contains('Unable to resolve package "foo" with the given '
- 'git parameters'),
+ error: contains(
+ 'Unable to resolve package "foo" with the given '
+ 'git parameters',
+ ),
exitCode: exit_codes.DATA,
);
@@ -174,10 +184,10 @@
final server = await servePackages();
server.serve('foo', '1.2.2');
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
await d.dir(appPath, [
d.pubspec({
@@ -231,34 +241,33 @@
output: contains('Changed 1 dependency in `myapp`!'),
);
- await d.appDir(
- dependencies: {
- 'foo': {
- 'git': {'url': '../foo.git', 'path': 'subdir'},
- },
- },
- ).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'git': {'url': '../foo.git', 'path': 'subdir'},
+ },
+ },
+ )
+ .validate();
});
test('Can add multiple git packages using descriptors', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
- await d.git(
- 'bar.git',
- [d.libDir('foo'), d.libPubspec('bar', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
+ await d.git('bar.git', [
+ d.libDir('foo'),
+ d.libPubspec('bar', '1.0.0'),
+ ]).create();
await d.appDir(dependencies: {}).create();
await pubAdd(
- args: [
- 'foo:{"git":"../foo.git"}',
- 'bar:{"git":"../bar.git"}',
- ],
+ args: ['foo:{"git":"../foo.git"}', 'bar:{"git":"../bar.git"}'],
);
await d.dir(appPath, [
diff --git a/test/add/git/ref_test.dart b/test/add/git/ref_test.dart
index 2e182fd..910dddc 100644
--- a/test/add/git/ref_test.dart
+++ b/test/add/git/ref_test.dart
@@ -12,17 +12,17 @@
test('adds a package from git with ref', () async {
ensureGit();
- final repo = d.git(
- 'foo.git',
- [d.libDir('foo', 'foo 1'), d.libPubspec('foo', '1.0.0')],
- );
+ final repo = d.git('foo.git', [
+ d.libDir('foo', 'foo 1'),
+ d.libPubspec('foo', '1.0.0'),
+ ]);
await repo.create();
await repo.runGit(['branch', 'old']);
- await d.git(
- 'foo.git',
- [d.libDir('foo', 'foo 2'), d.libPubspec('foo', '1.0.0')],
- ).commit();
+ await d.git('foo.git', [
+ d.libDir('foo', 'foo 2'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).commit();
await d.appDir(dependencies: {}).create();
@@ -30,36 +30,36 @@
await d.dir(cachePath, [
d.dir('git', [
- d.dir('cache', [
- d.gitPackageRepoCacheDir('foo'),
- ]),
+ d.dir('cache', [d.gitPackageRepoCacheDir('foo')]),
d.gitPackageRevisionCacheDir('foo', modifier: 1),
]),
]).validate();
- await d.appDir(
- dependencies: {
- 'foo': {
- 'git': {'url': '../foo.git', 'ref': 'old'},
- },
- },
- ).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'git': {'url': '../foo.git', 'ref': 'old'},
+ },
+ },
+ )
+ .validate();
});
test('fails when adding from an invalid ref', () async {
ensureGit();
- final repo = d.git(
- 'foo.git',
- [d.libDir('foo', 'foo 1'), d.libPubspec('foo', '1.0.0')],
- );
+ final repo = d.git('foo.git', [
+ d.libDir('foo', 'foo 1'),
+ d.libPubspec('foo', '1.0.0'),
+ ]);
await repo.create();
await repo.runGit(['branch', 'new']);
- await d.git(
- 'foo.git',
- [d.libDir('foo', 'foo 2'), d.libPubspec('foo', '1.0.0')],
- ).commit();
+ await d.git('foo.git', [
+ d.libDir('foo', 'foo 2'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).commit();
await d.appDir(dependencies: {}).create();
diff --git a/test/add/git/subdir_test.dart b/test/add/git/subdir_test.dart
index b17ebde..0c9a695 100644
--- a/test/add/git/subdir_test.dart
+++ b/test/add/git/subdir_test.dart
@@ -38,13 +38,15 @@
),
]).validate();
- await d.appDir(
- dependencies: {
- 'sub': {
- 'git': {'url': '../foo.git', 'path': 'subdir'},
- },
- },
- ).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'sub': {
+ 'git': {'url': '../foo.git', 'path': 'subdir'},
+ },
+ },
+ )
+ .validate();
});
test('adds a package in a deep subdirectory', () async {
@@ -81,12 +83,14 @@
),
]).validate();
- await d.appDir(
- dependencies: {
- 'sub': {
- 'git': {'url': '../foo.git', 'path': 'sub/dir'},
- },
- },
- ).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'sub': {
+ 'git': {'url': '../foo.git', 'path': 'sub/dir'},
+ },
+ },
+ )
+ .validate();
});
}
diff --git a/test/add/hosted/non_default_pub_server_test.dart b/test/add/hosted/non_default_pub_server_test.dart
index e58ea3d..505a21d 100644
--- a/test/add/hosted/non_default_pub_server_test.dart
+++ b/test/add/hosted/non_default_pub_server_test.dart
@@ -31,11 +31,13 @@
d.packageConfigEntry(name: 'foo', version: '1.2.3', server: server),
]).validate();
- await d.appDir(
- dependencies: {
- 'foo': {'version': '1.2.3', 'hosted': url},
- },
- ).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'version': '1.2.3', 'hosted': url},
+ },
+ )
+ .validate();
});
test('Uses old syntax when needed', () async {
@@ -53,24 +55,23 @@
},
};
- await d.appDir(
- dependencies: {},
- pubspec: oldSyntaxSdkConstraint,
- ).create();
+ await d.appDir(dependencies: {}, pubspec: oldSyntaxSdkConstraint).create();
final url = server.url;
await pubAdd(args: ['foo:1.2.3', '--hosted-url', url]);
- await d.appDir(
- dependencies: {
- 'foo': {
- 'version': '1.2.3',
- 'hosted': {'name': 'foo', 'url': url},
- },
- },
- pubspec: oldSyntaxSdkConstraint,
- ).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'version': '1.2.3',
+ 'hosted': {'name': 'foo', 'url': url},
+ },
+ },
+ pubspec: oldSyntaxSdkConstraint,
+ )
+ .validate();
});
test('adds multiple packages from a non-default pub server', () async {
@@ -94,10 +95,11 @@
args: ['foo:1.2.3', 'bar:3.2.3', 'baz:1.3.5', '--hosted-url', url],
);
- await d.cacheDir(
- {'foo': '1.2.3', 'bar': '3.2.3', 'baz': '1.3.5'},
- port: server.port,
- ).validate();
+ await d.cacheDir({
+ 'foo': '1.2.3',
+ 'bar': '3.2.3',
+ 'baz': '1.3.5',
+ }, port: server.port).validate();
await d.appPackageConfigFile([
d.packageConfigEntry(name: 'foo', version: '1.2.3', server: server),
@@ -105,13 +107,15 @@
d.packageConfigEntry(name: 'baz', version: '1.3.5', server: server),
]).validate();
- await d.appDir(
- dependencies: {
- 'foo': {'version': '1.2.3', 'hosted': url},
- 'bar': {'version': '3.2.3', 'hosted': url},
- 'baz': {'version': '1.3.5', 'hosted': url},
- },
- ).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'version': '1.2.3', 'hosted': url},
+ 'bar': {'version': '3.2.3', 'hosted': url},
+ 'baz': {'version': '1.3.5', 'hosted': url},
+ },
+ )
+ .validate();
});
test('fails when adding from an invalid url', () async {
@@ -121,8 +125,10 @@
await pubAdd(
args: ['foo', '--hosted-url', 'https://invalid-url.foo'],
- error: contains('Got socket error trying to find package foo at '
- 'https://invalid-url.foo.'),
+ error: contains(
+ 'Got socket error trying to find package foo at '
+ 'https://invalid-url.foo.',
+ ),
exitCode: exit_codes.DATA,
environment: {
// Limit the retries - the url will never go valid.
@@ -138,64 +144,70 @@
});
test(
- 'adds a package from a non-default pub server with no version constraint',
- () async {
- // Make the default server serve errors. Only the custom server should
- // be accessed.
- (await servePackages()).serveErrors();
+ 'adds a package from a non-default pub server with no version constraint',
+ () async {
+ // Make the default server serve errors. Only the custom server should
+ // be accessed.
+ (await servePackages()).serveErrors();
- final server = await startPackageServer();
- server.serve('foo', '0.2.5');
- server.serve('foo', '1.1.0');
- server.serve('foo', '1.2.3');
+ final server = await startPackageServer();
+ server.serve('foo', '0.2.5');
+ server.serve('foo', '1.1.0');
+ server.serve('foo', '1.2.3');
- await d.appDir(dependencies: {}).create();
+ await d.appDir(dependencies: {}).create();
- final url = server.url;
+ final url = server.url;
- await pubAdd(args: ['foo', '--hosted-url', url]);
+ await pubAdd(args: ['foo', '--hosted-url', url]);
- await d.cacheDir({'foo': '1.2.3'}, port: server.port).validate();
- await d.appPackageConfigFile([
- d.packageConfigEntry(name: 'foo', version: '1.2.3', server: server),
- ]).validate();
- await d.appDir(
- dependencies: {
- 'foo': {'version': '^1.2.3', 'hosted': url},
- },
- ).validate();
- });
-
- test('adds a package from a non-default pub server with a version constraint',
- () async {
- // Make the default server serve errors. Only the custom server should
- // be accessed.
- (await servePackages()).serveErrors();
-
- final server = await startPackageServer();
- server.serve('foo', '0.2.5');
- server.serve('foo', '1.1.0');
- server.serve('foo', '1.2.3');
-
- await d.appDir(dependencies: {}).create();
-
- final url = server.url;
-
- await pubAdd(args: ['foo', '--hosted-url', url]);
-
- await d.cacheDir({'foo': '1.2.3'}, port: server.port).validate();
- await d.appPackageConfigFile([
- d.packageConfigEntry(name: 'foo', version: '1.2.3', server: server),
- ]).validate();
- await d.appDir(
- dependencies: {
- 'foo': {'version': '^1.2.3', 'hosted': url},
- },
- ).validate();
- });
+ await d.cacheDir({'foo': '1.2.3'}, port: server.port).validate();
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(name: 'foo', version: '1.2.3', server: server),
+ ]).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'version': '^1.2.3', 'hosted': url},
+ },
+ )
+ .validate();
+ },
+ );
test(
- 'adds a package from a non-default pub server with the "any" version '
+ 'adds a package from a non-default pub server with a version constraint',
+ () async {
+ // Make the default server serve errors. Only the custom server should
+ // be accessed.
+ (await servePackages()).serveErrors();
+
+ final server = await startPackageServer();
+ server.serve('foo', '0.2.5');
+ server.serve('foo', '1.1.0');
+ server.serve('foo', '1.2.3');
+
+ await d.appDir(dependencies: {}).create();
+
+ final url = server.url;
+
+ await pubAdd(args: ['foo', '--hosted-url', url]);
+
+ await d.cacheDir({'foo': '1.2.3'}, port: server.port).validate();
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(name: 'foo', version: '1.2.3', server: server),
+ ]).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'version': '^1.2.3', 'hosted': url},
+ },
+ )
+ .validate();
+ },
+ );
+
+ test('adds a package from a non-default pub server with the "any" version '
'constraint', () async {
// Make the default server serve errors. Only the custom server should
// be accessed.
@@ -216,10 +228,12 @@
await d.appPackageConfigFile([
d.packageConfigEntry(name: 'foo', version: '1.2.3', server: server),
]).validate();
- await d.appDir(
- dependencies: {
- 'foo': {'hosted': url},
- },
- ).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'hosted': url},
+ },
+ )
+ .validate();
});
}
diff --git a/test/add/path/absolute_path_test.dart b/test/add/path/absolute_path_test.dart
index 4b1c172..b7de909 100644
--- a/test/add/path/absolute_path_test.dart
+++ b/test/add/path/absolute_path_test.dart
@@ -11,8 +11,10 @@
void main() {
test('path dependency with absolute path', () async {
- await d
- .dir('foo', [d.libDir('foo'), d.libPubspec('foo', '0.0.1')]).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '0.0.1'),
+ ]).create();
await d.appDir(dependencies: {}).create();
@@ -24,36 +26,42 @@
d.packageConfigEntry(name: 'foo', path: absolutePath),
]).validate();
- await d.appDir(
- dependencies: {
- 'foo': {'path': absolutePath},
- },
- ).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'path': absolutePath},
+ },
+ )
+ .validate();
});
test('adds a package from absolute path with version constraint', () async {
- await d
- .dir('foo', [d.libDir('foo'), d.libPubspec('foo', '0.0.1')]).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '0.0.1'),
+ ]).create();
await d.appDir(dependencies: {}).create();
final absolutePath = p.join(d.sandbox, 'foo');
await pubAdd(args: ['foo:0.0.1', '--path', absolutePath]);
- await d.appDir(
- dependencies: {
- 'foo': {'path': absolutePath, 'version': '0.0.1'},
- },
- ).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'path': absolutePath, 'version': '0.0.1'},
+ },
+ )
+ .validate();
});
test('fails when adding multiple packages through local path', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
await d.appDir(dependencies: {}).create();
final absolutePath = p.join(d.sandbox, 'foo');
@@ -74,10 +82,10 @@
test('fails when adding with an invalid version constraint', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
await d.appDir(dependencies: {}).create();
final absolutePath = p.join(d.sandbox, 'foo');
@@ -85,9 +93,10 @@
await pubAdd(
args: ['foo:2.0.0', '--path', absolutePath],
error: equalsIgnoringWhitespace(
- 'Because myapp depends on foo from path which doesn\'t exist '
- '(could not find package foo at "$absolutePath"), version solving '
- 'failed.'),
+ 'Because myapp depends on foo from path which doesn\'t exist '
+ '(could not find package foo at "$absolutePath"), version solving '
+ 'failed.',
+ ),
exitCode: exit_codes.DATA,
);
@@ -106,9 +115,10 @@
await pubAdd(
args: ['foo', '--path', absolutePath],
error: equalsIgnoringWhitespace(
- 'Because myapp depends on foo from path which doesn\'t exist '
- '(could not find package foo at "$absolutePath"), version solving '
- 'failed.'),
+ 'Because myapp depends on foo from path which doesn\'t exist '
+ '(could not find package foo at "$absolutePath"), version solving '
+ 'failed.',
+ ),
exitCode: exit_codes.DATA,
);
@@ -122,8 +132,10 @@
test('can be overriden by dependency override', () async {
final server = await servePackages();
server.serve('foo', '1.2.2');
- await d
- .dir('foo', [d.libDir('foo'), d.libPubspec('foo', '0.0.1')]).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '0.0.1'),
+ ]).create();
await d.dir(appPath, [
d.pubspec({
diff --git a/test/add/path/relative_path_test.dart b/test/add/path/relative_path_test.dart
index 32ae0d6..2c75967 100644
--- a/test/add/path/relative_path_test.dart
+++ b/test/add/path/relative_path_test.dart
@@ -12,8 +12,10 @@
void main() {
test('can use relative path', () async {
- await d
- .dir('foo', [d.libDir('foo'), d.libPubspec('foo', '0.0.1')]).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '0.0.1'),
+ ]).create();
await d.appDir(dependencies: {}).create();
@@ -23,22 +25,24 @@
d.packageConfigEntry(name: 'foo', path: '../foo'),
]).validate();
- await d.appDir(
- dependencies: {
- 'foo': {'path': '../foo'},
- },
- ).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'path': '../foo'},
+ },
+ )
+ .validate();
});
test('can use relative path with a path descriptor', () async {
- await d
- .dir('foo', [d.libDir('foo'), d.libPubspec('foo', '1.2.3')]).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.2.3'),
+ ]).create();
await d.appDir().create();
- await pubAdd(
- args: ['dev:foo:{"path":"../foo"}'],
- );
+ await pubAdd(args: ['dev:foo:{"path":"../foo"}']);
await d.dir(appPath, [
d.pubspec({
@@ -51,8 +55,10 @@
});
test('can use relative path with --directory', () async {
- await d
- .dir('foo', [d.libDir('foo'), d.libPubspec('foo', '0.0.1')]).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '0.0.1'),
+ ]).create();
await d.appDir(dependencies: {}).create();
@@ -66,11 +72,13 @@
d.packageConfigEntry(name: 'foo', path: '../foo'),
]).validate();
- await d.appDir(
- dependencies: {
- 'foo': {'path': '../foo'},
- },
- ).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'path': '../foo'},
+ },
+ )
+ .validate();
});
test('fails if path does not exist', () async {
@@ -79,9 +87,10 @@
await pubAdd(
args: ['foo', '--path', '../foo'],
error: equalsIgnoringWhitespace(
- 'Because myapp depends on foo from path which doesn\'t exist '
- '(could not find package foo at "..${Platform.pathSeparator}foo"), '
- 'version solving failed.'),
+ 'Because myapp depends on foo from path which doesn\'t exist '
+ '(could not find package foo at "..${Platform.pathSeparator}foo"), '
+ 'version solving failed.',
+ ),
exitCode: exit_codes.DATA,
);
@@ -93,36 +102,41 @@
});
test('adds a package from absolute path with version constraint', () async {
- await d
- .dir('foo', [d.libDir('foo'), d.libPubspec('foo', '0.0.1')]).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '0.0.1'),
+ ]).create();
await d.appDir(dependencies: {}).create();
await pubAdd(args: ['foo:0.0.1', '--path', '../foo']);
- await d.appDir(
- dependencies: {
- 'foo': {'path': '../foo', 'version': '0.0.1'},
- },
- ).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'path': '../foo', 'version': '0.0.1'},
+ },
+ )
+ .validate();
});
test('fails when adding with an invalid version constraint', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
await d.appDir(dependencies: {}).create();
await pubAdd(
args: ['foo:2.0.0', '--path', '../foo'],
error: equalsIgnoringWhitespace(
- 'Because myapp depends on foo from path which doesn\'t exist '
- '(could not find package foo at "..${Platform.pathSeparator}foo"), '
- 'version solving failed.'),
+ 'Because myapp depends on foo from path which doesn\'t exist '
+ '(could not find package foo at "..${Platform.pathSeparator}foo"), '
+ 'version solving failed.',
+ ),
exitCode: exit_codes.DATA,
);
@@ -136,8 +150,10 @@
test('can be overriden by dependency override', () async {
final server = await servePackages();
server.serve('foo', '1.2.2');
- await d
- .dir('foo', [d.libDir('foo'), d.libPubspec('foo', '0.0.1')]).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '0.0.1'),
+ ]).create();
await d.dir(appPath, [
d.pubspec({
@@ -165,10 +181,14 @@
});
test('Can add multiple path packages using descriptors', () async {
- await d
- .dir('foo', [d.libDir('foo'), d.libPubspec('foo', '0.0.1')]).create();
- await d
- .dir('bar', [d.libDir('bar'), d.libPubspec('bar', '0.0.1')]).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '0.0.1'),
+ ]).create();
+ await d.dir('bar', [
+ d.libDir('bar'),
+ d.libPubspec('bar', '0.0.1'),
+ ]).create();
await d.appDir(dependencies: {}).create();
@@ -188,11 +208,13 @@
d.packageConfigEntry(name: 'bar', path: '../bar'),
]).validate();
- await d.appDir(
- dependencies: {
- 'foo': {'path': '../foo'},
- 'bar': {'path': '../bar'},
- },
- ).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'path': '../foo'},
+ 'bar': {'path': '../bar'},
+ },
+ )
+ .validate();
});
}
diff --git a/test/add/sdk/sdk_test.dart b/test/add/sdk/sdk_test.dart
index 8f5c2e8..e3e7c93 100644
--- a/test/add/sdk/sdk_test.dart
+++ b/test/add/sdk/sdk_test.dart
@@ -22,10 +22,10 @@
]),
]),
d.dir('bin/cache/pkg', [
- d.dir(
- 'baz',
- [d.libDir('baz', 'foo 0.0.1'), d.libPubspec('baz', '0.0.1')],
- ),
+ d.dir('baz', [
+ d.libDir('baz', 'foo 0.0.1'),
+ d.libPubspec('baz', '0.0.1'),
+ ]),
]),
d.flutterVersion('1.2.3'),
]).create();
@@ -47,48 +47,53 @@
}),
]).validate();
- await d.appPackageConfigFile(
- [
- d.packageConfigEntry(
- name: 'foo',
- path: p.join(d.sandbox, 'flutter', 'packages', 'foo'),
- ),
- d.packageConfigEntry(name: 'bar', version: '1.0.0'),
- ],
- flutterRoot: p.join(d.sandbox, 'flutter'),
- flutterVersion: '1.2.3',
- ).validate();
+ await d
+ .appPackageConfigFile(
+ [
+ d.packageConfigEntry(
+ name: 'foo',
+ path: p.join(d.sandbox, 'flutter', 'packages', 'foo'),
+ ),
+ d.packageConfigEntry(name: 'bar', version: '1.0.0'),
+ ],
+ flutterRoot: p.join(d.sandbox, 'flutter'),
+ flutterVersion: '1.2.3',
+ )
+ .validate();
});
test(
- "adds an SDK dependency's dependencies with version constraint specified",
- () async {
- await d.appDir(dependencies: {}).create();
- await pubAdd(
- args: ['foo:0.0.1', '--sdk', 'flutter'],
- environment: {'FLUTTER_ROOT': p.join(d.sandbox, 'flutter')},
- );
+ "adds an SDK dependency's dependencies with version constraint specified",
+ () async {
+ await d.appDir(dependencies: {}).create();
+ await pubAdd(
+ args: ['foo:0.0.1', '--sdk', 'flutter'],
+ environment: {'FLUTTER_ROOT': p.join(d.sandbox, 'flutter')},
+ );
- await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- 'dependencies': {
- 'foo': {'sdk': 'flutter', 'version': '0.0.1'},
- },
- }),
- ]).validate();
- await d.appPackageConfigFile(
- [
- d.packageConfigEntry(
- name: 'foo',
- path: p.join(d.sandbox, 'flutter', 'packages', 'foo'),
- ),
- d.packageConfigEntry(name: 'bar', version: '1.0.0'),
- ],
- flutterRoot: p.join(d.sandbox, 'flutter'),
- flutterVersion: '1.2.3',
- ).validate();
- });
+ await d.dir(appPath, [
+ d.pubspec({
+ 'name': 'myapp',
+ 'dependencies': {
+ 'foo': {'sdk': 'flutter', 'version': '0.0.1'},
+ },
+ }),
+ ]).validate();
+ await d
+ .appPackageConfigFile(
+ [
+ d.packageConfigEntry(
+ name: 'foo',
+ path: p.join(d.sandbox, 'flutter', 'packages', 'foo'),
+ ),
+ d.packageConfigEntry(name: 'bar', version: '1.0.0'),
+ ],
+ flutterRoot: p.join(d.sandbox, 'flutter'),
+ flutterVersion: '1.2.3',
+ )
+ .validate();
+ },
+ );
test('adds an SDK dependency from bin/cache/pkg', () async {
await d.appDir(dependencies: {}).create();
@@ -97,16 +102,18 @@
environment: {'FLUTTER_ROOT': p.join(d.sandbox, 'flutter')},
);
- await d.appPackageConfigFile(
- [
- d.packageConfigEntry(
- name: 'baz',
- path: p.join(d.sandbox, 'flutter', 'bin', 'cache', 'pkg', 'baz'),
- ),
- ],
- flutterRoot: p.join(d.sandbox, 'flutter'),
- flutterVersion: '1.2.3',
- ).validate();
+ await d
+ .appPackageConfigFile(
+ [
+ d.packageConfigEntry(
+ name: 'baz',
+ path: p.join(d.sandbox, 'flutter', 'bin', 'cache', 'pkg', 'baz'),
+ ),
+ ],
+ flutterRoot: p.join(d.sandbox, 'flutter'),
+ flutterVersion: '1.2.3',
+ )
+ .validate();
});
test("fails if the version constraint doesn't match", () async {
diff --git a/test/ascii_tree_test.dart b/test/ascii_tree_test.dart
index c5c186f..eb2c152 100644
--- a/test/ascii_tree_test.dart
+++ b/test/ascii_tree_test.dart
@@ -32,9 +32,7 @@
dir('example', [
file('console_example.dart', bytes(1000)),
file('main.dart', bytes(1024)),
- dir('web copy', [
- file('web_example.dart', bytes(1025)),
- ]),
+ dir('web copy', [file('web_example.dart', bytes(1025))]),
]),
dir('test', [
file('absolute_test.dart', bytes(0)),
@@ -53,15 +51,15 @@
]),
file('.gitignore', bytes(100)),
file('README.md', bytes(100)),
- dir('lib', [
- file('path.dart', bytes(100)),
- ]),
+ dir('lib', [file('path.dart', bytes(100))]),
]).create();
- final files = Package.load(
- path(appPath),
- loadPubspec:
- Pubspec.loadRootWithSources((name) => throw UnimplementedError()),
- ).listFiles();
+ final files =
+ Package.load(
+ path(appPath),
+ loadPubspec: Pubspec.loadRootWithSources(
+ (name) => throw UnimplementedError(),
+ ),
+ ).listFiles();
ctx.expectNextSection(
tree.fromFiles(files, baseDir: path(appPath), showFileSizes: true),
);
diff --git a/test/bump_test.dart b/test/bump_test.dart
index b558a55..67b8bf0 100644
--- a/test/bump_test.dart
+++ b/test/bump_test.dart
@@ -11,15 +11,12 @@
void testBump(String part, String from, String to) {
test('Bumps the $part version from $from to $to', () async {
await dir(appPath, [
- file(
- 'pubspec.yaml',
- '''
+ file('pubspec.yaml', '''
name: myapp
version: $from # comment
environment:
sdk: $defaultSdkConstraint
-''',
- ),
+'''),
]).create();
await runPub(
args: ['bump', part, '--dry-run'],
diff --git a/test/cache/add/adds_latest_matching_version_test.dart b/test/cache/add/adds_latest_matching_version_test.dart
index 6efa953..70b7b48 100644
--- a/test/cache/add/adds_latest_matching_version_test.dart
+++ b/test/cache/add/adds_latest_matching_version_test.dart
@@ -10,8 +10,7 @@
import '../../test_pub.dart';
void main() {
- test(
- 'adds the latest version of the package matching the '
+ test('adds the latest version of the package matching the '
'version constraint', () async {
await servePackages()
..serve('foo', '1.2.2')
diff --git a/test/cache/clean_test.dart b/test/cache/clean_test.dart
index 4f55fa5..611d13f 100644
--- a/test/cache/clean_test.dart
+++ b/test/cache/clean_test.dart
@@ -36,43 +36,41 @@
);
});
- test('running pub cache clean deletes cache only with confirmation',
- () async {
- await servePackages()
- ..serve('foo', '1.1.2')
- ..serve('bar', '1.2.3');
- await d.appDir(dependencies: {'foo': 'any', 'bar': 'any'}).create();
- await pubGet();
- final cache = p.join(d.sandbox, cachePath);
- expect(
- listDir(cache, includeHidden: true),
- contains(pathInCache('hosted')),
- );
- {
- final process = await startPub(
- args: ['cache', 'clean'],
+ test(
+ 'running pub cache clean deletes cache only with confirmation',
+ () async {
+ await servePackages()
+ ..serve('foo', '1.1.2')
+ ..serve('bar', '1.2.3');
+ await d.appDir(dependencies: {'foo': 'any', 'bar': 'any'}).create();
+ await pubGet();
+ final cache = p.join(d.sandbox, cachePath);
+ expect(
+ listDir(cache, includeHidden: true),
+ contains(pathInCache('hosted')),
);
- process.stdin.writeln('n');
- expect(await process.exitCode, 0);
- }
- expect(
- listDir(cache, includeHidden: true),
- contains(pathInCache('hosted')),
- );
+ {
+ final process = await startPub(args: ['cache', 'clean']);
+ process.stdin.writeln('n');
+ expect(await process.exitCode, 0);
+ }
+ expect(
+ listDir(cache, includeHidden: true),
+ contains(pathInCache('hosted')),
+ );
- {
- final process = await startPub(
- args: ['cache', 'clean'],
+ {
+ final process = await startPub(args: ['cache', 'clean']);
+ process.stdin.writeln('y');
+ expect(await process.exitCode, 0);
+ }
+ expect(
+ listDir(
+ cache,
+ includeHidden: true,
+ ), // The README.md will be reconstructed.
+ [pathInCache('README.md')],
);
- process.stdin.writeln('y');
- expect(await process.exitCode, 0);
- }
- expect(
- listDir(
- cache,
- includeHidden: true,
- ), // The README.md will be reconstructed.
- [pathInCache('README.md')],
- );
- });
+ },
+ );
}
diff --git a/test/cache/create_readme_test.dart b/test/cache/create_readme_test.dart
index f2fd5d4..8194505 100644
--- a/test/cache/create_readme_test.dart
+++ b/test/cache/create_readme_test.dart
@@ -7,24 +7,26 @@
import '../test_pub.dart';
void main() async {
- test('PUB_CACHE/README.md gets created by command downloading to pub cache',
- () async {
- final server = await servePackages();
- server.serve('foo', '1.0.0');
- await d.appDir().create();
- await pubGet();
- await d.nothing(cachePath).validate();
+ test(
+ 'PUB_CACHE/README.md gets created by command downloading to pub cache',
+ () async {
+ final server = await servePackages();
+ server.serve('foo', '1.0.0');
+ await d.appDir().create();
+ await pubGet();
+ await d.nothing(cachePath).validate();
- await d.appDir(dependencies: {'foo': '1.0.0'}).create();
- await pubGet();
- await d.dir(cachePath, [
- d.file('README.md', contains('https://dart.dev/go/pub-cache')),
- ]).validate();
- File(pathInCache('README.md')).deleteSync();
- // No new download, so 'README.md' doesn't get updated.
- await pubGet();
- await d.dir(cachePath, [d.nothing('README.md')]).validate();
- });
+ await d.appDir(dependencies: {'foo': '1.0.0'}).create();
+ await pubGet();
+ await d.dir(cachePath, [
+ d.file('README.md', contains('https://dart.dev/go/pub-cache')),
+ ]).validate();
+ File(pathInCache('README.md')).deleteSync();
+ // No new download, so 'README.md' doesn't get updated.
+ await pubGet();
+ await d.dir(cachePath, [d.nothing('README.md')]).validate();
+ },
+ );
test('PUB_CACHE/README.md gets created by `dart pub cache clean`', () async {
final server = await servePackages();
@@ -50,10 +52,7 @@
await d.dir(cachePath, [
d.dir('global_packages', [
d.dir('foo', [
- d.dir(
- 'bin',
- [d.outOfDateSnapshot('foo.dart-3.1.2+3.snapshot')],
- ),
+ d.dir('bin', [d.outOfDateSnapshot('foo.dart-3.1.2+3.snapshot')]),
]),
]),
]).create();
diff --git a/test/cache/detect_deprecated_dir_test.dart b/test/cache/detect_deprecated_dir_test.dart
index 865c2d9..d82e043 100644
--- a/test/cache/detect_deprecated_dir_test.dart
+++ b/test/cache/detect_deprecated_dir_test.dart
@@ -8,27 +8,29 @@
import '../test_pub.dart';
void main() async {
- test('Detects and warns about old cache dir', skip: !Platform.isWindows,
- () async {
- await d.dir('APPDATA', [
- d.dir('Pub', [d.dir('Cache')]),
- ]).create();
- final server = await servePackages();
- server.serve('foo', '1.0.0');
- await d.appDir(dependencies: {'foo': '^1.0.0'}).create();
- await pubGet(
- warning: contains('Found a legacy Pub cache at'),
- environment: {'APPDATA': d.path('APPDATA')},
- );
- expect(
- File(p.join(sandbox, 'APPDATA', 'Pub', 'Cache', 'DEPRECATED.md'))
- .existsSync(),
- isTrue,
- );
- server.serve('foo', '2.0.0');
- await d.appDir(dependencies: {'foo': '^2.0.0'}).create();
- await pubGet(
- warning: isNot(contains('Found a legacy Pub cache')),
- );
- });
+ test(
+ 'Detects and warns about old cache dir',
+ skip: !Platform.isWindows,
+ () async {
+ await d.dir('APPDATA', [
+ d.dir('Pub', [d.dir('Cache')]),
+ ]).create();
+ final server = await servePackages();
+ server.serve('foo', '1.0.0');
+ await d.appDir(dependencies: {'foo': '^1.0.0'}).create();
+ await pubGet(
+ warning: contains('Found a legacy Pub cache at'),
+ environment: {'APPDATA': d.path('APPDATA')},
+ );
+ expect(
+ File(
+ p.join(sandbox, 'APPDATA', 'Pub', 'Cache', 'DEPRECATED.md'),
+ ).existsSync(),
+ isTrue,
+ );
+ server.serve('foo', '2.0.0');
+ await d.appDir(dependencies: {'foo': '^2.0.0'}).create();
+ await pubGet(warning: isNot(contains('Found a legacy Pub cache')));
+ },
+ );
}
diff --git a/test/cache/list_test.dart b/test/cache/list_test.dart
index 2dcb8c8..4f13565 100644
--- a/test/cache/list_test.dart
+++ b/test/cache/list_test.dart
@@ -24,13 +24,8 @@
]).create();
await runPub(
- args: [
- 'cache',
- 'list',
- ],
- outputJson: {
- 'packages': <String, Object>{},
- },
+ args: ['cache', 'list'],
+ outputJson: {'packages': <String, Object>{}},
);
});
diff --git a/test/cache/preload_test.dart b/test/cache/preload_test.dart
index 6e9f0ed..035b13a 100644
--- a/test/cache/preload_test.dart
+++ b/test/cache/preload_test.dart
@@ -42,12 +42,10 @@
await runPub(
args: ['cache', 'preload', archivePath1, archivePath2],
environment: {'_PUB_TEST_DEFAULT_HOSTED_URL': server.url},
- output: allOf(
- [
- contains('Installed $archivePath1 in cache as foo 1.0.0.'),
- contains('Installed $archivePath2 in cache as foo 2.0.0.'),
- ],
- ),
+ output: allOf([
+ contains('Installed $archivePath1 in cache as foo 1.0.0.'),
+ contains('Installed $archivePath2 in cache as foo 2.0.0.'),
+ ]),
);
await d.cacheDir({'foo': '1.0.0'}).validate();
await d.cacheDir({'foo': '2.0.0'}).validate();
@@ -63,8 +61,7 @@
await pubGet(args: ['--offline']);
});
- test(
- 'installs package according to PUB_HOSTED_URL '
+ test('installs package according to PUB_HOSTED_URL '
'even on non-official server', () async {
final server = await servePackages();
server.serve('foo', '1.0.0');
@@ -105,8 +102,9 @@
await runPub(
args: ['cache', 'preload', archivePath],
environment: {'_PUB_TEST_DEFAULT_HOSTED_URL': server.url},
- output:
- allOf([contains('Installed $archivePath in cache as foo 1.0.0.')]),
+ output: allOf([
+ contains('Installed $archivePath in cache as foo 1.0.0.'),
+ ]),
);
server.serve('foo', '1.0.0', contents: [file('new-file.txt')]);
@@ -126,8 +124,9 @@
await runPub(
args: ['cache', 'preload', archivePath],
environment: {'_PUB_TEST_DEFAULT_HOSTED_URL': server.url},
- output:
- allOf([contains('Installed $archivePath in cache as foo 1.0.0.')]),
+ output: allOf([
+ contains('Installed $archivePath in cache as foo 1.0.0.'),
+ ]),
);
await hostedCache([
dir('foo-1.0.0', [file('new-file.txt'), nothing('old-file.txt')]),
@@ -148,8 +147,9 @@
File(archivePath).writeAsBytesSync('garbage'.codeUnits);
await runPub(
args: ['cache', 'preload', archivePath],
- error:
- contains('Failed to extract `$archivePath`: Filter error, bad data.'),
+ error: contains(
+ 'Failed to extract `$archivePath`: Filter error, bad data.',
+ ),
exitCode: DATA,
);
});
@@ -176,9 +176,9 @@
final archivePath = p.join(sandbox, 'archive');
File(archivePath).writeAsBytesSync(
- await tarFromDescriptors([d.file('pubspec.yaml', '{}')])
- .expand((x) => x)
- .toList(),
+ await tarFromDescriptors([
+ d.file('pubspec.yaml', '{}'),
+ ]).expand((x) => x).toList(),
);
await runPub(
diff --git a/test/cache/repair/git_test.dart b/test/cache/repair/git_test.dart
index cadfcf8..0e896be 100644
--- a/test/cache/repair/git_test.dart
+++ b/test/cache/repair/git_test.dart
@@ -14,22 +14,24 @@
group('root-level packages', () {
setUp(() async {
// Create two cached revisions of foo.
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
await pubGet();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.1')],
- ).commit();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.1'),
+ ]).commit();
await pubUpgrade();
});
@@ -37,9 +39,10 @@
test('reinstalls previously cached git packages', () async {
// Find the cached foo packages for each revision.
final gitCacheDir = p.join(d.sandbox, cachePath, 'git');
- final fooDirs = listDir(gitCacheDir)
- .where((dir) => p.basename(dir).startsWith('foo-'))
- .toList();
+ final fooDirs =
+ listDir(
+ gitCacheDir,
+ ).where((dir) => p.basename(dir).startsWith('foo-')).toList();
// Delete "foo.dart" from them.
for (var dir in fooDirs) {
@@ -56,21 +59,23 @@
);
// The missing libraries should have been replaced.
- final fooLibs = fooDirs.map((dir) {
- final fooDirName = p.basename(dir);
- return d.dir(fooDirName, [
- d.dir('lib', [d.file('foo.dart', 'main() => "foo";')]),
- ]);
- }).toList();
+ final fooLibs =
+ fooDirs.map((dir) {
+ final fooDirName = p.basename(dir);
+ return d.dir(fooDirName, [
+ d.dir('lib', [d.file('foo.dart', 'main() => "foo";')]),
+ ]);
+ }).toList();
await d.dir(cachePath, [d.dir('git', fooLibs)]).validate();
});
test('deletes packages without pubspecs', () async {
final gitCacheDir = p.join(d.sandbox, cachePath, 'git');
- final fooDirs = listDir(gitCacheDir)
- .where((dir) => p.basename(dir).startsWith('foo-'))
- .toList();
+ final fooDirs =
+ listDir(
+ gitCacheDir,
+ ).where((dir) => p.basename(dir).startsWith('foo-')).toList();
for (var dir in fooDirs) {
deleteEntry(p.join(dir, 'pubspec.yaml'));
@@ -98,9 +103,10 @@
test('deletes packages with invalid pubspecs', () async {
final gitCacheDir = p.join(d.sandbox, cachePath, 'git');
- final fooDirs = listDir(gitCacheDir)
- .where((dir) => p.basename(dir).startsWith('foo-'))
- .toList();
+ final fooDirs =
+ listDir(
+ gitCacheDir,
+ ).where((dir) => p.basename(dir).startsWith('foo-')).toList();
for (var dir in fooDirs) {
writeTextFile(p.join(dir, 'pubspec.yaml'), '{');
@@ -134,13 +140,15 @@
d.dir('subdir', [d.libDir('sub'), d.libPubspec('sub', '1.0.0')]),
]).create();
- await d.appDir(
- dependencies: {
- 'sub': {
- 'git': {'url': '../foo.git', 'path': 'subdir'},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'sub': {
+ 'git': {'url': '../foo.git', 'path': 'subdir'},
+ },
+ },
+ )
+ .create();
await pubGet();
await d.git('foo.git', [
@@ -153,9 +161,10 @@
test('reinstalls previously cached git packages', () async {
// Find the cached foo packages for each revision.
final gitCacheDir = p.join(d.sandbox, cachePath, 'git');
- final fooDirs = listDir(gitCacheDir)
- .where((dir) => p.basename(dir).startsWith('foo-'))
- .toList();
+ final fooDirs =
+ listDir(
+ gitCacheDir,
+ ).where((dir) => p.basename(dir).startsWith('foo-')).toList();
// Delete "sub.dart" from them.
for (var dir in fooDirs) {
@@ -172,23 +181,25 @@
);
// The missing libraries should have been replaced.
- final fooLibs = fooDirs.map((dir) {
- final fooDirName = p.basename(dir);
- return d.dir(fooDirName, [
- d.dir('subdir', [
- d.dir('lib', [d.file('sub.dart', 'main() => "sub";')]),
- ]),
- ]);
- }).toList();
+ final fooLibs =
+ fooDirs.map((dir) {
+ final fooDirName = p.basename(dir);
+ return d.dir(fooDirName, [
+ d.dir('subdir', [
+ d.dir('lib', [d.file('sub.dart', 'main() => "sub";')]),
+ ]),
+ ]);
+ }).toList();
await d.dir(cachePath, [d.dir('git', fooLibs)]).validate();
});
test('deletes packages without pubspecs', () async {
final gitCacheDir = p.join(d.sandbox, cachePath, 'git');
- final fooDirs = listDir(gitCacheDir)
- .where((dir) => p.basename(dir).startsWith('foo-'))
- .toList();
+ final fooDirs =
+ listDir(
+ gitCacheDir,
+ ).where((dir) => p.basename(dir).startsWith('foo-')).toList();
for (var dir in fooDirs) {
deleteEntry(p.join(dir, 'subdir', 'pubspec.yaml'));
diff --git a/test/cache/repair/handles_corrupted_global_lockfile_test.dart b/test/cache/repair/handles_corrupted_global_lockfile_test.dart
index 275eec5..fe684c4 100644
--- a/test/cache/repair/handles_corrupted_global_lockfile_test.dart
+++ b/test/cache/repair/handles_corrupted_global_lockfile_test.dart
@@ -17,8 +17,10 @@
await runPub(
args: ['cache', 'repair'],
error: contains('Failed to reactivate foo:'),
- output: contains('Failed to reactivate 1 package:\n'
- '- foo'),
+ output: contains(
+ 'Failed to reactivate 1 package:\n'
+ '- foo',
+ ),
exitCode: exit_codes.UNAVAILABLE,
);
});
diff --git a/test/cache/repair/handles_failure_test.dart b/test/cache/repair/handles_failure_test.dart
index 14847ee..d00119d 100644
--- a/test/cache/repair/handles_failure_test.dart
+++ b/test/cache/repair/handles_failure_test.dart
@@ -11,26 +11,27 @@
void main() {
test('handles failure to reinstall some packages', () async {
// Only serve two packages so repairing will have a failure.
- final server = await servePackages()
- ..serve('foo', '1.2.3')
- ..serve('foo', '1.2.5');
+ final server =
+ await servePackages()
+ ..serve('foo', '1.2.3')
+ ..serve('foo', '1.2.5');
// Set up a cache with some packages.
await d.dir(cachePath, [
d.dir('hosted', [
d.dir('localhost%58${server.port}', [
- d.dir(
- 'foo-1.2.3',
- [d.libPubspec('foo', '1.2.3'), d.file('broken.txt')],
- ),
- d.dir(
- 'foo-1.2.4',
- [d.libPubspec('foo', '1.2.4'), d.file('broken.txt')],
- ),
- d.dir(
- 'foo-1.2.5',
- [d.libPubspec('foo', '1.2.5'), d.file('broken.txt')],
- ),
+ d.dir('foo-1.2.3', [
+ d.libPubspec('foo', '1.2.3'),
+ d.file('broken.txt'),
+ ]),
+ d.dir('foo-1.2.4', [
+ d.libPubspec('foo', '1.2.4'),
+ d.file('broken.txt'),
+ ]),
+ d.dir('foo-1.2.5', [
+ d.libPubspec('foo', '1.2.5'),
+ d.file('broken.txt'),
+ ]),
]),
]),
]).create();
@@ -41,8 +42,10 @@
expect(pub.stderr, emits(startsWith('Failed to repair foo 1.2.4. Error:')));
expect(
pub.stderr,
- emits('Package not available '
- '(Package foo has no version 1.2.4).'),
+ emits(
+ 'Package not available '
+ '(Package foo has no version 1.2.4).',
+ ),
);
expect(pub.stdout, emits('Reinstalled 2 packages.'));
diff --git a/test/cache/repair/hosted.dart b/test/cache/repair/hosted.dart
index 3a0fd00..8e5781b 100644
--- a/test/cache/repair/hosted.dart
+++ b/test/cache/repair/hosted.dart
@@ -25,18 +25,18 @@
await d.dir(cachePath, [
d.dir('hosted', [
d.dir('localhost%58${globalServer.port}', [
- d.dir(
- 'foo-1.2.3',
- [d.libPubspec('foo', '1.2.3'), d.file('broken.txt')],
- ),
- d.dir(
- 'foo-1.2.5',
- [d.libPubspec('foo', '1.2.5'), d.file('broken.txt')],
- ),
- d.dir(
- 'bar-1.2.4',
- [d.libPubspec('bar', '1.2.4'), d.file('broken.txt')],
- ),
+ d.dir('foo-1.2.3', [
+ d.libPubspec('foo', '1.2.3'),
+ d.file('broken.txt'),
+ ]),
+ d.dir('foo-1.2.5', [
+ d.libPubspec('foo', '1.2.5'),
+ d.file('broken.txt'),
+ ]),
+ d.dir('bar-1.2.4', [
+ d.libPubspec('bar', '1.2.4'),
+ d.file('broken.txt'),
+ ]),
]),
]),
]).create();
diff --git a/test/cache/repair/recompiles_snapshots_test.dart b/test/cache/repair/recompiles_snapshots_test.dart
index 167675c..c6ac04b 100644
--- a/test/cache/repair/recompiles_snapshots_test.dart
+++ b/test/cache/repair/recompiles_snapshots_test.dart
@@ -21,10 +21,9 @@
await runPub(args: ['global', 'activate', 'foo']);
await d.dir(cachePath, [
- d.dir(
- 'global_packages/foo/bin',
- [d.file('script.dart.snapshot', 'junk')],
- ),
+ d.dir('global_packages/foo/bin', [
+ d.file('script.dart.snapshot', 'junk'),
+ ]),
]).create();
await runPub(
diff --git a/test/cache/repair/updates_binstubs_test.dart b/test/cache/repair/updates_binstubs_test.dart
index 0de080c..74fef30 100644
--- a/test/cache/repair/updates_binstubs_test.dart
+++ b/test/cache/repair/updates_binstubs_test.dart
@@ -27,10 +27,9 @@
'executables': {'foo-script': 'script'},
},
contents: [
- d.dir(
- 'bin',
- [d.file('script.dart', "main(args) => print('ok \$args');")],
- ),
+ d.dir('bin', [
+ d.file('script.dart', "main(args) => print('ok \$args');"),
+ ]),
],
);
diff --git a/test/content_hash_test.dart b/test/content_hash_test.dart
index ca14a52..5ec3d7f 100644
--- a/test/content_hash_test.dart
+++ b/test/content_hash_test.dart
@@ -14,26 +14,29 @@
import 'test_pub.dart';
Future<void> main() async {
- test('archive_sha256 is stored in lockfile and cache upon download',
- () async {
- final server = await servePackages();
- server.serve('foo', '1.0.0');
- server.serveContentHashes = true;
- await appDir(dependencies: {'foo': 'any'}).create();
- await pubGet();
- final lockfile = loadYaml(
- File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
- );
- final sha256 =
- dig<String>(lockfile, ['packages', 'foo', 'description', 'sha256']);
- expect(sha256, hasLength(64));
- await hostedHashesCache([
- file('foo-1.0.0.sha256', sha256),
- ]).validate();
- });
-
test(
- 'archive_sha256 is stored in lockfile upon download on legacy server '
+ 'archive_sha256 is stored in lockfile and cache upon download',
+ () async {
+ final server = await servePackages();
+ server.serve('foo', '1.0.0');
+ server.serveContentHashes = true;
+ await appDir(dependencies: {'foo': 'any'}).create();
+ await pubGet();
+ final lockfile = loadYaml(
+ File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
+ );
+ final sha256 = dig<String>(lockfile, [
+ 'packages',
+ 'foo',
+ 'description',
+ 'sha256',
+ ]);
+ expect(sha256, hasLength(64));
+ await hostedHashesCache([file('foo-1.0.0.sha256', sha256)]).validate();
+ },
+ );
+
+ test('archive_sha256 is stored in lockfile upon download on legacy server '
'without content hashes', () async {
final server = await servePackages();
server.serveContentHashes = false;
@@ -43,12 +46,14 @@
final lockfile = loadYaml(
File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
);
- final sha256 =
- dig<String>(lockfile, ['packages', 'foo', 'description', 'sha256']);
+ final sha256 = dig<String>(lockfile, [
+ 'packages',
+ 'foo',
+ 'description',
+ 'sha256',
+ ]);
expect(sha256, hasLength(64));
- await hostedHashesCache([
- file('foo-1.0.0.sha256', sha256),
- ]).validate();
+ await hostedHashesCache([file('foo-1.0.0.sha256', sha256)]).validate();
});
test('archive_sha256 is checked on download', () async {
@@ -63,176 +68,202 @@
await pubGet(
exitCode: exit_codes.TEMP_FAIL,
silent: contains('Attempt #2'),
- error:
- contains('Downloaded archive for foo-1.0.0 had wrong content-hash.'),
- environment: {
- 'PUB_MAX_HTTP_RETRIES': '2',
- },
+ error: contains(
+ 'Downloaded archive for foo-1.0.0 had wrong content-hash.',
+ ),
+ environment: {'PUB_MAX_HTTP_RETRIES': '2'},
);
});
- test('If content is updated on server we warn and update the lockfile',
- () async {
- final server = await servePackages();
- server.serveContentHashes = true;
- server.serve('foo', '1.0.0');
- await appDir(dependencies: {'foo': 'any'}).create();
- await pubGet();
- server.serve(
- 'foo',
- '1.0.0',
- contents: [file('new_file.txt', 'This file could be malicious.')],
- );
- // Pub get will not revisit the file-listing if everything resolves, and
- // only compare with a cached value.
- await pubGet();
- // Deleting the version-listing cache will cause it to be refetched, and the
- // warning will happen.
- File(p.join(globalServer.cachingPath, '.cache', 'foo-versions.json'))
- .deleteSync();
- await pubGet(
- warning: allOf(
- contains('Cached version of foo-1.0.0 has wrong hash - redownloading.'),
- contains(
- 'The existing content-hash '
- 'from pubspec.lock doesn\'t match contents for:',
+ test(
+ 'If content is updated on server we warn and update the lockfile',
+ () async {
+ final server = await servePackages();
+ server.serveContentHashes = true;
+ server.serve('foo', '1.0.0');
+ await appDir(dependencies: {'foo': 'any'}).create();
+ await pubGet();
+ server.serve(
+ 'foo',
+ '1.0.0',
+ contents: [file('new_file.txt', 'This file could be malicious.')],
+ );
+ // Pub get will not revisit the file-listing if everything resolves, and
+ // only compare with a cached value.
+ await pubGet();
+ // Deleting the version-listing cache will cause it to be refetched, and
+ // the warning will happen.
+ File(
+ p.join(globalServer.cachingPath, '.cache', 'foo-versions.json'),
+ ).deleteSync();
+ await pubGet(
+ warning: allOf(
+ contains(
+ 'Cached version of foo-1.0.0 has wrong hash - redownloading.',
+ ),
+ contains(
+ 'The existing content-hash '
+ 'from pubspec.lock doesn\'t match contents for:',
+ ),
+ contains('* foo-1.0.0 from "${server.url}"\n'),
),
- contains('* foo-1.0.0 from "${server.url}"\n'),
- ),
- exitCode: exit_codes.SUCCESS,
- );
- final lockfile = loadYaml(
- File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
- );
- final newHash =
- dig<String>(lockfile, ['packages', 'foo', 'description', 'sha256']);
- expect(newHash, await server.peekArchiveSha256('foo', '1.0.0'));
- });
+ exitCode: exit_codes.SUCCESS,
+ );
+ final lockfile = loadYaml(
+ File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
+ );
+ final newHash = dig<String>(lockfile, [
+ 'packages',
+ 'foo',
+ 'description',
+ 'sha256',
+ ]);
+ expect(newHash, await server.peekArchiveSha256('foo', '1.0.0'));
+ },
+ );
test(
- 'If content is updated on legacy server, '
- 'and the download needs refreshing we warn and update the lockfile',
- () async {
- final server = await servePackages();
- server.serveContentHashes = false;
- server.serve('foo', '1.0.0');
- await appDir(dependencies: {'foo': 'any'}).create();
- await pubGet();
- server.serve(
- 'foo',
- '1.0.0',
- contents: [file('new_file.txt', 'This file could be malicious.')],
- );
- // Deleting the hash-file cache will cause it to be refetched, and the
- // warning will happen.
- File(p.join(globalServer.hashesCachingPath, 'foo-1.0.0.sha256'))
- .deleteSync();
+ 'If content is updated on legacy server, '
+ 'and the download needs refreshing we warn and update the lockfile',
+ () async {
+ final server = await servePackages();
+ server.serveContentHashes = false;
+ server.serve('foo', '1.0.0');
+ await appDir(dependencies: {'foo': 'any'}).create();
+ await pubGet();
+ server.serve(
+ 'foo',
+ '1.0.0',
+ contents: [file('new_file.txt', 'This file could be malicious.')],
+ );
+ // Deleting the hash-file cache will cause it to be refetched, and the
+ // warning will happen.
+ File(
+ p.join(globalServer.hashesCachingPath, 'foo-1.0.0.sha256'),
+ ).deleteSync();
- await pubGet(
- warning: allOf([
- contains(
- 'The existing content-hash from pubspec.lock '
- 'doesn\'t match contents for:',
+ await pubGet(
+ warning: allOf([
+ contains(
+ 'The existing content-hash from pubspec.lock '
+ 'doesn\'t match contents for:',
+ ),
+ contains('* foo-1.0.0 from "${globalServer.url}"'),
+ ]),
+ exitCode: exit_codes.SUCCESS,
+ );
+ final lockfile = loadYaml(
+ File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
+ );
+ final newHash = dig<String>(lockfile, [
+ 'packages',
+ 'foo',
+ 'description',
+ 'sha256',
+ ]);
+ expect(newHash, await server.peekArchiveSha256('foo', '1.0.0'));
+ },
+ );
+
+ test(
+ 'sha256 in cache is checked on pub get '
+ '- warning and redownload on legacy server without content-hashes',
+ () async {
+ final server = await servePackages();
+ server.serveContentHashes = false;
+ server.serve('foo', '1.0.0');
+ await appDir(dependencies: {'foo': 'any'}).create();
+ await pubGet();
+ final lockfile = loadYaml(
+ File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
+ );
+ final originalHash = dig<String>(lockfile, [
+ 'packages',
+ 'foo',
+ 'description',
+ 'sha256',
+ ]);
+ // Create wrong hash on disk.
+ await hostedHashesCache([
+ file(
+ 'foo-1.0.0.sha256',
+ 'e7a7a0f6d9873e4c40cf68cc3cc9ca5b6c8cef6a2220241bdada4b9cb0083279',
),
- contains('* foo-1.0.0 from "${globalServer.url}"'),
- ]),
- exitCode: exit_codes.SUCCESS,
- );
- final lockfile = loadYaml(
- File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
- );
- final newHash =
- dig<String>(lockfile, ['packages', 'foo', 'description', 'sha256']);
- expect(newHash, await server.peekArchiveSha256('foo', '1.0.0'));
- });
+ ]).create();
+
+ await pubGet(
+ warning: 'Cached version of foo-1.0.0 has wrong hash - redownloading.',
+ );
+ await hostedHashesCache([
+ file('foo-1.0.0.sha256', originalHash),
+ ]).validate();
+ },
+ );
test(
- 'sha256 in cache is checked on pub get '
- '- warning and redownload on legacy server without content-hashes',
- () async {
- final server = await servePackages();
- server.serveContentHashes = false;
- server.serve('foo', '1.0.0');
- await appDir(dependencies: {'foo': 'any'}).create();
- await pubGet();
- final lockfile = loadYaml(
- File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
- );
- final originalHash =
- dig<String>(lockfile, ['packages', 'foo', 'description', 'sha256']);
- // Create wrong hash on disk.
- await hostedHashesCache([
- file(
- 'foo-1.0.0.sha256',
- 'e7a7a0f6d9873e4c40cf68cc3cc9ca5b6c8cef6a2220241bdada4b9cb0083279',
- ),
- ]).create();
+ 'sha256 in cache is checked on pub get - warning and redownload',
+ () async {
+ final server = await servePackages();
+ server.serveContentHashes = true;
+ server.serve('foo', '1.0.0');
+ await appDir(dependencies: {'foo': 'any'}).create();
+ await pubGet();
+ final lockfile = loadYaml(
+ File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
+ );
+ final originalHash = dig<String>(lockfile, [
+ 'packages',
+ 'foo',
+ 'description',
+ 'sha256',
+ ]);
+ await hostedHashesCache([
+ file(
+ 'foo-1.0.0.sha256',
+ 'e7a7a0f6d9873e4c40cf68cc3cc9ca5b6c8cef6a2220241bdada4b9cb0083279',
+ ),
+ ]).create();
- await pubGet(
- warning: 'Cached version of foo-1.0.0 has wrong hash - redownloading.',
- );
- await hostedHashesCache([
- file('foo-1.0.0.sha256', originalHash),
- ]).validate();
- });
-
- test('sha256 in cache is checked on pub get - warning and redownload',
- () async {
- final server = await servePackages();
- server.serveContentHashes = true;
- server.serve('foo', '1.0.0');
- await appDir(dependencies: {'foo': 'any'}).create();
- await pubGet();
- final lockfile = loadYaml(
- File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
- );
- final originalHash =
- dig<String>(lockfile, ['packages', 'foo', 'description', 'sha256']);
- await hostedHashesCache([
- file(
- 'foo-1.0.0.sha256',
- 'e7a7a0f6d9873e4c40cf68cc3cc9ca5b6c8cef6a2220241bdada4b9cb0083279',
- ),
- ]).create();
-
- await pubGet(
- warning: 'Cached version of foo-1.0.0 has wrong hash - redownloading.',
- );
- await hostedHashesCache([
- file('foo-1.0.0.sha256', originalHash),
- ]).validate();
- });
+ await pubGet(
+ warning: 'Cached version of foo-1.0.0 has wrong hash - redownloading.',
+ );
+ await hostedHashesCache([
+ file('foo-1.0.0.sha256', originalHash),
+ ]).validate();
+ },
+ );
test(
- 'Legacy lockfile without content-hashes is updated '
- 'with the hash on pub get on legacy server without content-hashes',
- () async {
- final server = await servePackages();
- server.serve('foo', '1.0.0');
- server.serveContentHashes = false;
- await appDir(dependencies: {'foo': 'any'}).create();
- await pubGet();
- // Pretend we had no hash in the lockfile.
- final lockfile = YamlEditor(
- File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
- );
- final originalContentHash = lockfile
- .remove(['packages', 'foo', 'description', 'sha256']).value as String;
- File(p.join(sandbox, appPath, 'pubspec.lock')).writeAsStringSync(
- lockfile.toString(),
- );
- await pubGet();
- final lockfile2 = YamlEditor(
- File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
- );
- expect(
- lockfile2.parseAt(['packages', 'foo', 'description', 'sha256']).value,
- originalContentHash,
- );
- });
+ 'Legacy lockfile without content-hashes is updated '
+ 'with the hash on pub get on legacy server without content-hashes',
+ () async {
+ final server = await servePackages();
+ server.serve('foo', '1.0.0');
+ server.serveContentHashes = false;
+ await appDir(dependencies: {'foo': 'any'}).create();
+ await pubGet();
+ // Pretend we had no hash in the lockfile.
+ final lockfile = YamlEditor(
+ File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
+ );
+ final originalContentHash =
+ lockfile.remove(['packages', 'foo', 'description', 'sha256']).value
+ as String;
+ File(
+ p.join(sandbox, appPath, 'pubspec.lock'),
+ ).writeAsStringSync(lockfile.toString());
+ await pubGet();
+ final lockfile2 = YamlEditor(
+ File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
+ );
+ expect(
+ lockfile2.parseAt(['packages', 'foo', 'description', 'sha256']).value,
+ originalContentHash,
+ );
+ },
+ );
- test(
- 'Legacy lockfile without content-hashes '
+ test('Legacy lockfile without content-hashes '
'is updated with the hash on pub get', () async {
final server = await servePackages();
server.serve('foo', '1.0.0');
@@ -243,11 +274,12 @@
final lockfile = YamlEditor(
File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
);
- final originalContentHash = lockfile
- .remove(['packages', 'foo', 'description', 'sha256']).value as String;
- File(p.join(sandbox, appPath, 'pubspec.lock')).writeAsStringSync(
- lockfile.toString(),
- );
+ final originalContentHash =
+ lockfile.remove(['packages', 'foo', 'description', 'sha256']).value
+ as String;
+ File(
+ p.join(sandbox, appPath, 'pubspec.lock'),
+ ).writeAsStringSync(lockfile.toString());
await pubGet();
final lockfile2 = YamlEditor(
File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
@@ -267,14 +299,13 @@
final lockfile = loadYaml(
File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
);
- final originalHash =
- dig<String>(lockfile, ['packages', 'foo', 'description', 'sha256']);
- await hostedHashesCache([
- file(
- 'foo-1.0.0.sha256',
- 'e',
- ),
- ]).create();
+ final originalHash = dig<String>(lockfile, [
+ 'packages',
+ 'foo',
+ 'description',
+ 'sha256',
+ ]);
+ await hostedHashesCache([file('foo-1.0.0.sha256', 'e')]).create();
await pubGet(
warning: 'Cached version of foo-1.0.0 has wrong hash - redownloading.',
diff --git a/test/dart3_sdk_constraint_hack_test.dart b/test/dart3_sdk_constraint_hack_test.dart
index 993fea9..2555109 100644
--- a/test/dart3_sdk_constraint_hack_test.dart
+++ b/test/dart3_sdk_constraint_hack_test.dart
@@ -120,8 +120,7 @@
);
});
- test(
- 'The bound of ">=2.12.0 <3.0.0" is not '
+ test('The bound of ">=2.12.0 <3.0.0" is not '
'compatible with prereleases of dart 4', () async {
await d.dir(appPath, [
d.pubspec({
@@ -139,32 +138,36 @@
);
});
- test('When the constraint is not rewritten, a helpful hint is given',
- () async {
- await d.appDir(
- dependencies: {'foo': 'any'},
- pubspec: {
- 'environment': {'sdk': '^2.12.0'},
- },
- ).create();
- final server = await servePackages();
+ test(
+ 'When the constraint is not rewritten, a helpful hint is given',
+ () async {
+ await d
+ .appDir(
+ dependencies: {'foo': 'any'},
+ pubspec: {
+ 'environment': {'sdk': '^2.12.0'},
+ },
+ )
+ .create();
+ final server = await servePackages();
- // foo is not null safe.
- server.serve(
- 'foo',
- '1.0.0',
- pubspec: {
- 'environment': {'sdk': '>=2.10.0 <3.0.0'},
- },
- );
- await pubGet(
- error: contains(
- 'The lower bound of "sdk: \'>=2.10.0 <3.0.0\'" '
- 'must be 2.12.0 or higher to enable null safety.'
- '\nFor details, see https://dart.dev/null-safety',
- ),
- );
- });
+ // foo is not null safe.
+ server.serve(
+ 'foo',
+ '1.0.0',
+ pubspec: {
+ 'environment': {'sdk': '>=2.10.0 <3.0.0'},
+ },
+ );
+ await pubGet(
+ error: contains(
+ 'The lower bound of "sdk: \'>=2.10.0 <3.0.0\'" '
+ 'must be 2.12.0 or higher to enable null safety.'
+ '\nFor details, see https://dart.dev/null-safety',
+ ),
+ );
+ },
+ );
test('Rewrite only happens after Dart 3', () async {
await d.dir(appPath, [
diff --git a/test/dependency_override_test.dart b/test/dependency_override_test.dart
index 7c3d8e1..43578e5 100644
--- a/test/dependency_override_test.dart
+++ b/test/dependency_override_test.dart
@@ -107,8 +107,10 @@
..serve('foo', '1.0.0')
..serve('bar', '1.0.0');
- await d
- .dir('baz', [d.libDir('baz'), d.libPubspec('baz', '0.0.1')]).create();
+ await d.dir('baz', [
+ d.libDir('baz'),
+ d.libPubspec('baz', '0.0.1'),
+ ]).create();
await d.dir(appPath, [
d.pubspec({
diff --git a/test/dependency_services/dependency_services_test.dart b/test/dependency_services/dependency_services_test.dart
index f7339ec..51f2962 100644
--- a/test/dependency_services/dependency_services_test.dart
+++ b/test/dependency_services/dependency_services_test.dart
@@ -54,12 +54,7 @@
buffer.writeln('## Section ${args.join(' ')}');
final process = await Process.start(
Platform.resolvedExecutable,
- [
- '--enable-asserts',
- snapshot,
- '--verbose',
- ...args,
- ],
+ ['--enable-asserts', snapshot, '--verbose', ...args],
environment: {
...getPubTestEnvironment(),
'_PUB_TEST_DEFAULT_HOSTED_URL': globalServer.url,
@@ -106,14 +101,13 @@
}) async {
manifestAndLockfile(context, workspace);
await context.runDependencyServices(['list'], environment: environment);
- final report =
- await context.runDependencyServices(['report'], environment: environment);
+ final report = await context.runDependencyServices([
+ 'report',
+ ], environment: environment);
if (reportAssertions != null) {
reportAssertions(json.decode(report) as Map);
}
- final input = json.encode({
- 'dependencyChanges': upgrades,
- });
+ final input = json.encode({'dependencyChanges': upgrades});
await context.runDependencyServices(
['apply'],
@@ -166,18 +160,17 @@
});
testWithGolden('Removing transitive', (context) async {
- final server = (await servePackages())
- ..serve('foo', '1.2.3', deps: {'transitive': '^1.0.0'})
- ..serve('foo', '2.2.3')
- ..serve('transitive', '1.0.0')
- ..serveContentHashes = true;
+ final server =
+ (await servePackages())
+ ..serve('foo', '1.2.3', deps: {'transitive': '^1.0.0'})
+ ..serve('foo', '2.2.3')
+ ..serve('transitive', '1.0.0')
+ ..serveContentHashes = true;
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0'},
}),
]).create();
await pubGet();
@@ -186,14 +179,8 @@
context,
[_PackageVersion('foo', '2.2.3'), _PackageVersion('transitive', null)],
reportAssertions: (report) {
- expect(
- findChangeVersion(report, 'singleBreaking', 'foo'),
- '2.2.3',
- );
- expect(
- findChangeVersion(report, 'singleBreaking', 'transitive'),
- null,
- );
+ expect(findChangeVersion(report, 'singleBreaking', 'foo'), '2.2.3');
+ expect(findChangeVersion(report, 'singleBreaking', 'transitive'), null);
},
);
});
@@ -205,9 +192,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0'},
}),
]).create();
await pubGet();
@@ -223,20 +208,18 @@
'transitive': ['1.0.0'],
},
resultAssertions: (report) {
- expect(
- findChangeVersion(report, 'compatible', 'foo'),
- '1.0.1',
- );
+ expect(findChangeVersion(report, 'compatible', 'foo'), '1.0.1');
},
);
});
testWithGolden('No pubspec.lock', (context) async {
- final server = (await servePackages())
- ..serve('foo', '1.2.3', deps: {'transitive': '^1.0.0'})
- ..serve('foo', '2.2.3')
- ..serve('transitive', '1.0.0')
- ..serveContentHashes = true;
+ final server =
+ (await servePackages())
+ ..serve('foo', '1.2.3', deps: {'transitive': '^1.0.0'})
+ ..serve('foo', '2.2.3')
+ ..serve('transitive', '1.0.0')
+ ..serveContentHashes = true;
await d.git('bar.git', [d.libPubspec('bar', '1.0.0')]).create();
@@ -253,32 +236,26 @@
]).create();
server.dontAllowDownloads();
- await _listReportApply(
- context,
- [
- _PackageVersion('foo', '2.2.3'),
- _PackageVersion('transitive', null),
- ],
- );
+ await _listReportApply(context, [
+ _PackageVersion('foo', '2.2.3'),
+ _PackageVersion('transitive', null),
+ ]);
});
testWithGolden('Compatible', (context) async {
- final server = (await servePackages())
- ..serve('foo', '1.2.3')
- ..serve('foo', '2.2.3')
- ..serve('bar', '1.2.3')
- ..serve('bar', '2.2.3')
- ..serve('boo', '1.2.3')
- ..serveContentHashes = true;
+ final server =
+ (await servePackages())
+ ..serve('foo', '1.2.3')
+ ..serve('foo', '2.2.3')
+ ..serve('bar', '1.2.3')
+ ..serve('bar', '2.2.3')
+ ..serve('boo', '1.2.3')
+ ..serveContentHashes = true;
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'bar': '^1.0.0',
- 'boo': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0', 'bar': '^1.0.0', 'boo': '^1.0.0'},
}),
]).create();
await pubGet();
@@ -289,42 +266,32 @@
await _listReportApply(
context,
- [
- _PackageVersion('foo', '1.2.4'),
- ],
+ [_PackageVersion('foo', '1.2.4')],
reportAssertions: (report) {
- expect(
- findChangeVersion(report, 'compatible', 'foo'),
- '1.2.4',
- );
+ expect(findChangeVersion(report, 'compatible', 'foo'), '1.2.4');
},
);
});
testWithGolden('Preserves no content-hashes', (context) async {
- final server = (await servePackages())
- ..serve('foo', '1.2.3')
- ..serve('foo', '2.2.3')
- ..serve('bar', '1.2.3')
- ..serve('bar', '2.2.3')
- ..serve('boo', '1.2.3')
- ..serveContentHashes = true;
+ final server =
+ (await servePackages())
+ ..serve('foo', '1.2.3')
+ ..serve('foo', '2.2.3')
+ ..serve('bar', '1.2.3')
+ ..serve('bar', '2.2.3')
+ ..serve('boo', '1.2.3')
+ ..serveContentHashes = true;
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'bar': '^1.0.0',
- 'boo': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0', 'bar': '^1.0.0', 'boo': '^1.0.0'},
}),
]).create();
await pubGet();
final lockFile = File(path(p.join(appPath, 'pubspec.lock')));
- final lockFileYaml = YamlEditor(
- lockFile.readAsStringSync(),
- );
+ final lockFileYaml = YamlEditor(lockFile.readAsStringSync());
for (final p in (lockFileYaml.parseAt(['packages']) as YamlMap).entries) {
lockFileYaml.remove(['packages', p.key, 'description', 'sha256']);
}
@@ -335,63 +302,53 @@
server.dontAllowDownloads();
- await _listReportApply(context, [
- _PackageVersion('foo', '1.2.4'),
- ]);
+ await _listReportApply(context, [_PackageVersion('foo', '1.2.4')]);
});
testWithGolden('Preserves pub.dartlang.org as hosted url', (context) async {
- final server = (await servePackages())
- ..serve(r'foo', '1.2.3')
- ..serve(r'bar', '1.2.3')
- ..serveContentHashes = true;
+ final server =
+ (await servePackages())
+ ..serve(r'foo', '1.2.3')
+ ..serve(r'bar', '1.2.3')
+ ..serveContentHashes = true;
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'bar': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0', 'bar': '^1.0.0'},
}),
]).create();
await pubGet();
final lockFile = File(path(p.join(appPath, 'pubspec.lock')));
- final lockFileYaml = YamlEditor(
- lockFile.readAsStringSync(),
- );
+ final lockFileYaml = YamlEditor(lockFile.readAsStringSync());
for (final p in (lockFileYaml.parseAt(['packages']).value as Map).entries) {
- lockFileYaml.update(
- ['packages', p.key, 'description', 'url'],
- 'https://pub.dartlang.org',
- );
+ lockFileYaml.update([
+ 'packages',
+ p.key,
+ 'description',
+ 'url',
+ ], 'https://pub.dartlang.org');
}
lockFile.writeAsStringSync(lockFileYaml.toString());
server.serve('foo', '1.2.4');
server.serve('boo', '1.2.4');
- await _listReportApply(
- context,
- [
- _PackageVersion('foo', '1.2.4'),
- ],
- );
+ await _listReportApply(context, [_PackageVersion('foo', '1.2.4')]);
});
testWithGolden('Adding transitive', (context) async {
- final server = (await servePackages())
- ..serve('foo', '1.2.3')
- ..serve('foo', '2.2.3', deps: {'transitive': '^1.0.0'})
- ..serve('transitive', '1.0.0')
- ..serveContentHashes = true;
+ final server =
+ (await servePackages())
+ ..serve('foo', '1.2.3')
+ ..serve('foo', '2.2.3', deps: {'transitive': '^1.0.0'})
+ ..serve('transitive', '1.0.0')
+ ..serveContentHashes = true;
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0'},
}),
]).create();
await pubGet();
@@ -401,10 +358,7 @@
context,
[_PackageVersion('foo', '2.2.3'), _PackageVersion('transitive', '1.0.0')],
reportAssertions: (report) {
- expect(
- findChangeVersion(report, 'singleBreaking', 'foo'),
- '2.2.3',
- );
+ expect(findChangeVersion(report, 'singleBreaking', 'foo'), '2.2.3');
expect(
findChangeVersion(report, 'singleBreaking', 'transitive'),
'1.0.0',
@@ -414,11 +368,12 @@
});
testWithGolden('multibreaking', (context) async {
- final server = (await servePackages())
- ..serve('foo', '1.0.0')
- ..serve('bar', '1.0.0')
- ..serve('baz', '1.0.0')
- ..serveContentHashes = true;
+ final server =
+ (await servePackages())
+ ..serve('foo', '1.0.0')
+ ..serve('bar', '1.0.0')
+ ..serve('baz', '1.0.0')
+ ..serveContentHashes = true;
await d.dir(appPath, [
d.pubspec({
@@ -453,14 +408,8 @@
_PackageVersion('bar', '2.0.0'),
],
reportAssertions: (report) {
- expect(
- findChangeVersion(report, 'multiBreaking', 'foo'),
- '3.0.1',
- );
- expect(
- findChangeVersion(report, 'multiBreaking', 'bar'),
- '2.0.0',
- );
+ expect(findChangeVersion(report, 'multiBreaking', 'foo'), '3.0.1');
+ expect(findChangeVersion(report, 'multiBreaking', 'bar'), '2.0.0');
},
);
});
@@ -470,12 +419,14 @@
server.serve('foo', '1.0.0');
await d.dir('bar', [d.libPubspec('bar', '1.0.0')]).create();
- await d.appDir(
- dependencies: {
- 'foo': '^1.0.0',
- 'bar': {'path': '../bar'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': '^1.0.0',
+ 'bar': {'path': '../bar'},
+ },
+ )
+ .create();
await pubGet();
server.serve('foo', '2.0.0');
await _listReportApply(
@@ -488,10 +439,7 @@
),
],
reportAssertions: (report) {
- expect(
- findChangeVersion(report, 'multiBreaking', 'foo'),
- '2.0.0',
- );
+ expect(findChangeVersion(report, 'multiBreaking', 'foo'), '2.0.0');
},
);
});
@@ -501,18 +449,20 @@
await d.git('foo.git', [d.libPubspec('foo', '1.0.0')]).create();
await d.git('bar.git', [d.libPubspec('bar', '1.0.0')]).create();
- await d.appDir(
- dependencies: {
- 'foo': {
- 'git': {'url': '../foo.git'},
- },
- 'bar': {
- // A git dependency with a version constraint.
- 'git': {'url': '../bar.git'},
- 'version': '^1.0.0',
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'git': {'url': '../foo.git'},
+ },
+ 'bar': {
+ // A git dependency with a version constraint.
+ 'git': {'url': '../bar.git'},
+ 'version': '^1.0.0',
+ },
+ },
+ )
+ .create();
await pubGet();
final secondVersion = d.git('foo.git', [d.libPubspec('foo', '2.0.0')]);
await secondVersion.commit();
@@ -523,14 +473,9 @@
await _listReportApply(
context,
- [
- _PackageVersion('foo', newRef),
- ],
+ [_PackageVersion('foo', newRef)],
reportAssertions: (report) {
- expect(
- findChangeVersion(report, 'multiBreaking', 'foo'),
- newRef,
- );
+ expect(findChangeVersion(report, 'multiBreaking', 'foo'), newRef);
},
);
});
@@ -565,8 +510,9 @@
);
});
- testWithGolden('Smallest possible upgrade can upgrade beyond breaking',
- (context) async {
+ testWithGolden('Smallest possible upgrade can upgrade beyond breaking', (
+ context,
+ ) async {
final server = await servePackages();
server.serve('foo', '1.1.1'); // This version will be disallowed.
@@ -593,51 +539,52 @@
});
testWithGolden(
- 'Smallest possible upgrade can upgrade other packages if needed',
- (context) async {
- final server = await servePackages();
- server.serve('bar', '1.0.0');
- server.serve('bar', '2.0.0');
- server.serve('bar', '2.2.0');
+ 'Smallest possible upgrade can upgrade other packages if needed',
+ (context) async {
+ final server = await servePackages();
+ server.serve('bar', '1.0.0');
+ server.serve('bar', '2.0.0');
+ server.serve('bar', '2.2.0');
- server.serve(
- 'foo',
- '1.1.1',
- deps: {'bar': '^1.0.0'},
- ); // This version will be disallowed.
+ server.serve(
+ 'foo',
+ '1.1.1',
+ deps: {'bar': '^1.0.0'},
+ ); // This version will be disallowed.
- await d.appDir(dependencies: {'foo': '^1.0.0', 'bar': '^1.0.0'}).create();
- await pubGet();
+ await d.appDir(dependencies: {'foo': '^1.0.0', 'bar': '^1.0.0'}).create();
+ await pubGet();
- server.serve(
- 'foo',
- '2.0.0',
- deps: {'bar': '^2.0.0'},
- ); // This will also be disallowed, a minimal update should not find this.
- server.serve(
- 'foo',
- '2.0.1',
- deps: {'bar': '^2.0.0'},
- ); // We would like this to be the new version.
- server.serve(
- 'foo',
- '2.0.2',
- deps: {'bar': '^2.0.0'},
- ); // This version would not be a minimal update.
+ server.serve(
+ 'foo',
+ '2.0.0',
+ deps: {'bar': '^2.0.0'},
+ ); // This will also be disallowed; a minimal update should not find this.
+ server.serve(
+ 'foo',
+ '2.0.1',
+ deps: {'bar': '^2.0.0'},
+ ); // We would like this to be the new version.
+ server.serve(
+ 'foo',
+ '2.0.2',
+ deps: {'bar': '^2.0.0'},
+ ); // This version would not be a minimal update.
- await _reportWithForbidden(
- context,
- {
- 'foo': ['1.1.1', '2.0.0'],
- 'bar': ['2.0.0'],
- },
- targetPackage: 'foo',
- resultAssertions: (r) {
- expect(findChangeVersion(r, 'smallestUpdate', 'foo'), '2.0.1');
- expect(findChangeVersion(r, 'smallestUpdate', 'bar'), '2.2.0');
- },
- );
- });
+ await _reportWithForbidden(
+ context,
+ {
+ 'foo': ['1.1.1', '2.0.0'],
+ 'bar': ['2.0.0'],
+ },
+ targetPackage: 'foo',
+ resultAssertions: (r) {
+ expect(findChangeVersion(r, 'smallestUpdate', 'foo'), '2.0.1');
+ expect(findChangeVersion(r, 'smallestUpdate', 'bar'), '2.2.0');
+ },
+ );
+ },
+ );
testWithGolden('can upgrade workspaces', (context) async {
(await servePackages())
@@ -659,10 +606,7 @@
extras: {
'workspace': ['pkgs/a'],
},
- deps: {
- 'foo': '^1.0.0',
- 'bar': '^1.0.0',
- },
+ deps: {'foo': '^1.0.0', 'bar': '^1.0.0'},
sdk: '^3.5.0',
),
dir('pkgs', [
@@ -671,10 +615,7 @@
'a',
'1.1.1',
deps: {'bar': '>=1.2.0 <1.5.0', 'only_a': '^1.0.0'},
- devDeps: {
- 'foo': '^1.2.0',
- 'dev': '^1.0.0',
- },
+ devDeps: {'foo': '^1.2.0', 'dev': '^1.0.0'},
resolutionWorkspace: true,
),
]),
@@ -694,9 +635,7 @@
expect(
result.stderr,
- contains(
- 'Only apply dependency_services to the root of the workspace.',
- ),
+ contains('Only apply dependency_services to the root of the workspace.'),
);
expect(result.exitCode, 1);
@@ -713,10 +652,7 @@
],
workspace: ['.', p.join('pkgs', 'a')],
reportAssertions: (report) {
- expect(
- findChangeVersion(report, 'singleBreaking', 'foo'),
- '2.2.3',
- );
+ expect(findChangeVersion(report, 'singleBreaking', 'foo'), '2.2.3');
expect(
findChangeVersion(report, 'singleBreaking', 'transitive'),
'1.0.0',
@@ -728,10 +664,13 @@
}
String? findChangeVersion(dynamic json, String updateType, String name) {
- return dig<String?>(
- json,
- ['dependencies', ('name', 'foo'), updateType, ('name', name), 'version'],
- );
+ return dig<String?>(json, [
+ 'dependencies',
+ ('name', 'foo'),
+ updateType,
+ ('name', name),
+ 'version',
+ ]);
}
class _PackageVersion {
@@ -741,10 +680,10 @@
_PackageVersion(this.name, this.version, {this.constraint});
Map<String, Object?> toJson() => {
- 'name': name,
- 'version': version,
- if (constraint != null) 'constraint': constraint.toString(),
- };
+ 'name': name,
+ 'version': version,
+ if (constraint != null) 'constraint': constraint.toString(),
+ };
}
extension on PackageServer {
diff --git a/test/deps/executables_test.dart b/test/deps/executables_test.dart
index 86988e2..17b1fd9 100644
--- a/test/deps/executables_test.dart
+++ b/test/deps/executables_test.dart
@@ -34,10 +34,10 @@
testWithGolden('lists Dart executables, without entrypoints', (ctx) async {
await d.dir(appPath, [
d.appPubspec(),
- d.dir(
- 'bin',
- [d.file('foo.dart', _validMain), d.file('bar.dart', _invalidMain)],
- ),
+ d.dir('bin', [
+ d.file('foo.dart', _validMain),
+ d.file('bar.dart', _invalidMain),
+ ]),
]).create();
await ctx.runExecutablesTest();
@@ -72,8 +72,9 @@
await ctx.runExecutablesTest();
});
- testWithGolden('lists executables only from immediate dependencies',
- (ctx) async {
+ testWithGolden('lists executables only from immediate dependencies', (
+ ctx,
+ ) async {
await d.dir(appPath, [
d.appPubspec(
dependencies: {
@@ -114,10 +115,10 @@
await d.dir('foo', [
d.libPubspec('foo', '1.0.0'),
- d.dir(
- 'bin',
- [d.file('baz.dart', _validMain), d.file('foo.dart', _validMain)],
- ),
+ d.dir('bin', [
+ d.file('baz.dart', _validMain),
+ d.file('foo.dart', _validMain),
+ ]),
]).create();
await d.dir('bar', [
@@ -154,10 +155,10 @@
await d.dir('foo-2.0', [
d.libPubspec('foo', '2.0.0'),
- d.dir(
- 'bin',
- [d.file('bar.dart', _validMain), d.file('baz.dart', _validMain)],
- ),
+ d.dir('bin', [
+ d.file('bar.dart', _validMain),
+ d.file('baz.dart', _validMain),
+ ]),
]).create();
await d.dir(appPath, [
diff --git a/test/deps_test.dart b/test/deps_test.dart
index 5983fce..911830a 100644
--- a/test/deps_test.dart
+++ b/test/deps_test.dart
@@ -27,10 +27,10 @@
..serve('circular_a', '1.2.3', deps: {'circular_b': 'any'})
..serve('circular_b', '1.2.3', deps: {'circular_a': 'any'});
- await d.dir(
- 'from_path',
- [d.libDir('from_path'), d.libPubspec('from_path', '1.2.3')],
- ).create();
+ await d.dir('from_path', [
+ d.libDir('from_path'),
+ d.libPubspec('from_path', '1.2.3'),
+ ]).create();
await d.dir(appPath, [
d.pubspec({
diff --git a/test/descriptor.dart b/test/descriptor.dart
index c760f98..31d429a 100644
--- a/test/descriptor.dart
+++ b/test/descriptor.dart
@@ -42,33 +42,32 @@
DirectoryDescriptor validPackage({
String version = '1.0.0',
Map<String, Object?>? pubspecExtras,
-}) =>
- dir(appPath, [
- validPubspec(extras: {'version': version, ...?pubspecExtras}),
- file('LICENSE', 'Eh, do what you want.'),
- file('README.md', "This package isn't real."),
- file('CHANGELOG.md', '# $version\nFirst version\n'),
- dir('lib', [file('test_pkg.dart', 'int i = 1;')]),
- ]);
+}) => dir(appPath, [
+ validPubspec(extras: {'version': version, ...?pubspecExtras}),
+ file('LICENSE', 'Eh, do what you want.'),
+ file('README.md', "This package isn't real."),
+ file('CHANGELOG.md', '# $version\nFirst version\n'),
+ dir('lib', [file('test_pkg.dart', 'int i = 1;')]),
+]);
/// Returns a descriptor of a snapshot that can't be run by the current VM.
///
/// This snapshot was generated using version 2.0.0-dev.58.0 of the VM.
FileDescriptor outOfDateSnapshot(String name) => file(
- name,
- base64.decode(
- 'kKvN7wAAAAYBAAEAAQAAAAAAAAABBgMBBh8AAQEAAAABA'
- 'wofAAAAAAAAAFwBShABHhAGAQABJwIAAAAAEwAAAAAAAA'
- 'AVAAAAOQAAAAEAAAACAAAAJWZpbGU6Ly8vVXNlcnMvcm5'
- '5c3Ryb20vdGVtcC90ZW1wLmRhcnQgdm9pZCBtYWluKCkg'
- 'PT4gcHJpbnQoJ2hlbGxvIScpOwoDACABAAAAUQAAAFQGA'
- 'AMBBAIBAAUEBAUGAAAAAAcABAovN0BFbWFpbmhlbGxvIW'
- 'ZpbGU6Ly8vVXNlcnMvcm55c3Ryb20vdGVtcC90ZW1wLmR'
- 'hcnRAbWV0aG9kc2RhcnQ6Y29yZXByaW50AAAAAE0AAACn'
- 'AAAAtAAAALQAAAC4AAABBQAAAAMAAAAJAAAATQAAAAEAA'
- 'AEy',
- ),
- );
+ name,
+ base64.decode(
+ 'kKvN7wAAAAYBAAEAAQAAAAAAAAABBgMBBh8AAQEAAAABA'
+ 'wofAAAAAAAAAFwBShABHhAGAQABJwIAAAAAEwAAAAAAAA'
+ 'AVAAAAOQAAAAEAAAACAAAAJWZpbGU6Ly8vVXNlcnMvcm5'
+ '5c3Ryb20vdGVtcC90ZW1wLmRhcnQgdm9pZCBtYWluKCkg'
+ 'PT4gcHJpbnQoJ2hlbGxvIScpOwoDACABAAAAUQAAAFQGA'
+ 'AMBBAIBAAUEBAUGAAAAAAcABAovN0BFbWFpbmhlbGxvIW'
+ 'ZpbGU6Ly8vVXNlcnMvcm55c3Ryb20vdGVtcC90ZW1wLmR'
+ 'hcnRAbWV0aG9kc2RhcnQ6Y29yZXByaW50AAAAAE0AAACn'
+ 'AAAAtAAAALQAAAC4AAABBQAAAAMAAAAJAAAATQAAAAEAA'
+ 'AEy',
+ ),
+);
/// Describes a file named `pubspec.yaml` with the given YAML-serialized
/// [contents], which should be a serializable object.
@@ -76,16 +75,16 @@
/// [contents] may contain [Future]s that resolve to serializable objects,
/// which may in turn contain [Future]s recursively.
FileDescriptor pubspec(Map<String, Object?> contents) => YamlDescriptor(
- 'pubspec.yaml',
- yaml({
- ...contents,
- // TODO: Copy-pasting this into all call-sites, or use d.libPubspec
- 'environment': {
- 'sdk': defaultSdkConstraint,
- ...(contents['environment'] ?? {}) as Map,
- },
- }),
- );
+ 'pubspec.yaml',
+ yaml({
+ ...contents,
+ // TODO: Copy-pasting this into all call-sites, or use d.libPubspec
+ 'environment': {
+ 'sdk': defaultSdkConstraint,
+ ...(contents['environment'] ?? {}) as Map,
+ },
+ }),
+);
Descriptor rawPubspec(Map<String, Object> contents) =>
YamlDescriptor('pubspec.yaml', yaml(contents));
@@ -93,10 +92,7 @@
/// Describes a file named `pubspec.yaml` for an application package with the
/// given [dependencies].
Descriptor appPubspec({Map? dependencies, Map<String, Object>? extras}) {
- final map = <String, Object>{
- 'name': 'myapp',
- ...?extras,
- };
+ final map = <String, Object>{'name': 'myapp', ...?extras};
if (dependencies != null) map['dependencies'] = dependencies;
return pubspec(map);
}
@@ -123,13 +119,11 @@
if (sdk != null) {
map['environment'] = {'sdk': sdk};
}
- return pubspec(
- {
- ...map,
- if (resolutionWorkspace) 'resolution': 'workspace',
- ...extras ?? {},
- },
- );
+ return pubspec({
+ ...map,
+ if (resolutionWorkspace) 'resolution': 'workspace',
+ ...extras ?? {},
+ });
}
/// Describes a file named `pubspec_overrides.yaml` by default, with the given
@@ -137,10 +131,8 @@
///
/// [contents] may contain [Future]s that resolve to serializable objects,
/// which may in turn contain [Future]s recursively.
-Descriptor pubspecOverrides(Map<String, Object> contents) => YamlDescriptor(
- 'pubspec_overrides.yaml',
- yaml(contents),
- );
+Descriptor pubspecOverrides(Map<String, Object> contents) =>
+ YamlDescriptor('pubspec_overrides.yaml', yaml(contents));
/// Describes a directory named `lib` containing a single dart file named
/// `<name>.dart` that contains a line of Dart code.
@@ -153,9 +145,9 @@
/// Describes a directory whose name ends with a hyphen followed by an
/// alphanumeric hash.
Descriptor hashDir(String name, Iterable<Descriptor> contents) => pattern(
- RegExp("$name${r'-[a-f0-9]+'}"),
- (dirName) => dir(dirName, contents),
- );
+ RegExp("$name${r'-[a-f0-9]+'}"),
+ (dirName) => dir(dirName, contents),
+);
/// Describes a directory for a Git repo with a dart package.
/// This directory is of the form found in the revision cache of the global
@@ -229,10 +221,9 @@
/// that this cache represents. It defaults to `globalServer.port`.
Descriptor hostedHashesCache(Iterable<Descriptor> contents, {int? port}) {
return dir(cachePath, [
- dir(
- 'hosted-hashes',
- [dir('localhost%58${port ?? globalServer.port}', contents)],
- ),
+ dir('hosted-hashes', [
+ dir('localhost%58${port ?? globalServer.port}', contents),
+ ]),
]);
}
@@ -248,19 +239,17 @@
String? refreshToken,
DateTime? expiration,
}) {
- return configDir(
- [
- file(
- 'pub-credentials.json',
- _credentialsFileContent(
- server,
- accessToken,
- refreshToken: refreshToken,
- expiration: expiration,
- ),
+ return configDir([
+ file(
+ 'pub-credentials.json',
+ _credentialsFileContent(
+ server,
+ accessToken,
+ refreshToken: refreshToken,
+ expiration: expiration,
),
- ],
- );
+ ),
+ ]);
}
Descriptor legacyCredentialsFile(
@@ -269,20 +258,17 @@
String? refreshToken,
DateTime? expiration,
}) {
- return dir(
- cachePath,
- [
- file(
- 'credentials.json',
- _credentialsFileContent(
- server,
- accessToken,
- refreshToken: refreshToken,
- expiration: expiration,
- ),
+ return dir(cachePath, [
+ file(
+ 'credentials.json',
+ _credentialsFileContent(
+ server,
+ accessToken,
+ refreshToken: refreshToken,
+ expiration: expiration,
),
- ],
- );
+ ),
+ ]);
}
String _credentialsFileContent(
@@ -295,10 +281,7 @@
accessToken,
refreshToken: refreshToken,
tokenEndpoint: Uri.parse(server.url).resolve('/token'),
- scopes: [
- 'openid',
- 'https://www.googleapis.com/auth/userinfo.email',
- ],
+ scopes: ['openid', 'https://www.googleapis.com/auth/userinfo.email'],
expiration: expiration,
).toJson();
@@ -326,39 +309,27 @@
String? pubCache,
String? flutterVersion,
String? flutterRoot,
-}) =>
- PackageConfigFileDescriptor(
- packages,
- generatorVersion,
- pubCache ??
- p.join(
- sandbox,
- cachePath,
- ),
- flutterRoot,
- flutterVersion,
- );
+}) => PackageConfigFileDescriptor(
+ packages,
+ generatorVersion,
+ pubCache ?? p.join(sandbox, cachePath),
+ flutterRoot,
+ flutterVersion,
+);
Descriptor appPackageConfigFile(
List<PackageConfigEntry> packages, {
String generatorVersion = '3.1.2+3',
String? flutterRoot,
String? flutterVersion,
-}) =>
- dir(
- appPath,
- [
- packageConfigFile(
- [
- packageConfigEntry(name: 'myapp', path: '.'),
- ...packages,
- ],
- generatorVersion: generatorVersion,
- flutterRoot: flutterRoot,
- flutterVersion: flutterVersion,
- ),
- ],
- );
+}) => dir(appPath, [
+ packageConfigFile(
+ [packageConfigEntry(name: 'myapp', path: '.'), ...packages],
+ generatorVersion: generatorVersion,
+ flutterRoot: flutterRoot,
+ flutterVersion: flutterVersion,
+ ),
+]);
/// Create a [PackageConfigEntry] which assumes package with [name] is either
/// a cached package with given [version] or a path dependency at given [path].
@@ -393,10 +364,9 @@
Descriptor flutterVersion(String version) {
return dir('bin', [
- dir(
- 'cache',
- [file('flutter.version.json', '{"flutterVersion":"$version"}')],
- ),
+ dir('cache', [
+ file('flutter.version.json', '{"flutterVersion":"$version"}'),
+ ]),
]);
}
diff --git a/test/descriptor/git.dart b/test/descriptor/git.dart
index 95ae0b2..c52b7da 100644
--- a/test/descriptor/git.dart
+++ b/test/descriptor/git.dart
@@ -18,11 +18,7 @@
await super.create(parent);
await _runGitCommands(parent, [
['init'],
- [
- 'config',
- 'core.excludesfile',
- '',
- ],
+ ['config', 'core.excludesfile', ''],
['add', '.'],
['commit', '-m', 'initial commit', '--allow-empty'],
]);
diff --git a/test/descriptor/tar.dart b/test/descriptor/tar.dart
index 954a75a..76e92b8 100644
--- a/test/descriptor/tar.dart
+++ b/test/descriptor/tar.dart
@@ -15,8 +15,8 @@
final List<Descriptor> contents;
TarFileDescriptor(super.name, Iterable<Descriptor> contents)
- : contents = contents.toList(),
- super.protected();
+ : contents = contents.toList(),
+ super.protected();
/// Creates the files and directories within this tar file, then archives
/// them, compresses them, and saves the result to [parent].
diff --git a/test/dev_dependency_test.dart b/test/dev_dependency_test.dart
index 3d19911..bc8cbf6 100644
--- a/test/dev_dependency_test.dart
+++ b/test/dev_dependency_test.dart
@@ -9,11 +9,15 @@
void main() {
test("includes root package's dev dependencies", () async {
- await d
- .dir('foo', [d.libDir('foo'), d.libPubspec('foo', '0.0.1')]).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '0.0.1'),
+ ]).create();
- await d
- .dir('bar', [d.libDir('bar'), d.libPubspec('bar', '0.0.1')]).create();
+ await d.dir('bar', [
+ d.libDir('bar'),
+ d.libPubspec('bar', '0.0.1'),
+ ]).create();
await d.dir(appPath, [
d.pubspec({
@@ -45,8 +49,10 @@
),
]).create();
- await d
- .dir('bar', [d.libDir('bar'), d.libPubspec('bar', '0.0.1')]).create();
+ await d.dir('bar', [
+ d.libDir('bar'),
+ d.libPubspec('bar', '0.0.1'),
+ ]).create();
await d.dir(appPath, [
d.pubspec({
@@ -77,8 +83,10 @@
}),
]).create();
- await d
- .dir('bar', [d.libDir('bar'), d.libPubspec('bar', '0.0.1')]).create();
+ await d.dir('bar', [
+ d.libDir('bar'),
+ d.libPubspec('bar', '0.0.1'),
+ ]).create();
await d.dir(appPath, [
d.appPubspec(
diff --git a/test/directory_option_test.dart b/test/directory_option_test.dart
index da0505e..e884dde 100644
--- a/test/directory_option_test.dart
+++ b/test/directory_option_test.dart
@@ -12,24 +12,22 @@
import 'test_pub.dart';
Future<void> main() async {
- testWithGolden('commands taking a --directory/-C parameter work',
- (ctx) async {
+ testWithGolden('commands taking a --directory/-C parameter work', (
+ ctx,
+ ) async {
await servePackages()
..serve('foo', '1.0.0')
..serve('foo', '0.1.2')
..serve('bar', '1.2.3');
await credentialsFile(globalServer, 'access-token').create();
- globalServer.handle(
- RegExp('/api/packages/test_pkg/uploaders'),
- (request) {
- return shelf.Response.ok(
- jsonEncode({
- 'success': {'message': 'Good job!'},
- }),
- headers: {'content-type': 'application/json'},
- );
- },
- );
+ globalServer.handle(RegExp('/api/packages/test_pkg/uploaders'), (request) {
+ return shelf.Response.ok(
+ jsonEncode({
+ 'success': {'message': 'Good job!'},
+ }),
+ headers: {'content-type': 'application/json'},
+ );
+ });
await validPackage().create();
await dir(appPath, [
@@ -78,10 +76,7 @@
];
for (var i = 0; i < cases.length; i++) {
- await ctx.run(
- cases[i],
- workingDirectory: sandbox,
- );
+ await ctx.run(cases[i], workingDirectory: sandbox);
}
});
}
diff --git a/test/downgrade/doesnt_change_git_dependencies_test.dart b/test/downgrade/doesnt_change_git_dependencies_test.dart
index 9d832d8..a3f3ad0 100644
--- a/test/downgrade/doesnt_change_git_dependencies_test.dart
+++ b/test/downgrade/doesnt_change_git_dependencies_test.dart
@@ -11,25 +11,27 @@
test("doesn't change git dependencies", () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
await pubGet();
final originalFooSpec = packageSpec('foo');
- await d.git(
- 'foo.git',
- [d.libDir('foo', 'foo 2'), d.libPubspec('foo', '1.0.0')],
- ).commit();
+ await d.git('foo.git', [
+ d.libDir('foo', 'foo 2'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).commit();
await pubDowngrade();
diff --git a/test/downgrade/dry_run_does_not_apply_changes_test.dart b/test/downgrade/dry_run_does_not_apply_changes_test.dart
index 44b5d26..1c018ad 100644
--- a/test/downgrade/dry_run_does_not_apply_changes_test.dart
+++ b/test/downgrade/dry_run_does_not_apply_changes_test.dart
@@ -29,9 +29,10 @@
// Do the dry run.
await pubDowngrade(
args: ['--dry-run'],
- output: allOf(
- [contains('< foo 1.0.0'), contains('Would change 1 dependency.')],
- ),
+ output: allOf([
+ contains('< foo 1.0.0'),
+ contains('Would change 1 dependency.'),
+ ]),
);
await d.dir(appPath, [
diff --git a/test/downgrade/tighten_test.dart b/test/downgrade/tighten_test.dart
index 3cec338..7cf6692 100644
--- a/test/downgrade/tighten_test.dart
+++ b/test/downgrade/tighten_test.dart
@@ -8,28 +8,32 @@
import '../test_pub.dart';
void main() {
- test('--tighten will set lower bounds to the actually achieved version',
- () async {
- await servePackages()
- ..serve(
- 'foo',
- '1.0.0',
- ) // Because of the bar constraint, this is not achievable.
- ..serve('foo', '2.0.0')
- ..serve('foo', '3.0.0')
- ..serve('bar', '1.0.0', deps: {'foo': '>=2.0.0'});
+ test(
+ '--tighten will set lower bounds to the actually achieved version',
+ () async {
+ await servePackages()
+ ..serve(
+ 'foo',
+ '1.0.0',
+ ) // Because of the bar constraint, this is not achievable.
+ ..serve('foo', '2.0.0')
+ ..serve('foo', '3.0.0')
+ ..serve('bar', '1.0.0', deps: {'foo': '>=2.0.0'});
- await d.appDir(dependencies: {'foo': '>=1.0.0', 'bar': '^1.0.0'}).create();
+ await d
+ .appDir(dependencies: {'foo': '>=1.0.0', 'bar': '^1.0.0'})
+ .create();
- await pubGet(output: contains('foo 3.0.0'));
- await pubDowngrade(
- args: ['--tighten'],
- output: allOf(
- contains('< foo 2.0.0 (was 3.0.0)'),
- contains('foo: >=1.0.0 -> >=2.0.0'),
- ),
- );
- });
+ await pubGet(output: contains('foo 3.0.0'));
+ await pubDowngrade(
+ args: ['--tighten'],
+ output: allOf(
+ contains('< foo 2.0.0 (was 3.0.0)'),
+ contains('foo: >=1.0.0 -> >=2.0.0'),
+ ),
+ );
+ },
+ );
test('--tighten works for workspace with internal dependencies', () async {
await servePackages();
diff --git a/test/downgrade/unlock_if_necessary_test.dart b/test/downgrade/unlock_if_necessary_test.dart
index 96d789d..a073c75 100644
--- a/test/downgrade/unlock_if_necessary_test.dart
+++ b/test/downgrade/unlock_if_necessary_test.dart
@@ -8,8 +8,7 @@
import '../test_pub.dart';
void main() {
- test(
- "downgrades one locked hosted package's dependencies if it's "
+ test("downgrades one locked hosted package's dependencies if it's "
'necessary', () async {
final server = await servePackages();
server.serve('foo', '2.0.0', deps: {'foo_dep': 'any'});
diff --git a/test/embedding/embedding_test.dart b/test/embedding/embedding_test.dart
index 3ec6a00..db63269 100644
--- a/test/embedding/embedding_test.dart
+++ b/test/embedding/embedding_test.dart
@@ -54,8 +54,9 @@
'\$ $_commandRunner ${args.join(' ')}',
if (stdoutLines.isNotEmpty) _filter(stdoutLines.join('\n')),
if (stderrLines.isNotEmpty)
- _filter(stderrLines.join('\n'))
- .replaceAll(RegExp('^', multiLine: true), '[E] '),
+ _filter(
+ stderrLines.join('\n'),
+ ).replaceAll(RegExp('^', multiLine: true), '[E] '),
].join('\n'),
);
buffer.write('\n');
@@ -87,10 +88,10 @@
setUpAll(() async {
final tempDir = Directory.systemTemp.createTempSync();
snapshot = p.join(tempDir.path, 'command_runner.dart.snapshot');
- final r = Process.runSync(
- Platform.resolvedExecutable,
- ['--snapshot=$snapshot', _commandRunner],
- );
+ final r = Process.runSync(Platform.resolvedExecutable, [
+ '--snapshot=$snapshot',
+ _commandRunner,
+ ]);
expect(r.exitCode, 0, reason: r.stderr as String);
});
@@ -109,9 +110,7 @@
],
);
- await d.appDir(
- dependencies: {'flutter_gen': '^1.0.0'},
- ).create();
+ await d.appDir(dependencies: {'flutter_gen': '^1.0.0'}).create();
await pubGet();
final buffer = StringBuffer();
@@ -138,10 +137,7 @@
'''),
]),
]).create();
- await ctx.runEmbedding(
- ['pub', 'get'],
- workingDirectory: d.path(appPath),
- );
+ await ctx.runEmbedding(['pub', 'get'], workingDirectory: d.path(appPath));
await ctx.runEmbedding(
['pub', 'run', 'bin/main.dart'],
exitCode: 123,
@@ -150,45 +146,39 @@
});
testWithGolden(
- 'logfile is written with --verbose and on unexpected exceptions',
- (context) async {
- final server = await servePackages();
- server.serve('foo', '1.0.0');
- await d.appDir(dependencies: {'foo': 'any'}).create();
+ 'logfile is written with --verbose and on unexpected exceptions',
+ (context) async {
+ final server = await servePackages();
+ server.serve('foo', '1.0.0');
+ await d.appDir(dependencies: {'foo': 'any'}).create();
- // TODO(sigurdm) This logs the entire verbose trace to a golden file.
- //
- // This is fragile, and can break for all sorts of small reasons. We think
- // this might be worth while having to have at least minimal testing of the
- // verbose stack trace.
- //
- // But if you, future contributor, think this test is annoying: feel free to
- // remove it, or rewrite it to filter out the stack-trace itself, only
- // testing for creation of the file.
- //
- // It is a fragile test, and we acknowledge that it's usefulness can be
- // debated...
- await context.runEmbedding(
- ['pub', '--verbose', 'get'],
- workingDirectory: d.path(appPath),
- );
- context.expectNextSection(
- _filter(
- File(logFile).readAsStringSync(),
- ),
- );
- await d.dir('empty').create();
- await context.runEmbedding(
- ['pub', 'fail'],
- workingDirectory: d.path('empty'),
- exitCode: 1,
- );
- context.expectNextSection(
- _filter(
- File(logFile).readAsStringSync(),
- ),
- );
- });
+ // TODO(sigurdm) This logs the entire verbose trace to a golden file.
+ //
+ // This is fragile, and can break for all sorts of small reasons. We think
+ // this might be worthwhile to have at least minimal testing of
+ // the verbose stack trace.
+ //
+ // But if you, future contributor, think this test is annoying: feel free
+ // to remove it, or rewrite it to filter out the stack-trace itself, only
+ // testing for creation of the file.
+ //
+ // It is a fragile test, and we acknowledge that its usefulness can be
+ // debated...
+ await context.runEmbedding([
+ 'pub',
+ '--verbose',
+ 'get',
+ ], workingDirectory: d.path(appPath));
+ context.expectNextSection(_filter(File(logFile).readAsStringSync()));
+ await d.dir('empty').create();
+ await context.runEmbedding(
+ ['pub', 'fail'],
+ workingDirectory: d.path('empty'),
+ exitCode: 1,
+ );
+ context.expectNextSection(_filter(File(logFile).readAsStringSync()));
+ },
+ );
test('`embedding --verbose pub` is verbose', () async {
await servePackages();
@@ -199,10 +189,10 @@
testWithGolden('--help', (context) async {
await servePackages();
- await context.runEmbedding(
- ['pub', '--help'],
- workingDirectory: d.path('.'),
- );
+ await context.runEmbedding([
+ 'pub',
+ '--help',
+ ], workingDirectory: d.path('.'));
});
testWithGolden('--color forces colors', (context) async {
@@ -243,9 +233,7 @@
'environment': {'sdk': '^2.18.0'},
'dependencies': {'foo': '^1.0.0'},
}),
- d.dir('bin', [
- d.file('myapp.dart', 'main() {print(42);}'),
- ]),
+ d.dir('bin', [d.file('myapp.dart', 'main() {print(42);}')]),
]).create();
final server = await servePackages();
@@ -260,8 +248,9 @@
await pubGet(environment: {'_PUB_TEST_SDK_VERSION': '2.18.3'});
// Deleting the version-listing cache will cause it to be refetched, and the
// warning will happen.
- File(p.join(globalServer.cachingPath, '.cache', 'foo-versions.json'))
- .deleteSync();
+ File(
+ p.join(globalServer.cachingPath, '.cache', 'foo-versions.json'),
+ ).deleteSync();
server.serve(
'foo',
'1.0.1',
@@ -305,70 +294,63 @@
);
});
- test('`embedding run` does not have output when successful and no terminal',
- () async {
- await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- 'dependencies': {'foo': '^1.0.0'},
- }),
- d.dir('bin', [
- d.file('myapp.dart', 'main() {print(42);}'),
- ]),
- ]).create();
-
- final server = await servePackages();
- server.serve('foo', '1.0.0');
-
- final buffer = StringBuffer();
- await runEmbeddingToBuffer(
- ['run', 'myapp'],
- buffer,
- workingDirectory: d.path(appPath),
- environment: {EnvironmentKeys.forceTerminalOutput: '0'},
- );
-
- expect(
- buffer.toString(),
- allOf(
- isNot(contains('Resolving dependencies...')),
- contains('42'),
- ),
- );
- });
- test('`embedding run` outputs info when successful and has a terminal',
- () async {
- await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- 'dependencies': {'foo': '^1.0.0'},
- }),
- d.dir('bin', [
- d.file('myapp.dart', 'main() {print(42);}'),
- ]),
- ]).create();
-
- final server = await servePackages();
- server.serve('foo', '1.0.0');
-
- final buffer = StringBuffer();
- await runEmbeddingToBuffer(
- ['run', 'myapp'],
- buffer,
- workingDirectory: d.path(appPath),
- environment: {EnvironmentKeys.forceTerminalOutput: '1'},
- );
- expect(
- buffer.toString(),
- allOf(
- contains('Resolving dependencies'),
- contains('42'),
- ),
- );
- });
-
test(
- '`embedding run` does not recompile executables '
+ '`embedding run` does not have output when successful and no terminal',
+ () async {
+ await d.dir(appPath, [
+ d.pubspec({
+ 'name': 'myapp',
+ 'dependencies': {'foo': '^1.0.0'},
+ }),
+ d.dir('bin', [d.file('myapp.dart', 'main() {print(42);}')]),
+ ]).create();
+
+ final server = await servePackages();
+ server.serve('foo', '1.0.0');
+
+ final buffer = StringBuffer();
+ await runEmbeddingToBuffer(
+ ['run', 'myapp'],
+ buffer,
+ workingDirectory: d.path(appPath),
+ environment: {EnvironmentKeys.forceTerminalOutput: '0'},
+ );
+
+ expect(
+ buffer.toString(),
+ allOf(isNot(contains('Resolving dependencies...')), contains('42')),
+ );
+ },
+ );
+ test(
+ '`embedding run` outputs info when successful and has a terminal',
+ () async {
+ await d.dir(appPath, [
+ d.pubspec({
+ 'name': 'myapp',
+ 'dependencies': {'foo': '^1.0.0'},
+ }),
+ d.dir('bin', [d.file('myapp.dart', 'main() {print(42);}')]),
+ ]).create();
+
+ final server = await servePackages();
+ server.serve('foo', '1.0.0');
+
+ final buffer = StringBuffer();
+ await runEmbeddingToBuffer(
+ ['run', 'myapp'],
+ buffer,
+ workingDirectory: d.path(appPath),
+ environment: {EnvironmentKeys.forceTerminalOutput: '1'},
+ );
+ expect(
+ buffer.toString(),
+ allOf(contains('Resolving dependencies'), contains('42')),
+ );
+ },
+ );
+
+ test('`embedding run` does not recompile executables '
'from packages depending on sdk packages', () async {
final server = await servePackages();
server.serve(
@@ -384,25 +366,18 @@
await d.dir('flutter', [
d.dir('bin', [
d.dir('cache', [
- d.file(
- 'flutter.version.json',
- '{"flutterVersion": "1.2.3"}',
- ),
+ d.file('flutter.version.json', '{"flutterVersion": "1.2.3"}'),
]),
]),
d.dir('packages', [
- d.dir('foo', [
- d.libPubspec('foo', '1.2.3'),
- ]),
+ d.dir('foo', [d.libPubspec('foo', '1.2.3')]),
]),
]).create();
await d.dir(appPath, [
d.pubspec({
'name': 'myapp',
- 'dependencies': {
- 'hosted': '^1.0.0',
- },
+ 'dependencies': {'hosted': '^1.0.0'},
}),
]).create();
@@ -419,10 +394,7 @@
expect(
buffer.toString(),
- allOf(
- contains('Built hosted:hosted'),
- contains('42'),
- ),
+ allOf(contains('Built hosted:hosted'), contains('42')),
);
final buffer2 = StringBuffer();
@@ -437,10 +409,7 @@
);
expect(
buffer2.toString(),
- allOf(
- isNot(contains('Built hosted:hosted')),
- contains('42'),
- ),
+ allOf(isNot(contains('Built hosted:hosted')), contains('42')),
);
});
@@ -457,10 +426,7 @@
);
expect(
buffer.toString(),
- allOf(
- contains('Did you mean one of these?'),
- contains(' pub'),
- ),
+ allOf(contains('Did you mean one of these?'), contains(' pub')),
);
}
});
@@ -476,10 +442,7 @@
.replaceAll(Platform.pathSeparator, '/')
.replaceAll(Platform.operatingSystem, r'$OS')
.replaceAll(globalServer.port.toString(), r'$PORT')
- .replaceAll(
- RegExp(r'^Created:(.*)$', multiLine: true),
- r'Created: $TIME',
- )
+ .replaceAll(RegExp(r'^Created:(.*)$', multiLine: true), r'Created: $TIME')
.replaceAll(
RegExp(r'Generated by pub on (.*)$', multiLine: true),
r'Generated by pub on $TIME',
@@ -488,14 +451,8 @@
RegExp(r'X-Pub-Session-ID(.*)$', multiLine: true),
r'X-Pub-Session-ID: $ID',
)
- .replaceAll(
- RegExp(r'took (.*)$', multiLine: true),
- r'took: $TIME',
- )
- .replaceAll(
- RegExp(r'date: (.*)$', multiLine: true),
- r'date: $TIME',
- )
+ .replaceAll(RegExp(r'took (.*)$', multiLine: true), r'took: $TIME')
+ .replaceAll(RegExp(r'date: (.*)$', multiLine: true), r'date: $TIME')
.replaceAll(
RegExp(r'Creating (.*) from stream\.$', multiLine: true),
r'Creating $FILE from stream',
@@ -562,14 +519,8 @@
),
r' tool/test-bin/pub_command_runner.dart',
)
- .replaceAll(
- RegExp(r'[ ]{4,}', multiLine: true),
- r' ',
- )
- .replaceAll(
- RegExp(r' [\d]+:[\d]+ +', multiLine: true),
- r' $LINE:$COL ',
- )
+ .replaceAll(RegExp(r'[ ]{4,}', multiLine: true), r' ')
+ .replaceAll(RegExp(r' [\d]+:[\d]+ +', multiLine: true), r' $LINE:$COL ')
.replaceAll(
RegExp(r'Writing \d+ characters', multiLine: true),
r'Writing $N characters',
@@ -595,7 +546,6 @@
RegExp(r'"archive_sha256":"[0-9a-f]{64}"', multiLine: true),
r'"archive_sha256":"$SHA256"',
)
-
/// TODO(sigurdm): This hack suppresses differences in stack-traces
/// between dart 2.17 and 2.18. Remove when 2.18 is stable.
.replaceAllMapped(
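// A minimal sketch (not part of the diff) of the output-normalization pattern
// the hunks above reformat: volatile substrings such as durations, dates and
// session ids are rewritten with multi-line regexes so recorded test output
// stays stable across runs. The `stabilize` name is hypothetical; `replaceAll`
// and `RegExp` are from dart:core.
String stabilize(String output) => output
    .replaceAll(RegExp(r'took (.*)$', multiLine: true), r'took: $TIME')
    .replaceAll(RegExp(r'date: (.*)$', multiLine: true), r'date: $TIME')
    .replaceAll(
      RegExp(r'X-Pub-Session-ID(.*)$', multiLine: true),
      r'X-Pub-Session-ID: $ID',
    );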
diff --git a/test/embedding/ensure_pubspec_resolved.dart b/test/embedding/ensure_pubspec_resolved.dart
index 5e8ab33..a6160f1 100644
--- a/test/embedding/ensure_pubspec_resolved.dart
+++ b/test/embedding/ensure_pubspec_resolved.dart
@@ -38,38 +38,43 @@
});
test(
- 'does not require a pub get '
- 'if a `flutter_gen` package is injected into package_config.json',
- () async {
- await d.dir('bar', [
- d.pubspec({'name': 'bar'}),
- ]).create();
- await d.dir(appPath, [
- d.appPubspec(
- dependencies: {
- 'bar': {'path': '../bar'},
- },
- ),
- ]).create();
+ 'does not require a pub get '
+ 'if a `flutter_gen` package is injected into package_config.json',
+ () async {
+ await d.dir('bar', [
+ d.pubspec({'name': 'bar'}),
+ ]).create();
+ await d.dir(appPath, [
+ d.appPubspec(
+ dependencies: {
+ 'bar': {'path': '../bar'},
+ },
+ ),
+ ]).create();
- await pubGet();
+ await pubGet();
- final packageConfig =
- p.join(d.sandbox, 'myapp', '.dart_tool', 'package_config.json');
- final contents =
- json.decode(File(packageConfig).readAsStringSync()) as Map;
- (contents['packages'] as List).add({
- 'name': 'flutter_gen',
- 'rootUri': 'flutter_gen',
- 'languageVersion': '2.8',
- });
- writeTextFile(packageConfig, json.encode(contents));
+ final packageConfig = p.join(
+ d.sandbox,
+ 'myapp',
+ '.dart_tool',
+ 'package_config.json',
+ );
+ final contents =
+ json.decode(File(packageConfig).readAsStringSync()) as Map;
+ (contents['packages'] as List).add({
+ 'name': 'flutter_gen',
+ 'rootUri': 'flutter_gen',
+ 'languageVersion': '2.8',
+ });
+ writeTextFile(packageConfig, json.encode(contents));
- await runPub(
- args: ['run', 'bin/script.dart'],
- output: endsWith('hello!'),
- );
- });
+ await runPub(
+ args: ['run', 'bin/script.dart'],
+ output: endsWith('hello!'),
+ );
+ },
+ );
group('Does an implicit pub get if', () {
test("there's no lockfile", () async {
@@ -78,8 +83,9 @@
});
test("there's no package_config.json", () async {
- File(p.join(d.sandbox, 'myapp/.dart_tool/package_config.json'))
- .deleteSync();
+ File(
+ p.join(d.sandbox, 'myapp/.dart_tool/package_config.json'),
+ ).deleteSync();
await _implicitPubGet(
'`./pubspec.yaml` exists without corresponding `./pubspec.yaml` or `.dart_tool/pub/workspace_ref.json`.',
@@ -100,8 +106,10 @@
// Ensure that the pubspec looks newer than the lockfile.
await _touch('pubspec.yaml');
- await _implicitPubGet('The pubspec.yaml file has changed since the '
- 'pubspec.lock file was generated');
+ await _implicitPubGet(
+ 'The pubspec.yaml file has changed since the '
+ 'pubspec.lock file was generated',
+ );
});
test('the lockfile has a dependency from the wrong source', () async {
@@ -116,8 +124,10 @@
// Ensure that the pubspec looks newer than the lockfile.
await _touch('pubspec.yaml');
- await _implicitPubGet('The pubspec.yaml file has changed since the '
- 'pubspec.lock file was generated');
+ await _implicitPubGet(
+ 'The pubspec.yaml file has changed since the '
+ 'pubspec.lock file was generated',
+ );
});
test('the lockfile has a dependency from an unknown source', () async {
@@ -145,32 +155,38 @@
// Ensure that the pubspec looks newer than the lockfile.
await _touch('pubspec.yaml');
- await _implicitPubGet('The pubspec.yaml file has changed since the '
- 'pubspec.lock file was generated.');
+ await _implicitPubGet(
+ 'The pubspec.yaml file has changed since the '
+ 'pubspec.lock file was generated.',
+ );
});
- test('the lockfile has a dependency with the wrong description',
- () async {
- await d.dir('bar', [d.libPubspec('foo', '1.0.0')]).create();
+ test(
+ 'the lockfile has a dependency with the wrong description',
+ () async {
+ await d.dir('bar', [d.libPubspec('foo', '1.0.0')]).create();
- await d.dir(appPath, [
- d.appPubspec(
- dependencies: {
- 'foo': {'path': '../bar'},
- },
- ),
- ]).create();
+ await d.dir(appPath, [
+ d.appPubspec(
+ dependencies: {
+ 'foo': {'path': '../bar'},
+ },
+ ),
+ ]).create();
- await pubGet();
+ await pubGet();
- await createLockFile(appPath, dependenciesInSandBox: ['foo']);
+ await createLockFile(appPath, dependenciesInSandBox: ['foo']);
- // Ensure that the pubspec looks newer than the lockfile.
- await _touch('pubspec.yaml');
+ // Ensure that the pubspec looks newer than the lockfile.
+ await _touch('pubspec.yaml');
- await _implicitPubGet('The pubspec.yaml file has changed since the '
- 'pubspec.lock file was generated');
- });
+ await _implicitPubGet(
+ 'The pubspec.yaml file has changed since the '
+ 'pubspec.lock file was generated',
+ );
+ },
+ );
test('the pubspec has an incompatible version of a dependency', () async {
await d.dir(appPath, [
@@ -186,12 +202,13 @@
// Ensure that the pubspec looks newer than the lockfile.
await _touch('pubspec.yaml');
- await _implicitPubGet('The pubspec.yaml file has changed since the '
- 'pubspec.lock file was generated');
+ await _implicitPubGet(
+ 'The pubspec.yaml file has changed since the '
+ 'pubspec.lock file was generated',
+ );
});
- test(
- 'the lockfile is pointing to an unavailable package with a newer '
+ test('the lockfile is pointing to an unavailable package with a newer '
'pubspec', () async {
await d.dir(appPath, [
d.appPubspec(dependencies: {'foo': '1.0.0'}),
@@ -226,10 +243,7 @@
name: 'foo',
path: '../foo', // this is the wrong path
),
- d.packageConfigEntry(
- name: 'myapp',
- path: '.',
- ),
+ d.packageConfigEntry(name: 'myapp', path: '.'),
]),
]).create();
@@ -239,43 +253,45 @@
await _implicitPubGet('Could not find `../foo/pubspec.yaml`');
});
- test("the lock file's SDK constraint doesn't match the current SDK",
- () async {
- // Avoid using a path dependency because it triggers the full validation
- // logic. We want to be sure SDK-validation works without that logic.
- server.serve(
- 'foo',
- '1.0.0',
- pubspec: {
- 'environment': {'sdk': '>=3.0.0 <3.1.0'},
- },
- );
-
- await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- 'environment': {'sdk': '^3.0.0'},
- 'dependencies': {'foo': '^1.0.0'},
- }),
- ]).create();
-
- await pubGet(environment: {'_PUB_TEST_SDK_VERSION': '3.0.0'});
-
- server.serve(
- 'foo',
- '1.0.1',
- pubspec: {
- 'environment': {'sdk': '^3.0.0'},
- },
- );
-
- await _implicitPubGet(
- 'The Dart SDK was updated since last package resolution.',
- );
- });
-
test(
- "the lock file's Flutter SDK constraint doesn't match the "
+ "the lock file's SDK constraint doesn't match the current SDK",
+ () async {
+ // Avoid using a path dependency because it triggers the full
+ // validation logic. We want to be sure SDK-validation works without
+ // that logic.
+ server.serve(
+ 'foo',
+ '1.0.0',
+ pubspec: {
+ 'environment': {'sdk': '>=3.0.0 <3.1.0'},
+ },
+ );
+
+ await d.dir(appPath, [
+ d.pubspec({
+ 'name': 'myapp',
+ 'environment': {'sdk': '^3.0.0'},
+ 'dependencies': {'foo': '^1.0.0'},
+ }),
+ ]).create();
+
+ await pubGet(environment: {'_PUB_TEST_SDK_VERSION': '3.0.0'});
+
+ server.serve(
+ 'foo',
+ '1.0.1',
+ pubspec: {
+ 'environment': {'sdk': '^3.0.0'},
+ },
+ );
+
+ await _implicitPubGet(
+ 'The Dart SDK was updated since last package resolution.',
+ );
+ },
+ );
+
+ test("the lock file's Flutter SDK constraint doesn't match the "
'current Flutter SDK', () async {
// Avoid using a path dependency because it triggers the full validation
// logic. We want to be sure SDK-validation works without that logic.
@@ -321,37 +337,40 @@
);
});
- test("a path dependency's dependency doesn't match the lockfile",
- () async {
- await d.dir('bar', [
- d.libPubspec('bar', '1.0.0', deps: {'foo': '1.0.0'}),
- ]).create();
-
- await d.dir(appPath, [
- d.appPubspec(
- dependencies: {
- 'bar': {'path': '../bar'},
- },
- ),
- ]).create();
-
- await pubGet();
-
- // Update bar's pubspec without touching the app's.
- await d.dir('bar', [
- d.libPubspec('bar', '1.0.0', deps: {'foo': '2.0.0'}),
- ]).create();
-
- // To ensure the timestamp is strictly later we need to touch again
- // here.
- await _touch(p.join(d.sandbox, 'bar', 'pubspec.yaml'));
-
- await _implicitPubGet('../bar/pubspec.yaml has changed '
- 'since the pubspec.lock file was generated.');
- });
-
test(
- "a path dependency's language version "
+ "a path dependency's dependency doesn't match the lockfile",
+ () async {
+ await d.dir('bar', [
+ d.libPubspec('bar', '1.0.0', deps: {'foo': '1.0.0'}),
+ ]).create();
+
+ await d.dir(appPath, [
+ d.appPubspec(
+ dependencies: {
+ 'bar': {'path': '../bar'},
+ },
+ ),
+ ]).create();
+
+ await pubGet();
+
+ // Update bar's pubspec without touching the app's.
+ await d.dir('bar', [
+ d.libPubspec('bar', '1.0.0', deps: {'foo': '2.0.0'}),
+ ]).create();
+
+ // To ensure the timestamp is strictly later we need to touch again
+ // here.
+ await _touch(p.join(d.sandbox, 'bar', 'pubspec.yaml'));
+
+ await _implicitPubGet(
+ '../bar/pubspec.yaml has changed '
+ 'since the pubspec.lock file was generated.',
+ );
+ },
+ );
+
+ test("a path dependency's language version "
"doesn't match the package_config.json", () async {
await d.dir('bar', [
d.libPubspec(
@@ -387,14 +406,15 @@
// here.
await _touch(p.join(d.sandbox, 'bar', 'pubspec.yaml'));
- await _implicitPubGet('../bar/pubspec.yaml has changed '
- 'since the pubspec.lock file was generated.');
+ await _implicitPubGet(
+ '../bar/pubspec.yaml has changed '
+ 'since the pubspec.lock file was generated.',
+ );
});
});
group("doesn't require the user to run pub get first if", () {
- test(
- 'the pubspec is older than the lockfile which is older than the '
+ test('the pubspec is older than the lockfile which is older than the '
'package-config, even if the contents are wrong', () async {
await d.dir(appPath, [
d.appPubspec(dependencies: {'foo': '1.0.0'}),
@@ -408,18 +428,20 @@
await _noImplicitPubGet();
});
- test("the pubspec is newer than the lockfile, but they're up-to-date",
- () async {
- await d.dir(appPath, [
- d.appPubspec(dependencies: {'foo': '1.0.0'}),
- ]).create();
+ test(
+ "the pubspec is newer than the lockfile, but they're up-to-date",
+ () async {
+ await d.dir(appPath, [
+ d.appPubspec(dependencies: {'foo': '1.0.0'}),
+ ]).create();
- await pubGet();
+ await pubGet();
- await _touch('pubspec.yaml');
+ await _touch('pubspec.yaml');
- await _noImplicitPubGet();
- });
+ await _noImplicitPubGet();
+ },
+ );
// Regression test for #1416
test('a path dependency has a dependency on the root package', () async {
@@ -443,12 +465,7 @@
});
test('has a path dependency, and nothing changed', () async {
- await d.dir('foo', [
- d.libPubspec(
- 'foo',
- '1.0.0',
- ),
- ]).create();
+ await d.dir('foo', [d.libPubspec('foo', '1.0.0')]).create();
await d.dir(appPath, [
d.appPubspec(
@@ -471,18 +488,19 @@
});
test(
- "the lockfile is newer than package_config.json, but it's up-to-date",
- () async {
- await d.dir(appPath, [
- d.appPubspec(dependencies: {'foo': '1.0.0'}),
- ]).create();
+ "the lockfile is newer than package_config.json, but it's up-to-date",
+ () async {
+ await d.dir(appPath, [
+ d.appPubspec(dependencies: {'foo': '1.0.0'}),
+ ]).create();
- await pubGet();
+ await pubGet();
- await _touch('pubspec.lock');
+ await _touch('pubspec.lock');
- await _noImplicitPubGet();
- });
+ await _noImplicitPubGet();
+ },
+ );
test("an overridden dependency's SDK constraint is unmatched", () async {
server.serve(
@@ -529,9 +547,7 @@
}
/// Ensures that pub doesn't require "dart pub get" for the current package.
-Future<void> _noImplicitPubGet({
- Map<String, String?>? environment,
-}) async {
+Future<void> _noImplicitPubGet({Map<String, String?>? environment}) async {
final buffer = StringBuffer();
await runEmbeddingToBuffer(
['pub', 'ensure-pubspec-resolved', '--verbose'],
@@ -549,8 +565,9 @@
final lockFileModified =
File(p.join(d.sandbox, 'myapp/pubspec.lock')).lastModifiedSync();
final packageConfigModified =
- File(p.join(d.sandbox, 'myapp/.dart_tool/package_config.json'))
- .lastModifiedSync();
+ File(
+ p.join(d.sandbox, 'myapp/.dart_tool/package_config.json'),
+ ).lastModifiedSync();
expect(!pubspecModified.isAfter(lockFileModified), isTrue);
expect(!lockFileModified.isAfter(packageConfigModified), isTrue);
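// A minimal sketch (not part of the diff) of the freshness invariant the
// assertions above check: pubspec.yaml must not be newer than pubspec.lock,
// which in turn must not be newer than package_config.json. `File` is from
// dart:io; the `myapp/` paths mirror the sandbox layout used by these tests.
final pubspecModified = File('myapp/pubspec.yaml').lastModifiedSync();
final lockModified = File('myapp/pubspec.lock').lastModifiedSync();
final configModified =
    File('myapp/.dart_tool/package_config.json').lastModifiedSync();
final resolutionLooksFresh =
    !pubspecModified.isAfter(lockModified) &&
    !lockModified.isAfter(configModified);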
diff --git a/test/embedding/get_executable_for_command.dart b/test/embedding/get_executable_for_command.dart
index 2994af8..4ed54de 100644
--- a/test/embedding/get_executable_for_command.dart
+++ b/test/embedding/get_executable_for_command.dart
@@ -12,11 +12,7 @@
import '../test_pub.dart';
import 'embedding_test.dart';
-enum ResolutionAttempt {
- resolution,
- fastPath,
- noResolution,
-}
+enum ResolutionAttempt { resolution, fastPath, noResolution }
Future<void> testGetExecutable(
String command,
@@ -55,12 +51,7 @@
reason: '${p.join(root, executable)} should exist',
);
final filtered = filterUnstableText(packageConfig ?? 'No package config');
- expect(
- output,
- contains(
- 'Package config: $filtered\n',
- ),
- );
+ expect(output, contains('Package config: $filtered\n'));
}
switch (resolution) {
case ResolutionAttempt.fastPath:
@@ -130,18 +121,14 @@
test('Error message when pubspec is broken', () async {
await servePackages();
await d.dir('foo', [
- d.pubspec({
- 'name': 'broken name',
- }),
+ d.pubspec({'name': 'broken name'}),
]).create();
await d.dir(appPath, [
d.pubspec({
'name': 'myapp',
'dependencies': {
- 'foo': {
- 'path': '../foo',
- },
+ 'foo': {'path': '../foo'},
},
}),
]).create();
@@ -170,9 +157,7 @@
'name': 'myapp',
'dependencies': {'foo': '^1.0.0'},
}),
- d.dir('bin', [
- d.file('myapp.dart', 'main() {print(42);}'),
- ]),
+ d.dir('bin', [d.file('myapp.dart', 'main() {print(42);}')]),
]).create();
await servePackages();
@@ -190,9 +175,7 @@
test('Reports parse failure', () async {
await servePackages();
await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- }),
+ d.pubspec({'name': 'myapp'}),
]).create();
await testGetExecutable(
'::',
@@ -206,12 +189,8 @@
test('Reports compilation failure', () async {
await servePackages();
await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- }),
- d.dir('bin', [
- d.file('foo.dart', 'main() {'),
- ]),
+ d.pubspec({'name': 'myapp'}),
+ d.dir('bin', [d.file('foo.dart', 'main() {')]),
]).create();
await servePackages();
@@ -254,10 +233,7 @@
d.pubspec({
'name': 'myapp',
'dependencies': {
- 'foo': {
- 'hosted': globalServer.url,
- 'version': '^1.0.0',
- },
+ 'foo': {'hosted': globalServer.url, 'version': '^1.0.0'},
},
}),
d.dir('bin', [
@@ -435,10 +411,7 @@
'1.2.3',
deps: {
'a': 'any',
- 'foo': {
- 'hosted': globalServer.url,
- 'version': '^1.0.0',
- },
+ 'foo': {'hosted': globalServer.url, 'version': '^1.0.0'},
},
extras: {
'workspace': ['pkgs/a', 'pkgs/b'],
@@ -464,19 +437,11 @@
d.file('tool.dart', 'main() {print(42);}'),
]),
d.dir('sub', [
- d.libPubspec(
- 'sub',
- '1.0.0',
- resolutionWorkspace: true,
- ),
+ d.libPubspec('sub', '1.0.0', resolutionWorkspace: true),
]),
]),
d.dir('b', [
- d.libPubspec(
- 'b',
- '1.0.0',
- resolutionWorkspace: true,
- ),
+ d.libPubspec('b', '1.0.0', resolutionWorkspace: true),
d.dir('bin', [
d.file('b.dart', 'main() {print(42);}'),
d.file('tool.dart', 'main() {print(42);}'),
@@ -484,9 +449,7 @@
]),
]),
]).create();
- await pubGet(
- environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- );
+ await pubGet(environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'});
await testGetExecutable(
'myapp',
p.join(d.sandbox, appPath, 'pkgs', 'a'),
@@ -517,22 +480,20 @@
'myapp.dart-3.5.0.snapshot',
),
environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- packageConfig:
- p.join('..', '..', '..', '.dart_tool', 'package_config.json'),
+ packageConfig: p.join(
+ '..',
+ '..',
+ '..',
+ '.dart_tool',
+ 'package_config.json',
+ ),
resolution: ResolutionAttempt.fastPath,
);
await testGetExecutable(
'a',
p.join(d.sandbox, appPath, 'pkgs'),
- executable: p.join(
- d.sandbox,
- appPath,
- 'pkgs',
- 'a',
- 'bin',
- 'a.dart',
- ),
+ executable: p.join(d.sandbox, appPath, 'pkgs', 'a', 'bin', 'a.dart'),
allowSnapshot: false,
packageConfig: p.join('..', '.dart_tool', 'package_config.json'),
environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
@@ -542,14 +503,7 @@
'b:tool',
p.join(d.sandbox, appPath),
allowSnapshot: false,
- executable: p.join(
- d.sandbox,
- appPath,
- 'pkgs',
- 'b',
- 'bin',
- 'tool.dart',
- ),
+ executable: p.join(d.sandbox, appPath, 'pkgs', 'b', 'bin', 'tool.dart'),
packageConfig: p.join('.dart_tool', 'package_config.json'),
environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
resolution: ResolutionAttempt.fastPath,
@@ -573,14 +527,7 @@
':tool',
p.join(d.sandbox, appPath, 'pkgs', 'a'),
allowSnapshot: false,
- executable: p.join(
- d.sandbox,
- appPath,
- 'pkgs',
- 'a',
- 'bin',
- 'tool.dart',
- ),
+ executable: p.join(d.sandbox, appPath, 'pkgs', 'a', 'bin', 'tool.dart'),
packageConfig: p.join('..', '..', '.dart_tool', 'package_config.json'),
environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
resolution: ResolutionAttempt.fastPath,
diff --git a/test/error_group_test.dart b/test/error_group_test.dart
index 0cdc87a..bdf7f19 100644
--- a/test/error_group_test.dart
+++ b/test/error_group_test.dart
@@ -27,8 +27,7 @@
errorGroup.signalError(const FormatException());
});
- test(
- "shouldn't allow additional futures or streams once an error has been "
+ test("shouldn't allow additional futures or streams once an error has been "
'signaled', () {
expect(errorGroup.done, throwsFormatException);
errorGroup.signalError(const FormatException());
@@ -38,8 +37,9 @@
throwsStateError,
);
expect(
- () => errorGroup
- .registerStream(StreamController<void>(sync: true).stream),
+ () => errorGroup.registerStream(
+ StreamController<void>(sync: true).stream,
+ ),
throwsStateError,
);
});
@@ -61,43 +61,41 @@
completer.complete('value');
});
- test(
- "shouldn't allow additional futures or streams once .done has "
+ test("shouldn't allow additional futures or streams once .done has "
'been called', () {
completer.complete('value');
expect(
- completer.future
- .then((_) => errorGroup.registerFuture(Future<void>.value())),
+ completer.future.then(
+ (_) => errorGroup.registerFuture(Future<void>.value()),
+ ),
throwsStateError,
);
expect(
completer.future.then(
- (_) => errorGroup
- .registerStream(StreamController<void>(sync: true).stream),
+ (_) => errorGroup.registerStream(
+ StreamController<void>(sync: true).stream,
+ ),
),
throwsStateError,
);
});
- test(
- 'should pass through an exception from the future if it has a '
+ test('should pass through an exception from the future if it has a '
'listener', () {
expect(future, throwsFormatException);
// errorGroup shouldn't top-level the exception
completer.completeError(const FormatException());
});
- test(
- 'should notify the error group of an exception from the future even '
+ test('should notify the error group of an exception from the future even '
'if it has a listener', () {
expect(future, throwsFormatException);
expect(errorGroup.done, throwsFormatException);
completer.completeError(const FormatException());
});
- test(
- 'should pass a signaled exception to the future if it has a listener '
+ test('should pass a signaled exception to the future if it has a listener '
'and should ignore a subsequent value from that future', () {
expect(future, throwsFormatException);
// errorGroup shouldn't top-level the exception
@@ -105,8 +103,7 @@
completer.complete('value');
});
- test(
- 'should pass a signaled exception to the future if it has a listener '
+ test('should pass a signaled exception to the future if it has a listener '
'and should ignore a subsequent exception from that future', () {
expect(future, throwsFormatException);
// errorGroup shouldn't top-level the exception
@@ -114,16 +111,14 @@
completer.completeError(ArgumentError());
});
- test(
- 'should notify the error group of a signaled exception even if the '
+ test('should notify the error group of a signaled exception even if the '
'future has a listener', () {
expect(future, throwsFormatException);
expect(errorGroup.done, throwsFormatException);
errorGroup.signalError(const FormatException());
});
- test(
- 'should complete .done if the future receives a value even if the '
+ test('should complete .done if the future receives a value even if the '
"future doesn't have a listener", () {
expect(errorGroup.done, completes);
completer.complete('value');
@@ -132,8 +127,7 @@
expect(errorGroup.done.then((_) => future), completion(equals('value')));
});
- test(
- 'should pipe an exception from the future to .done if the future '
+ test('should pipe an exception from the future to .done if the future '
"doesn't have a listener", () {
expect(errorGroup.done, throwsFormatException);
completer.completeError(const FormatException());
@@ -147,8 +141,7 @@
);
});
- test(
- "should pass a signaled exception to .done if the future doesn't have "
+ test("should pass a signaled exception to .done if the future doesn't have "
'a listener', () {
expect(errorGroup.done, throwsFormatException);
errorGroup.signalError(const FormatException());
@@ -178,8 +171,7 @@
future2 = errorGroup.registerFuture(completer2.future);
});
- test(
- 'should pipe exceptions from one future to the other and to '
+ test('should pipe exceptions from one future to the other and to '
'.complete', () {
expect(future1, throwsFormatException);
expect(future2, throwsFormatException);
@@ -188,8 +180,7 @@
completer1.completeError(const FormatException());
});
- test(
- 'each future should be able to complete with a value '
+ test('each future should be able to complete with a value '
'independently', () {
expect(future1, completion(equals('value1')));
expect(future2, completion(equals('value2')));
@@ -199,8 +190,7 @@
completer2.complete('value2');
});
- test(
- "shouldn't throw a top-level exception if a future receives an error "
+ test("shouldn't throw a top-level exception if a future receives an error "
'after the other listened future completes', () {
expect(future1, completion(equals('value')));
completer1.complete('value');
@@ -214,8 +204,7 @@
);
});
- test(
- "shouldn't throw a top-level exception if an error is signaled after "
+ test("shouldn't throw a top-level exception if an error is signaled after "
'one listened future completes', () {
expect(future1, completion(equals('value')));
completer1.complete('value');
@@ -259,24 +248,21 @@
..close();
});
- test(
- 'should pass through an error from the stream if it has a '
+ test('should pass through an error from the stream if it has a '
'listener', () {
expect(stream.first, throwsFormatException);
// errorGroup shouldn't top-level the exception
controller.addError(const FormatException());
});
- test(
- 'should notify the error group of an exception from the stream even '
+ test('should notify the error group of an exception from the stream even '
'if it has a listener', () {
expect(stream.first, throwsFormatException);
expect(errorGroup.done, throwsFormatException);
controller.addError(const FormatException());
});
- test(
- 'should pass a signaled exception to the stream if it has a listener '
+ test('should pass a signaled exception to the stream if it has a listener '
'and should unsubscribe that stream', () {
// errorGroup shouldn't top-level the exception
expect(stream.first, throwsFormatException);
@@ -285,16 +271,14 @@
expect(() => controller.add('value'), returnsNormally);
});
- test(
- 'should notify the error group of a signaled exception even if the '
+ test('should notify the error group of a signaled exception even if the '
'stream has a listener', () {
expect(stream.first, throwsFormatException);
expect(errorGroup.done, throwsFormatException);
errorGroup.signalError(const FormatException());
});
- test(
- 'should see one value and complete .done when the stream is done even '
+ test('should see one value and complete .done when the stream is done even '
"if the stream doesn't have a listener", () {
expect(errorGroup.done, completes);
controller.add('value');
@@ -319,8 +303,7 @@
stream = errorGroup.registerStream(controller.stream);
});
- test(
- 'should complete .done when the stream is done even if the stream '
+ test('should complete .done when the stream is done even if the stream '
"doesn't have a listener", () {
expect(errorGroup.done, completes);
controller.add('value');
@@ -333,8 +316,7 @@
);
});
- test(
- 'should pipe an exception from the stream to .done if the stream '
+ test('should pipe an exception from the stream to .done if the stream '
"doesn't have a listener", () {
expect(errorGroup.done, throwsFormatException);
controller.addError(const FormatException());
@@ -349,8 +331,7 @@
);
});
- test(
- "should pass a signaled exception to .done if the stream doesn't "
+ test("should pass a signaled exception to .done if the stream doesn't "
'have a listener', () {
expect(errorGroup.done, throwsFormatException);
errorGroup.signalError(const FormatException());
@@ -380,14 +361,16 @@
stream2 = errorGroup.registerStream(controller2.stream);
});
- test('should pipe exceptions from one stream to the other and to .done',
- () {
- expect(stream1.first, throwsFormatException);
- expect(stream2.first, throwsFormatException);
- expect(errorGroup.done, throwsFormatException);
+ test(
+ 'should pipe exceptions from one stream to the other and to .done',
+ () {
+ expect(stream1.first, throwsFormatException);
+ expect(stream2.first, throwsFormatException);
+ expect(errorGroup.done, throwsFormatException);
- controller1.addError(const FormatException());
- });
+ controller1.addError(const FormatException());
+ },
+ );
test('each future should be able to emit values independently', () {
expect(stream1.toList(), completion(equals(['value1.1', 'value1.2'])));
@@ -404,8 +387,7 @@
..close();
});
- test(
- "shouldn't throw a top-level exception if a stream receives an error "
+ test("shouldn't throw a top-level exception if a stream receives an error "
'after the other listened stream completes', () {
final signal = Completer<void>();
expect(
@@ -426,8 +408,7 @@
);
});
- test(
- "shouldn't throw a top-level exception if an error is signaled after "
+ test("shouldn't throw a top-level exception if an error is signaled after "
'one listened stream completes', () {
final signal = Completer<void>();
expect(
@@ -479,8 +460,7 @@
completer.completeError(const FormatException());
});
- test(
- 'the stream and the future should be able to complete/emit values '
+ test('the stream and the future should be able to complete/emit values '
'independently', () {
expect(stream.toList(), completion(equals(['value1.1', 'value1.2'])));
expect(future, completion(equals('value2.0')));
@@ -494,22 +474,23 @@
});
test(
- "shouldn't throw a top-level exception if the stream receives an error "
- 'after the listened future completes', () {
- expect(future, completion(equals('value')));
- completer.complete('value');
+ "shouldn't throw a top-level exception if the stream receives an error "
+ 'after the listened future completes',
+ () {
+ expect(future, completion(equals('value')));
+ completer.complete('value');
- expect(
- future.then((_) {
- // shouldn't cause a top-level exception
- controller.addError(const FormatException());
- }),
- completes,
- );
- });
+ expect(
+ future.then((_) {
+ // shouldn't cause a top-level exception
+ controller.addError(const FormatException());
+ }),
+ completes,
+ );
+ },
+ );
- test(
- "shouldn't throw a top-level exception if the future receives an "
+ test("shouldn't throw a top-level exception if the future receives an "
'error after the listened stream completes', () {
final signal = Completer<void>();
expect(
diff --git a/test/get/dry_run_does_not_apply_changes_test.dart b/test/get/dry_run_does_not_apply_changes_test.dart
index 1571e8b..1b47ef6 100644
--- a/test/get/dry_run_does_not_apply_changes_test.dart
+++ b/test/get/dry_run_does_not_apply_changes_test.dart
@@ -16,9 +16,10 @@
await pubGet(
args: ['--dry-run'],
- output: allOf(
- [contains('+ foo 1.0.0'), contains('Would change 1 dependency.')],
- ),
+ output: allOf([
+ contains('+ foo 1.0.0'),
+ contains('Would change 1 dependency.'),
+ ]),
);
await d.dir(appPath, [
diff --git a/test/get/enforce_lockfile_test.dart b/test/get/enforce_lockfile_test.dart
index e15f0c4..dac8ede 100644
--- a/test/get/enforce_lockfile_test.dart
+++ b/test/get/enforce_lockfile_test.dart
@@ -12,23 +12,26 @@
import '../test_pub.dart';
Future<void> main() async {
- test('Recreates .dart_tool/package_config.json, redownloads archives',
- () async {
- final server = await servePackages();
- server.serve('foo', '1.0.0');
- await appDir(dependencies: {'foo': 'any'}).create();
- await pubGet();
- final packageConfig =
- File(path(p.join(appPath, '.dart_tool', 'package_config.json')));
- packageConfig.deleteSync();
- await runPub(args: ['cache', 'clean', '-f']);
- await pubGet(args: ['--enforce-lockfile']);
- expect(packageConfig.existsSync(), isTrue);
- await cacheDir({'foo': '1.0.0'}).validate();
- await appPackageConfigFile([
- packageConfigEntry(name: 'foo', version: '1.0.0'),
- ]).validate();
- });
+ test(
+ 'Recreates .dart_tool/package_config.json, redownloads archives',
+ () async {
+ final server = await servePackages();
+ server.serve('foo', '1.0.0');
+ await appDir(dependencies: {'foo': 'any'}).create();
+ await pubGet();
+ final packageConfig = File(
+ path(p.join(appPath, '.dart_tool', 'package_config.json')),
+ );
+ packageConfig.deleteSync();
+ await runPub(args: ['cache', 'clean', '-f']);
+ await pubGet(args: ['--enforce-lockfile']);
+ expect(packageConfig.existsSync(), isTrue);
+ await cacheDir({'foo': '1.0.0'}).validate();
+ await appPackageConfigFile([
+ packageConfigEntry(name: 'foo', version: '1.0.0'),
+ ]).validate();
+ },
+ );
test('Refuses to get if no lockfile exists', () async {
await appDir(dependencies: {}).create();
@@ -73,8 +76,9 @@
);
// Deleting the version-listing cache will cause it to be refetched, and the
// error will happen.
- File(p.join(globalServer.cachingPath, '.cache', 'bar-versions.json'))
- .deleteSync();
+ File(
+ p.join(globalServer.cachingPath, '.cache', 'bar-versions.json'),
+ ).deleteSync();
final example = p.join('.', 'example');
final examplePubspec = p.join('example', 'pubspec.yaml');
@@ -91,9 +95,11 @@
'Unable to satisfy `$examplePubspec` '
'using `$examplePubspecLock` in `$example`.',
),
- contains('To update `$examplePubspecLock` run '
- '`dart pub get` in `$example` without\n'
- '`--enforce-lockfile`.'),
+ contains(
+ 'To update `$examplePubspecLock` run '
+ '`dart pub get` in `$example` without\n'
+ '`--enforce-lockfile`.',
+ ),
),
exitCode: DATA,
);
@@ -117,68 +123,72 @@
);
});
- test('Refuses to get if package is locked to version not matching constraint',
- () async {
- final server = await servePackages();
- server.serve('foo', '1.0.0');
- server.serve('foo', '2.0.0');
- await appDir(dependencies: {'foo': '^1.0.0'}).create();
- await pubGet();
- await appDir(dependencies: {'foo': '^2.0.0'}).create();
- await pubGet(
- args: ['--enforce-lockfile'],
- output: allOf([
- contains('> foo 2.0.0 (was 1.0.0)'),
- contains('Would have changed 1 dependency.'),
- ]),
- error: contains('Unable to satisfy `pubspec.yaml` using `pubspec.lock`.'),
- exitCode: DATA,
- );
- });
-
- test("Refuses to get if hash on server doesn't correspond to lockfile",
- () async {
- final server = await servePackages();
- server.serveContentHashes = true;
- server.serve('foo', '1.0.0');
- await appDir(dependencies: {'foo': '^1.0.0'}).create();
- await pubGet();
- server.serve(
- 'foo',
- '1.0.0',
- contents: [
- file('README.md', 'Including this will change the content-hash.'),
- ],
- );
- // Deleting the version-listing cache will cause it to be refetched, and the
- // error will happen.
- File(p.join(globalServer.cachingPath, '.cache', 'foo-versions.json'))
- .deleteSync();
- await pubGet(
- args: ['--enforce-lockfile'],
- output: allOf(
- contains('~ foo 1.0.0 (was 1.0.0)'),
- contains('Would have changed 1 dependency.'),
- ),
- error: allOf(
- contains('Cached version of foo-1.0.0 has wrong hash - redownloading.'),
- contains(
- 'The existing content-hash from pubspec.lock '
- 'doesn\'t match contents for:',
- ),
- contains(
- ' * foo-1.0.0 from "${server.url}"',
- ),
- contains(
+ test(
+ 'Refuses to get if package is locked to version not matching constraint',
+ () async {
+ final server = await servePackages();
+ server.serve('foo', '1.0.0');
+ server.serve('foo', '2.0.0');
+ await appDir(dependencies: {'foo': '^1.0.0'}).create();
+ await pubGet();
+ await appDir(dependencies: {'foo': '^2.0.0'}).create();
+ await pubGet(
+ args: ['--enforce-lockfile'],
+ output: allOf([
+ contains('> foo 2.0.0 (was 1.0.0)'),
+ contains('Would have changed 1 dependency.'),
+ ]),
+ error: contains(
'Unable to satisfy `pubspec.yaml` using `pubspec.lock`.',
),
- ),
- exitCode: DATA,
- );
- });
+ exitCode: DATA,
+ );
+ },
+ );
test(
- 'Refuses to get if archive on legacy server '
+ "Refuses to get if hash on server doesn't correspond to lockfile",
+ () async {
+ final server = await servePackages();
+ server.serveContentHashes = true;
+ server.serve('foo', '1.0.0');
+ await appDir(dependencies: {'foo': '^1.0.0'}).create();
+ await pubGet();
+ server.serve(
+ 'foo',
+ '1.0.0',
+ contents: [
+ file('README.md', 'Including this will change the content-hash.'),
+ ],
+ );
+ // Deleting the version-listing cache will cause it to be refetched, and
+ // the error will happen.
+ File(
+ p.join(globalServer.cachingPath, '.cache', 'foo-versions.json'),
+ ).deleteSync();
+ await pubGet(
+ args: ['--enforce-lockfile'],
+ output: allOf(
+ contains('~ foo 1.0.0 (was 1.0.0)'),
+ contains('Would have changed 1 dependency.'),
+ ),
+ error: allOf(
+ contains(
+ 'Cached version of foo-1.0.0 has wrong hash - redownloading.',
+ ),
+ contains(
+ 'The existing content-hash from pubspec.lock '
+ 'doesn\'t match contents for:',
+ ),
+ contains(' * foo-1.0.0 from "${server.url}"'),
+ contains('Unable to satisfy `pubspec.yaml` using `pubspec.lock`.'),
+ ),
+ exitCode: DATA,
+ );
+ },
+ );
+
+ test('Refuses to get if archive on legacy server '
'doesn\'t have hash corresponding to lockfile', () async {
final server = await servePackages();
server.serveContentHashes = false;
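// A rough sketch (assumptions flagged) of the hash-mismatch setup used above:
// with content hashes enabled, re-serving the same version with different
// contents and deleting the cached version listing makes the next `pub get`
// see an archive hash that no longer matches pubspec.lock. `server`, `file`
// and `globalServer.cachingPath` are the repo's test-harness helpers shown in
// the hunks above.
server.serveContentHashes = true;
server.serve(
  'foo',
  '1.0.0',
  contents: [file('README.md', 'Different contents, different hash.')],
);
File(
  p.join(globalServer.cachingPath, '.cache', 'foo-versions.json'),
).deleteSync();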
diff --git a/test/get/flutter_constraint_upper_bound_ignored_test.dart b/test/get/flutter_constraint_upper_bound_ignored_test.dart
index c5b733d..3491746 100644
--- a/test/get/flutter_constraint_upper_bound_ignored_test.dart
+++ b/test/get/flutter_constraint_upper_bound_ignored_test.dart
@@ -10,8 +10,9 @@
void main() {
test('pub get succeeds despite of "invalid" flutter upper bound', () async {
- final fakeFlutterRoot =
- d.dir('fake_flutter_root', [d.flutterVersion('1.23.0')]);
+ final fakeFlutterRoot = d.dir('fake_flutter_root', [
+ d.flutterVersion('1.23.0'),
+ ]);
await fakeFlutterRoot.create();
await d.dir(appPath, [
d.pubspec({
diff --git a/test/get/gets_in_example_folder_test.dart b/test/get/gets_in_example_folder_test.dart
index 8e990da..c65f688 100644
--- a/test/get/gets_in_example_folder_test.dart
+++ b/test/get/gets_in_example_folder_test.dart
@@ -17,50 +17,52 @@
void main() {
forBothPubGetAndUpgrade((command) {
test(
- 'pub ${command.name} --example also retrieves dependencies in example/',
- () async {
- await d.dir(appPath, [
- d.appPubspec(),
- d.dir('example', [
- d.pubspec({
- 'name': 'app_example',
- 'dependencies': {
- 'myapp': {'path': '..'},
- },
- }),
- ]),
- ]).create();
+ 'pub ${command.name} --example also retrieves dependencies in example/',
+ () async {
+ await d.dir(appPath, [
+ d.appPubspec(),
+ d.dir('example', [
+ d.pubspec({
+ 'name': 'app_example',
+ 'dependencies': {
+ 'myapp': {'path': '..'},
+ },
+ }),
+ ]),
+ ]).create();
- await pubCommand(command, args: ['--no-example']);
- final lockFile = File(p.join(d.sandbox, appPath, 'pubspec.lock'));
- final exampleLockFile = File(
- p.join(d.sandbox, appPath, 'example', 'pubspec.lock'),
- );
+ await pubCommand(command, args: ['--no-example']);
+ final lockFile = File(p.join(d.sandbox, appPath, 'pubspec.lock'));
+ final exampleLockFile = File(
+ p.join(d.sandbox, appPath, 'example', 'pubspec.lock'),
+ );
- expect(lockFile.existsSync(), true);
- expect(exampleLockFile.existsSync(), false);
- await pubCommand(
- command,
- args: ['--example'],
- output: command.name == 'get'
- ? '''
+ expect(lockFile.existsSync(), true);
+ expect(exampleLockFile.existsSync(), false);
+ await pubCommand(
+ command,
+ args: ['--example'],
+ output:
+ command.name == 'get'
+ ? '''
Resolving dependencies...
Downloading packages...
Got dependencies!
Resolving dependencies in `$dotExample`...
Downloading packages...
Got dependencies in `$dotExample`.'''
- : '''
+ : '''
Resolving dependencies...
Downloading packages...
No dependencies changed.
Resolving dependencies in `$dotExample`...
Downloading packages...
Got dependencies in `$dotExample`.''',
- );
- expect(lockFile.existsSync(), true);
- expect(exampleLockFile.existsSync(), true);
- });
+ );
+ expect(lockFile.existsSync(), true);
+ expect(exampleLockFile.existsSync(), true);
+ },
+ );
test('Failures are not summarized', () async {
await d.dir(appPath, [
diff --git a/test/get/git/check_out_and_upgrade_test.dart b/test/get/git/check_out_and_upgrade_test.dart
index 32a320b..2bf0743 100644
--- a/test/get/git/check_out_and_upgrade_test.dart
+++ b/test/get/git/check_out_and_upgrade_test.dart
@@ -11,16 +11,18 @@
test('checks out and upgrades a package from Git', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
await pubGet();
@@ -33,19 +35,17 @@
await d.dir(cachePath, [
d.dir('git', [
- d.dir('cache', [
- d.gitPackageRepoCacheDir('foo'),
- ]),
+ d.dir('cache', [d.gitPackageRepoCacheDir('foo')]),
d.gitPackageRevisionCacheDir('foo'),
]),
]).validate();
final originalFooSpec = packageSpec('foo');
- await d.git(
- 'foo.git',
- [d.libDir('foo', 'foo 2'), d.libPubspec('foo', '1.0.0')],
- ).commit();
+ await d.git('foo.git', [
+ d.libDir('foo', 'foo 2'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).commit();
await pubUpgrade(output: contains('Changed 1 dependency!'));
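// A sketch of the git cache layout the descriptors above validate (directory
// names are abbreviated here; the real ones carry hashes and revision ids):
// $PUB_CACHE/git/cache/ holds the shared clone of the repository, and
// $PUB_CACHE/git/ holds one checked-out directory per locked revision.
await d.dir(cachePath, [
  d.dir('git', [
    d.dir('cache', [d.gitPackageRepoCacheDir('foo')]), // shared repo clone
    d.gitPackageRevisionCacheDir('foo'), // per-revision checkout
  ]),
]).validate();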
diff --git a/test/get/git/check_out_branch_test.dart b/test/get/git/check_out_branch_test.dart
index b98ac8e..f07140d 100644
--- a/test/get/git/check_out_branch_test.dart
+++ b/test/get/git/check_out_branch_test.dart
@@ -11,33 +11,33 @@
test('checks out a package at a specific branch from Git', () async {
ensureGit();
- final repo = d.git(
- 'foo.git',
- [d.libDir('foo', 'foo 1'), d.libPubspec('foo', '1.0.0')],
- );
+ final repo = d.git('foo.git', [
+ d.libDir('foo', 'foo 1'),
+ d.libPubspec('foo', '1.0.0'),
+ ]);
await repo.create();
await repo.runGit(['branch', 'old']);
- await d.git(
- 'foo.git',
- [d.libDir('foo', 'foo 2'), d.libPubspec('foo', '1.0.0')],
- ).commit();
+ await d.git('foo.git', [
+ d.libDir('foo', 'foo 2'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).commit();
- await d.appDir(
- dependencies: {
- 'foo': {
- 'git': {'url': '../foo.git', 'ref': 'old'},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'git': {'url': '../foo.git', 'ref': 'old'},
+ },
+ },
+ )
+ .create();
await pubGet();
await d.dir(cachePath, [
d.dir('git', [
- d.dir('cache', [
- d.gitPackageRepoCacheDir('foo'),
- ]),
+ d.dir('cache', [d.gitPackageRepoCacheDir('foo')]),
d.gitPackageRevisionCacheDir('foo', modifier: 1),
]),
]).validate();
diff --git a/test/get/git/check_out_revision_test.dart b/test/get/git/check_out_revision_test.dart
index 591f257..0472524 100644
--- a/test/get/git/check_out_revision_test.dart
+++ b/test/get/git/check_out_revision_test.dart
@@ -11,33 +11,33 @@
test('checks out a package at a specific revision from Git', () async {
ensureGit();
- final repo = d.git(
- 'foo.git',
- [d.libDir('foo', 'foo 1'), d.libPubspec('foo', '1.0.0')],
- );
+ final repo = d.git('foo.git', [
+ d.libDir('foo', 'foo 1'),
+ d.libPubspec('foo', '1.0.0'),
+ ]);
await repo.create();
final commit = await repo.revParse('HEAD');
- await d.git(
- 'foo.git',
- [d.libDir('foo', 'foo 2'), d.libPubspec('foo', '1.0.0')],
- ).commit();
+ await d.git('foo.git', [
+ d.libDir('foo', 'foo 2'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).commit();
- await d.appDir(
- dependencies: {
- 'foo': {
- 'git': {'url': '../foo.git', 'ref': commit},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'git': {'url': '../foo.git', 'ref': commit},
+ },
+ },
+ )
+ .create();
await pubGet();
await d.dir(cachePath, [
d.dir('git', [
- d.dir('cache', [
- d.gitPackageRepoCacheDir('foo'),
- ]),
+ d.dir('cache', [d.gitPackageRepoCacheDir('foo')]),
d.gitPackageRevisionCacheDir('foo', modifier: 1),
]),
]).validate();
diff --git a/test/get/git/check_out_test.dart b/test/get/git/check_out_test.dart
index 0a79b4d..0478a3a 100644
--- a/test/get/git/check_out_test.dart
+++ b/test/get/git/check_out_test.dart
@@ -18,16 +18,18 @@
test('checks out a package from Git', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
await pubGet();
@@ -37,7 +39,8 @@
expect(
dig<String>(lockfile, ['packages', 'foo', 'description', 'url']),
'../foo.git',
- reason: 'The relative path should be preserved, '
+ reason:
+ 'The relative path should be preserved, '
'and be a url (forward slashes on all platforms)',
);
@@ -54,22 +57,20 @@
test('checks out a package from Git with relative pub cache', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
- await pubGet(
- environment: {
- 'PUB_CACHE': './pub_cache/',
- },
- );
+ await pubGet(environment: {'PUB_CACHE': './pub_cache/'});
await d.dir(appPath, [
d.dir('pub_cache', [
@@ -95,39 +96,44 @@
'''),
]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
await pubGet();
});
- test(
- 'checks out a package from Git with a name that is not a valid '
+ test('checks out a package from Git with a name that is not a valid '
'file name in the url', () async {
ensureGit();
- final descriptor =
- d.git('foo.git', [d.libDir('foo'), d.libPubspec('foo', '1.0.0')]);
+ final descriptor = d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]);
await descriptor.create();
- await runProcess(
- 'git',
- ['update-server-info'],
- workingDir: descriptor.io.path,
- );
+ await runProcess('git', [
+ 'update-server-info',
+ ], workingDir: descriptor.io.path);
const funkyName = '@:+*foo';
- final server =
- await _serveDirectory(p.join(descriptor.io.path, '.git'), funkyName);
+ final server = await _serveDirectory(
+ p.join(descriptor.io.path, '.git'),
+ funkyName,
+ );
- await d.appDir(
- dependencies: {
- 'foo': {'git': 'http://localhost:${server.url.port}/$funkyName'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': 'http://localhost:${server.url.port}/$funkyName'},
+ },
+ )
+ .create();
await pubGet();
diff --git a/test/get/git/check_out_transitive_test.dart b/test/get/git/check_out_transitive_test.dart
index 339416d..b5495cf 100644
--- a/test/get/git/check_out_transitive_test.dart
+++ b/test/get/git/check_out_transitive_test.dart
@@ -24,36 +24,41 @@
'1.0.0',
deps: {
'bar': {
- 'git': p
- .toUri(p.absolute(d.sandbox, appPath, '../bar.git'))
- .toString(),
+ 'git':
+ p
+ .toUri(p.absolute(d.sandbox, appPath, '../bar.git'))
+ .toString(),
},
},
),
]).create();
- await d.git(
- 'bar.git',
- [d.libDir('bar'), d.libPubspec('bar', '1.0.0')],
- ).create();
+ await d.git('bar.git', [
+ d.libDir('bar'),
+ d.libPubspec('bar', '1.0.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {
- 'git':
- p.toUri(p.absolute(d.sandbox, appPath, '../foo.git')).toString(),
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'git':
+ p
+ .toUri(p.absolute(d.sandbox, appPath, '../foo.git'))
+ .toString(),
+ },
+ },
+ )
+ .create();
await pubGet();
await d.dir(cachePath, [
d.dir('git', [
- d.dir(
- 'cache',
- [d.gitPackageRepoCacheDir('foo'), d.gitPackageRepoCacheDir('bar')],
- ),
+ d.dir('cache', [
+ d.gitPackageRepoCacheDir('foo'),
+ d.gitPackageRepoCacheDir('bar'),
+ ]),
d.gitPackageRevisionCacheDir('foo'),
d.gitPackageRevisionCacheDir('bar'),
]),
@@ -77,19 +82,23 @@
),
]).create();
- await d.git(
- 'bar.git',
- [d.libDir('bar'), d.libPubspec('bar', '1.0.0')],
- ).create();
+ await d.git('bar.git', [
+ d.libDir('bar'),
+ d.libPubspec('bar', '1.0.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {
- 'git':
- p.toUri(p.absolute(d.sandbox, appPath, '../foo.git')).toString(),
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'git':
+ p
+ .toUri(p.absolute(d.sandbox, appPath, '../foo.git'))
+ .toString(),
+ },
+ },
+ )
+ .create();
await pubGet(
error: contains(
@@ -117,18 +126,21 @@
]),
]).create();
- await d.appDir(
- dependencies: {
- 'foo': {
- 'git': {
- 'url': p
- .toUri(p.absolute(d.sandbox, appPath, '../foo.git'))
- .toString(),
- 'path': 'pkgs/foo',
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'git': {
+ 'url':
+ p
+ .toUri(p.absolute(d.sandbox, appPath, '../foo.git'))
+ .toString(),
+ 'path': 'pkgs/foo',
+ },
+ },
},
- },
- },
- ).create();
+ )
+ .create();
await pubGet();
final lockFile = loadYaml(
@@ -141,8 +153,7 @@
);
});
- test(
- 'can have relative path dependencies '
+ test('can have relative path dependencies '
'to the repo root dir transitively from Git', () async {
ensureGit();
@@ -159,24 +170,26 @@
d.libPubspec('bar', '1.0.0'),
]).create();
- await d.appDir(
- dependencies: {
- 'foo': {
- 'git': {
- 'url': p
- .toUri(p.absolute(d.sandbox, appPath, '../foo.git'))
- .toString(),
- 'path': 'foo/',
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'git': {
+ 'url':
+ p
+ .toUri(p.absolute(d.sandbox, appPath, '../foo.git'))
+ .toString(),
+ 'path': 'foo/',
+ },
+ },
},
- },
- },
- ).create();
+ )
+ .create();
await pubGet();
});
- test(
- 'cannot have relative path dependencies transitively from Git '
+ test('cannot have relative path dependencies transitively from Git '
'to outside the repo', () async {
ensureGit();
@@ -192,14 +205,18 @@
await d.dir('bar', [d.libPubspec('bar', '1.0.0')]).create();
- await d.appDir(
- dependencies: {
- 'foo': {
- 'git':
- p.toUri(p.absolute(d.sandbox, appPath, '../foo.git')).toString(),
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'git':
+ p
+ .toUri(p.absolute(d.sandbox, appPath, '../foo.git'))
+ .toString(),
+ },
+ },
+ )
+ .create();
await pubGet(
exitCode: DATA,
diff --git a/test/get/git/check_out_twice_test.dart b/test/get/git/check_out_twice_test.dart
index 7b1a379..27a02cc 100644
--- a/test/get/git/check_out_twice_test.dart
+++ b/test/get/git/check_out_twice_test.dart
@@ -11,16 +11,18 @@
test('checks out a package from Git twice', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
await pubGet();
diff --git a/test/get/git/check_out_unfetched_revision_of_cached_repo_test.dart b/test/get/git/check_out_unfetched_revision_of_cached_repo_test.dart
index 30bc0fd..db9f5d9 100644
--- a/test/get/git/check_out_unfetched_revision_of_cached_repo_test.dart
+++ b/test/get/git/check_out_unfetched_revision_of_cached_repo_test.dart
@@ -11,24 +11,25 @@
void main() {
// Regression test for issue 20947.
- test(
- 'checks out an unfetched and locked revision of a cached '
+ test('checks out an unfetched and locked revision of a cached '
'repository', () async {
ensureGit();
// In order to get a lockfile that refers to a newer revision than is in the
// cache, we'll switch between two caches. First we ensure that the repo is
// in the first cache.
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
await pubGet();
@@ -38,10 +39,10 @@
renameInSandbox(cachePath, '$cachePath.old');
// Make the lockfile point to a new revision of the git repository.
- await d.git(
- 'foo.git',
- [d.libDir('foo', 'foo 2'), d.libPubspec('foo', '1.0.0')],
- ).commit();
+ await d.git('foo.git', [
+ d.libDir('foo', 'foo 2'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).commit();
await pubUpgrade(output: contains('Changed 1 dependency!'));
diff --git a/test/get/git/check_out_with_trailing_slash_test.dart b/test/get/git/check_out_with_trailing_slash_test.dart
index 3828cf5..da3f601 100644
--- a/test/get/git/check_out_with_trailing_slash_test.dart
+++ b/test/get/git/check_out_with_trailing_slash_test.dart
@@ -12,16 +12,18 @@
test('checks out a package from Git with a trailing slash', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git/'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git/'},
+ },
+ )
+ .create();
await pubGet();
@@ -34,9 +36,7 @@
await d.dir(cachePath, [
d.dir('git', [
- d.dir('cache', [
- d.gitPackageRepoCacheDir('foo'),
- ]),
+ d.dir('cache', [d.gitPackageRepoCacheDir('foo')]),
d.gitPackageRevisionCacheDir('foo'),
]),
]).validate();
diff --git a/test/get/git/clean_invalid_git_repo_cache_test.dart b/test/get/git/clean_invalid_git_repo_cache_test.dart
index c69c5eb..d1f69dc 100644
--- a/test/get/git/clean_invalid_git_repo_cache_test.dart
+++ b/test/get/git/clean_invalid_git_repo_cache_test.dart
@@ -14,10 +14,12 @@
/// empty-directory.
void _invalidateGitCache(String repo) {
final cacheDir = p.join(d.sandbox, p.joinAll([cachePath, 'git', 'cache']));
- final fooCacheDir = Directory(cacheDir).listSync().firstWhere((entity) {
- return entity is Directory &&
- entity.path.split(Platform.pathSeparator).last.startsWith(repo);
- }) as Directory;
+ final fooCacheDir =
+ Directory(cacheDir).listSync().firstWhere((entity) {
+ return entity is Directory &&
+ entity.path.split(Platform.pathSeparator).last.startsWith(repo);
+ })
+ as Directory;
fooCacheDir.deleteSync(recursive: true);
fooCacheDir.createSync();
@@ -27,16 +29,18 @@
test('Clean-up invalid git repo cache', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
await pubGet();
@@ -55,18 +59,22 @@
test('Clean-up invalid git repo cache at a specific branch', () async {
ensureGit();
- final repo =
- d.git('foo.git', [d.libDir('foo'), d.libPubspec('foo', '1.0.0')]);
+ final repo = d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]);
await repo.create();
await repo.runGit(['branch', 'old']);
- await d.appDir(
- dependencies: {
- 'foo': {
- 'git': {'url': '../foo.git', 'ref': 'old'},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'git': {'url': '../foo.git', 'ref': 'old'},
+ },
+ },
+ )
+ .create();
await pubGet();
@@ -85,18 +93,22 @@
test('Clean-up invalid git repo cache at a specific commit', () async {
ensureGit();
- final repo =
- d.git('foo.git', [d.libDir('foo'), d.libPubspec('foo', '1.0.0')]);
+ final repo = d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]);
await repo.create();
final commit = await repo.revParse('HEAD');
- await d.appDir(
- dependencies: {
- 'foo': {
- 'git': {'url': '../foo.git', 'ref': commit},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'git': {'url': '../foo.git', 'ref': commit},
+ },
+ },
+ )
+ .create();
await pubGet();
diff --git a/test/get/git/dependency_name_match_pubspec_test.dart b/test/get/git/dependency_name_match_pubspec_test.dart
index 2dcf169..30aed72 100644
--- a/test/get/git/dependency_name_match_pubspec_test.dart
+++ b/test/get/git/dependency_name_match_pubspec_test.dart
@@ -9,15 +9,14 @@
import '../../test_pub.dart';
void main() {
- test(
- 'requires the dependency name to match the remote pubspec '
+ test('requires the dependency name to match the remote pubspec '
'name', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
await d.dir(appPath, [
d.appPubspec(
@@ -28,8 +27,10 @@
]).create();
await pubGet(
- error: contains('"name" field doesn\'t match expected name '
- '"weirdname".'),
+ error: contains(
+ '"name" field doesn\'t match expected name '
+ '"weirdname".',
+ ),
exitCode: exit_codes.DATA,
);
});
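// Taken together with the next test file, the check above pins down git
// dependency naming: the dependency key must match the remote pubspec's
// `name:` field, while the repository (and its directory) can be called
// anything. A sketch of the failing shape, using the repo's descriptor
// helpers; the quoted error text is the one asserted above.
await d.git('foo.git', [d.libPubspec('foo', '1.0.0')]).create();
// Depending on it under the key `weirdname` fails resolution with:
//   '"name" field doesn't match expected name "weirdname".'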
diff --git a/test/get/git/different_repo_name_test.dart b/test/get/git/different_repo_name_test.dart
index 4398274..def0a18 100644
--- a/test/get/git/different_repo_name_test.dart
+++ b/test/get/git/different_repo_name_test.dart
@@ -8,15 +8,14 @@
import '../../test_pub.dart';
void main() {
- test(
- 'doesn\'t require the repository name to match the name in the '
+ test('doesn\'t require the repository name to match the name in the '
'pubspec', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('weirdname'), d.libPubspec('weirdname', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('weirdname'),
+ d.libPubspec('weirdname', '1.0.0'),
+ ]).create();
await d.dir(appPath, [
d.appPubspec(
diff --git a/test/get/git/doesnt_fetch_if_nothing_changes_test.dart b/test/get/git/doesnt_fetch_if_nothing_changes_test.dart
index a258027..e608ee5 100644
--- a/test/get/git/doesnt_fetch_if_nothing_changes_test.dart
+++ b/test/get/git/doesnt_fetch_if_nothing_changes_test.dart
@@ -13,18 +13,20 @@
test("doesn't re-fetch a repository if nothing changes", () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {
- 'git': {'url': '../foo.git'},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'git': {'url': '../foo.git'},
+ },
+ },
+ )
+ .create();
await pubGet();
diff --git a/test/get/git/get_test.dart b/test/get/git/get_test.dart
index bb11441..10dce4b 100644
--- a/test/get/git/get_test.dart
+++ b/test/get/git/get_test.dart
@@ -12,40 +12,45 @@
test('Gives nice error message when git ref is bad', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {
- 'git': {'url': '../foo.git', 'ref': '^BAD_REF'},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'git': {'url': '../foo.git', 'ref': '^BAD_REF'},
+ },
+ },
+ )
+ .create();
await pubGet(
- error:
- contains("Because myapp depends on foo from git which doesn't exist "
- "(Could not find git ref '^BAD_REF' (fatal: "),
+ error: contains(
+ "Because myapp depends on foo from git which doesn't exist "
+ "(Could not find git ref '^BAD_REF' (fatal: ",
+ ),
exitCode: UNAVAILABLE,
);
});
test('works with safe.bareRepository=explicit', () async {
// https://git-scm.com/docs/git-config#Documentation/git-config.txt-safebareRepository
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
- await d.appDir(
- dependencies: {
- 'foo': {
- 'git': {'url': '../foo.git'},
- },
- },
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'git': {'url': '../foo.git'},
+ },
+ },
+ )
+ .create();
final gitConfigDir = d.dir('gitconfig');
await gitConfigDir.create();
await pubGet(
@@ -56,22 +61,23 @@
'GIT_CONFIG_VALUE_0': 'explicit',
},
);
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '2.0.0')],
- ).commit();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '2.0.0'),
+ ]).commit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '2.0.0')],
- ).runGit(['tag', '2.0.0']);
- await d.appDir(
- dependencies: {
- 'foo': {
- 'git': {'url': '../foo.git', 'ref': '2.0.0'},
- },
- },
- ).create();
+ await d
+ .git('foo.git', [d.libDir('foo'), d.libPubspec('foo', '2.0.0')])
+ .runGit(['tag', '2.0.0']);
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'git': {'url': '../foo.git', 'ref': '2.0.0'},
+ },
+ },
+ )
+ .create();
await pubGet(
environment: {
diff --git a/test/get/git/git_not_installed_test.dart b/test/get/git/git_not_installed_test.dart
index 479f6eb..956c74a 100644
--- a/test/get/git/git_not_installed_test.dart
+++ b/test/get/git/git_not_installed_test.dart
@@ -24,11 +24,13 @@
echo "not git"
''',
);
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
await pubGet(
environment: extendedPathEnv(),
@@ -61,16 +63,19 @@
await d.git('foo.git', [d.libPubspec('foo', '1.0.0')]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
await pubGet(
environment: extendedPathEnv(),
- warning:
- contains('You have a very old version of git (version 2.13.1.616)'),
+ warning: contains(
+ 'You have a very old version of git (version 2.13.1.616)',
+ ),
exitCode: 0,
);
});
diff --git a/test/get/git/lock_version_test.dart b/test/get/git/lock_version_test.dart
index 3fa063e..ccb29ff 100644
--- a/test/get/git/lock_version_test.dart
+++ b/test/get/git/lock_version_test.dart
@@ -13,25 +13,25 @@
test('keeps a Git package locked to the version in the lockfile', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
// This get should lock the foo.git dependency to the current revision.
await pubGet();
await d.dir(cachePath, [
d.dir('git', [
- d.dir('cache', [
- d.gitPackageRepoCacheDir('foo'),
- ]),
+ d.dir('cache', [d.gitPackageRepoCacheDir('foo')]),
d.gitPackageRevisionCacheDir('foo'),
]),
]).validate();
@@ -41,10 +41,10 @@
// Delete the package spec to simulate a new checkout of the application.
deleteEntry(p.join(d.sandbox, packageConfigFilePath));
- await d.git(
- 'foo.git',
- [d.libDir('foo', 'foo 2'), d.libPubspec('foo', '1.0.0')],
- ).commit();
+ await d.git('foo.git', [
+ d.libDir('foo', 'foo 2'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).commit();
// This get shouldn't upgrade the foo.git dependency due to the lockfile.
await pubGet();
diff --git a/test/get/git/locked_revision_without_repo_test.dart b/test/get/git/locked_revision_without_repo_test.dart
index b6b2503..c3062bd 100644
--- a/test/get/git/locked_revision_without_repo_test.dart
+++ b/test/get/git/locked_revision_without_repo_test.dart
@@ -15,25 +15,25 @@
test('checks out the repository for a locked revision', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
// This get should lock the foo.git dependency to the current revision.
await pubGet();
await d.dir(cachePath, [
d.dir('git', [
- d.dir('cache', [
- d.gitPackageRepoCacheDir('foo'),
- ]),
+ d.dir('cache', [d.gitPackageRepoCacheDir('foo')]),
d.gitPackageRevisionCacheDir('foo'),
]),
]).validate();
@@ -45,10 +45,10 @@
deleteEntry(p.join(d.sandbox, packageConfigFilePath));
deleteEntry(p.join(d.sandbox, cachePath));
- await d.git(
- 'foo.git',
- [d.libDir('foo', 'foo 2'), d.libPubspec('foo', '1.0.0')],
- ).commit();
+ await d.git('foo.git', [
+ d.libDir('foo', 'foo 2'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).commit();
// This get shouldn't upgrade the foo.git dependency due to the lockfile.
await pubGet();
diff --git a/test/get/git/path_test.dart b/test/get/git/path_test.dart
index 41c8b0c..241befe 100644
--- a/test/get/git/path_test.dart
+++ b/test/get/git/path_test.dart
@@ -22,13 +22,15 @@
]);
await repo.create();
- await d.appDir(
- dependencies: {
- 'sub': {
- 'git': {'url': '../foo.git', 'path': 'subdir'},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'sub': {
+ 'git': {'url': '../foo.git', 'path': 'subdir'},
+ },
+ },
+ )
+ .create();
await pubGet();
@@ -59,13 +61,15 @@
]);
await repo.create();
- await d.appDir(
- dependencies: {
- 'sub': {
- 'git': {'url': '../foo.git', 'path': 'sub/dir%25'},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'sub': {
+ 'git': {'url': '../foo.git', 'path': 'sub/dir%25'},
+ },
+ },
+ )
+ .create();
await pubGet();
@@ -102,13 +106,15 @@
group('requires path to be absolute', () {
test('absolute path', () async {
- await d.appDir(
- dependencies: {
- 'sub': {
- 'git': {'url': '../foo.git', 'path': '/subdir'},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'sub': {
+ 'git': {'url': '../foo.git', 'path': '/subdir'},
+ },
+ },
+ )
+ .create();
await pubGet(
error: contains(
@@ -119,13 +125,15 @@
);
});
test('scheme', () async {
- await d.appDir(
- dependencies: {
- 'sub': {
- 'git': {'url': '../foo.git', 'path': 'https://subdir'},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'sub': {
+ 'git': {'url': '../foo.git', 'path': 'https://subdir'},
+ },
+ },
+ )
+ .create();
await pubGet(
error: contains(
@@ -136,13 +144,15 @@
);
});
test('fragment', () async {
- await d.appDir(
- dependencies: {
- 'sub': {
- 'git': {'url': '../foo.git', 'path': 'subdir/dir#fragment'},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'sub': {
+ 'git': {'url': '../foo.git', 'path': 'subdir/dir#fragment'},
+ },
+ },
+ )
+ .create();
await pubGet(
error: contains(
@@ -154,13 +164,15 @@
});
test('query', () async {
- await d.appDir(
- dependencies: {
- 'sub': {
- 'git': {'url': '../foo.git', 'path': 'subdir/dir?query'},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'sub': {
+ 'git': {'url': '../foo.git', 'path': 'subdir/dir?query'},
+ },
+ },
+ )
+ .create();
await pubGet(
error: contains(
@@ -172,16 +184,18 @@
});
test('authority', () async {
- await d.appDir(
- dependencies: {
- 'sub': {
- 'git': {
- 'url': '../foo.git',
- 'path': 'bob:pwd@somewhere.example.com/subdir',
+ await d
+ .appDir(
+ dependencies: {
+ 'sub': {
+ 'git': {
+ 'url': '../foo.git',
+ 'path': 'bob:pwd@somewhere.example.com/subdir',
+ },
+ },
},
- },
- },
- ).create();
+ )
+ .create();
await pubGet(
error: contains(
@@ -193,86 +207,97 @@
});
});
- test('depends on a package in a deep subdirectory, non-relative uri',
- () async {
- ensureGit();
+ test(
+ 'depends on a package in a deep subdirectory, non-relative uri',
+ () async {
+ ensureGit();
- final repo = d.git('foo.git', [
- d.dir('sub', [
- d.dir('dir%', [d.libPubspec('sub', '1.0.0'), d.libDir('sub', '1.0.0')]),
- ]),
- ]);
- await repo.create();
-
- await d.appDir(
- dependencies: {
- 'sub': {
- 'git': {
- 'url': p.toUri(p.join(d.sandbox, 'foo.git')).toString(),
- 'path': 'sub/dir%25',
- },
- },
- },
- ).create();
-
- await pubGet();
-
- await d.dir(cachePath, [
- d.dir('git', [
- d.dir('cache', [d.gitPackageRepoCacheDir('foo')]),
- d.hashDir('foo', [
- d.dir('sub', [
- d.dir('dir%', [d.libDir('sub', '1.0.0')]),
+ final repo = d.git('foo.git', [
+ d.dir('sub', [
+ d.dir('dir%', [
+ d.libPubspec('sub', '1.0.0'),
+ d.libDir('sub', '1.0.0'),
]),
]),
- ]),
- ]).validate();
+ ]);
+ await repo.create();
- await d.appPackageConfigFile([
- d.packageConfigEntry(
- name: 'sub',
- path: pathInCache('git/foo-${await repo.revParse('HEAD')}/sub/dir%25'),
- ),
- ]).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'sub': {
+ 'git': {
+ 'url': p.toUri(p.join(d.sandbox, 'foo.git')).toString(),
+ 'path': 'sub/dir%25',
+ },
+ },
+ },
+ )
+ .create();
- final lockFile = LockFile.load(
- p.join(d.sandbox, appPath, 'pubspec.lock'),
- SystemCache().sources,
- );
+ await pubGet();
- expect(
- (lockFile.packages['sub']!.description.description as GitDescription)
- .path,
- 'sub/dir%25',
- reason: 'use uris to specify the path relative to the repo',
- );
- });
+ await d.dir(cachePath, [
+ d.dir('git', [
+ d.dir('cache', [d.gitPackageRepoCacheDir('foo')]),
+ d.hashDir('foo', [
+ d.dir('sub', [
+ d.dir('dir%', [d.libDir('sub', '1.0.0')]),
+ ]),
+ ]),
+ ]),
+ ]).validate();
+
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(
+ name: 'sub',
+ path: pathInCache(
+ 'git/foo-${await repo.revParse('HEAD')}/sub/dir%25',
+ ),
+ ),
+ ]).validate();
+
+ final lockFile = LockFile.load(
+ p.join(d.sandbox, appPath, 'pubspec.lock'),
+ SystemCache().sources,
+ );
+
+ expect(
+ (lockFile.packages['sub']!.description.description as GitDescription)
+ .path,
+ 'sub/dir%25',
+ reason: 'use uris to specify the path relative to the repo',
+ );
+ },
+ );
test('depends on multiple packages in subdirectories', () async {
ensureGit();
final repo = d.git('foo.git', [
- d.dir(
- 'subdir1',
- [d.libPubspec('sub1', '1.0.0'), d.libDir('sub1', '1.0.0')],
- ),
- d.dir(
- 'subdir2',
- [d.libPubspec('sub2', '1.0.0'), d.libDir('sub2', '1.0.0')],
- ),
+ d.dir('subdir1', [
+ d.libPubspec('sub1', '1.0.0'),
+ d.libDir('sub1', '1.0.0'),
+ ]),
+ d.dir('subdir2', [
+ d.libPubspec('sub2', '1.0.0'),
+ d.libDir('sub2', '1.0.0'),
+ ]),
]);
await repo.create();
- await d.appDir(
- dependencies: {
- 'sub1': {
- 'git': {'url': '../foo.git', 'path': 'subdir1'},
- },
- 'sub2': {
- 'git': {'url': '../foo.git', 'path': 'subdir2'},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'sub1': {
+ 'git': {'url': '../foo.git', 'path': 'subdir1'},
+ },
+ 'sub2': {
+ 'git': {'url': '../foo.git', 'path': 'subdir2'},
+ },
+ },
+ )
+ .create();
await pubGet();
@@ -298,63 +323,71 @@
]).validate();
});
- test('depends on packages in the same subdirectory at different revisions',
- () async {
- ensureGit();
+ test(
+ 'depends on packages in the same subdirectory at different revisions',
+ () async {
+ ensureGit();
- final repo = d.git('foo.git', [
- d.dir(
- 'subdir',
- [d.libPubspec('sub1', '1.0.0'), d.libDir('sub1', '1.0.0')],
- ),
- ]);
- await repo.create();
- final oldRevision = await repo.revParse('HEAD');
-
- deleteEntry(p.join(d.sandbox, 'foo.git', 'subdir'));
-
- await d.git('foo.git', [
- d.dir(
- 'subdir',
- [d.libPubspec('sub2', '1.0.0'), d.libDir('sub2', '1.0.0')],
- ),
- ]).commit();
- final newRevision = await repo.revParse('HEAD');
-
- await d.appDir(
- dependencies: {
- 'sub1': {
- 'git': {'url': '../foo.git', 'path': 'subdir', 'ref': oldRevision},
- },
- 'sub2': {
- 'git': {'url': '../foo.git', 'path': 'subdir'},
- },
- },
- ).create();
-
- await pubGet();
-
- await d.dir(cachePath, [
- d.dir('git', [
- d.dir('cache', [d.gitPackageRepoCacheDir('foo')]),
- d.dir('foo-$oldRevision', [
- d.dir('subdir', [d.libDir('sub1', '1.0.0')]),
+ final repo = d.git('foo.git', [
+ d.dir('subdir', [
+ d.libPubspec('sub1', '1.0.0'),
+ d.libDir('sub1', '1.0.0'),
]),
- d.dir('foo-$newRevision', [
- d.dir('subdir', [d.libDir('sub2', '1.0.0')]),
- ]),
- ]),
- ]).validate();
+ ]);
+ await repo.create();
+ final oldRevision = await repo.revParse('HEAD');
- await d.appPackageConfigFile([
- d.packageConfigEntry(
- name: 'sub1',
- path: pathInCache('git/foo-$oldRevision/subdir'),
- ),
- d.packageConfigEntry(
- name: 'sub2',
- path: pathInCache('git/foo-$newRevision/subdir'),
- ),
- ]).validate();
- });
+ deleteEntry(p.join(d.sandbox, 'foo.git', 'subdir'));
+
+ await d.git('foo.git', [
+ d.dir('subdir', [
+ d.libPubspec('sub2', '1.0.0'),
+ d.libDir('sub2', '1.0.0'),
+ ]),
+ ]).commit();
+ final newRevision = await repo.revParse('HEAD');
+
+ await d
+ .appDir(
+ dependencies: {
+ 'sub1': {
+ 'git': {
+ 'url': '../foo.git',
+ 'path': 'subdir',
+ 'ref': oldRevision,
+ },
+ },
+ 'sub2': {
+ 'git': {'url': '../foo.git', 'path': 'subdir'},
+ },
+ },
+ )
+ .create();
+
+ await pubGet();
+
+ await d.dir(cachePath, [
+ d.dir('git', [
+ d.dir('cache', [d.gitPackageRepoCacheDir('foo')]),
+ d.dir('foo-$oldRevision', [
+ d.dir('subdir', [d.libDir('sub1', '1.0.0')]),
+ ]),
+ d.dir('foo-$newRevision', [
+ d.dir('subdir', [d.libDir('sub2', '1.0.0')]),
+ ]),
+ ]),
+ ]).validate();
+
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(
+ name: 'sub1',
+ path: pathInCache('git/foo-$oldRevision/subdir'),
+ ),
+ d.packageConfigEntry(
+ name: 'sub2',
+ path: pathInCache('git/foo-$newRevision/subdir'),
+ ),
+ ]).validate();
+ },
+ );
}
diff --git a/test/get/git/require_pubspec_name_test.dart b/test/get/git/require_pubspec_name_test.dart
index 59625c8..15201bc 100644
--- a/test/get/git/require_pubspec_name_test.dart
+++ b/test/get/git/require_pubspec_name_test.dart
@@ -9,18 +9,19 @@
import '../../test_pub.dart';
void main() {
- test(
- 'requires the dependency to have a pubspec with a name '
+ test('requires the dependency to have a pubspec with a name '
'field', () async {
ensureGit();
await d.git('foo.git', [d.libDir('foo'), d.pubspec({})]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
await pubGet(
error: contains('Missing the required "name" field.'),
diff --git a/test/get/git/require_pubspec_test.dart b/test/get/git/require_pubspec_test.dart
index 36af134..7c4f7aa 100644
--- a/test/get/git/require_pubspec_test.dart
+++ b/test/get/git/require_pubspec_test.dart
@@ -13,15 +13,19 @@
await d.git('foo.git', [d.libDir('foo')]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
await pubGet(
- error: RegExp(r'Could not find a file named "pubspec\.yaml" '
- r'in [^\n]\.'),
+ error: RegExp(
+ r'Could not find a file named "pubspec\.yaml" '
+ r'in [^\n]\.',
+ ),
);
});
}
diff --git a/test/get/git/ssh_url_test.dart b/test/get/git/ssh_url_test.dart
index 01439d8..81bf45e 100644
--- a/test/get/git/ssh_url_test.dart
+++ b/test/get/git/ssh_url_test.dart
@@ -16,46 +16,44 @@
// We could set up a local cache, and only test the '--offline' part of this.
// But for now we live with this.
test(
- 'Git description uris can be of the form git@github.com:dart-lang/pub.git',
- () {
- final description = GitDescription(
- url: 'git@github.com:dart-lang/pub.git',
- ref: 'main',
- path: 'abc/',
- containingDir: null,
- );
- expect(
- description.format(),
- 'git@github.com:dart-lang/pub.git at main in abc/',
- );
- expect(
- description.serializeForPubspec(
+ 'Git description uris can be of the form git@github.com:dart-lang/pub.git',
+ () {
+ final description = GitDescription(
+ url: 'git@github.com:dart-lang/pub.git',
+ ref: 'main',
+ path: 'abc/',
containingDir: null,
- languageVersion: const LanguageVersion(2, 16),
- ),
- {
- 'url': 'git@github.com:dart-lang/pub.git',
- 'ref': 'main',
- 'path': 'abc/',
- },
- );
- final resolvedDescription = ResolvedGitDescription(
- description,
- '7d48f902b0326fc2ce0615c20f1aab6c811fe55b',
- );
+ );
+ expect(
+ description.format(),
+ 'git@github.com:dart-lang/pub.git at main in abc/',
+ );
+ expect(
+ description.serializeForPubspec(
+ containingDir: null,
+ languageVersion: const LanguageVersion(2, 16),
+ ),
+ {
+ 'url': 'git@github.com:dart-lang/pub.git',
+ 'ref': 'main',
+ 'path': 'abc/',
+ },
+ );
+ final resolvedDescription = ResolvedGitDescription(
+ description,
+ '7d48f902b0326fc2ce0615c20f1aab6c811fe55b',
+ );
- expect(
- resolvedDescription.format(),
- 'git@github.com:dart-lang/pub.git at 7d48f9 in abc/',
- );
- expect(
- resolvedDescription.serializeForLockfile(containingDir: null),
- {
+ expect(
+ resolvedDescription.format(),
+ 'git@github.com:dart-lang/pub.git at 7d48f9 in abc/',
+ );
+ expect(resolvedDescription.serializeForLockfile(containingDir: null), {
'url': 'git@github.com:dart-lang/pub.git',
'ref': 'main',
'path': 'abc/',
'resolved-ref': '7d48f902b0326fc2ce0615c20f1aab6c811fe55b',
- },
- );
- });
+ });
+ },
+ );
}
diff --git a/test/get/git/stay_locked_if_compatible_test.dart b/test/get/git/stay_locked_if_compatible_test.dart
index 6bde0c2..b18f56e 100644
--- a/test/get/git/stay_locked_if_compatible_test.dart
+++ b/test/get/git/stay_locked_if_compatible_test.dart
@@ -8,36 +8,39 @@
import '../../test_pub.dart';
void main() {
- test(
- "doesn't upgrade a locked Git package with a new compatible "
+ test("doesn't upgrade a locked Git package with a new compatible "
'constraint', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo', 'foo 1.0.0'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo', 'foo 1.0.0'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
await pubGet();
final originalFooSpec = packageSpec('foo');
- await d.git(
- 'foo.git',
- [d.libDir('foo', 'foo 1.0.1'), d.libPubspec('foo', '1.0.1')],
- ).commit();
+ await d.git('foo.git', [
+ d.libDir('foo', 'foo 1.0.1'),
+ d.libPubspec('foo', '1.0.1'),
+ ]).commit();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git', 'version': '>=1.0.0'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git', 'version': '>=1.0.0'},
+ },
+ )
+ .create();
await pubGet();
diff --git a/test/get/git/unlock_if_incompatible_test.dart b/test/get/git/unlock_if_incompatible_test.dart
index 5bdbccd..ee91aa7 100644
--- a/test/get/git/unlock_if_incompatible_test.dart
+++ b/test/get/git/unlock_if_incompatible_test.dart
@@ -8,52 +8,51 @@
import '../../test_pub.dart';
void main() {
- test(
- 'upgrades a locked Git package with a new incompatible '
+ test('upgrades a locked Git package with a new incompatible '
'constraint', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '0.5.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '0.5.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
await pubGet();
await d.dir(cachePath, [
d.dir('git', [
- d.dir('cache', [
- d.gitPackageRepoCacheDir('foo'),
- ]),
+ d.dir('cache', [d.gitPackageRepoCacheDir('foo')]),
d.gitPackageRevisionCacheDir('foo'),
]),
]).validate();
final originalFooSpec = packageSpec('foo');
- await d.git(
- 'foo.git',
- [d.libDir('foo', 'foo 2'), d.libPubspec('foo', '1.0.0')],
- ).commit();
+ await d.git('foo.git', [
+ d.libDir('foo', 'foo 2'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).commit();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git', 'version': '>=1.0.0'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git', 'version': '>=1.0.0'},
+ },
+ )
+ .create();
await pubGet();
await d.dir(cachePath, [
- d.dir('git', [
- d.gitPackageRevisionCacheDir('foo', modifier: 2),
- ]),
+ d.dir('git', [d.gitPackageRevisionCacheDir('foo', modifier: 2)]),
]).validate();
expect(packageSpec('foo'), isNot(originalFooSpec));
diff --git a/test/get/hosted/advisory_test.dart b/test/get/hosted/advisory_test.dart
index 00782e8..ded0377 100644
--- a/test/get/hosted/advisory_test.dart
+++ b/test/get/hosted/advisory_test.dart
@@ -20,10 +20,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'baz': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0', 'baz': '^1.0.0'},
}),
]).create();
@@ -39,8 +36,39 @@
});
testWithGolden(
- 'no advisories to show - a single advisory with no pub packages',
- (ctx) async {
+ 'no advisories to show - a single advisory with no pub packages',
+ (ctx) async {
+ final server = await servePackages();
+ server
+ ..serve('foo', '1.0.0')
+ ..serve('foo', '1.2.3')
+ ..serve('baz', '1.0.0');
+
+ await d.dir(appPath, [
+ d.pubspec({
+ 'name': 'app',
+ 'dependencies': {'foo': '^1.0.0', 'baz': '^1.0.0'},
+ }),
+ ]).create();
+
+ server.addAdvisory(
+ advisoryId: '123',
+ displayUrl: 'https://github.com/advisories/123',
+ affectedPackages: [
+ AffectedPackage(
+ name: 'foo',
+ ecosystem: 'NotPub',
+ versions: ['1.2.3'],
+ ),
+ ],
+ );
+ await ctx.run(['get']);
+ },
+ );
+
+ testWithGolden('several advisories, one of which has no pub packages', (
+ ctx,
+ ) async {
final server = await servePackages();
server
..serve('foo', '1.0.0')
@@ -50,38 +78,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'baz': '^1.0.0',
- },
- }),
- ]).create();
-
- server.addAdvisory(
- advisoryId: '123',
- displayUrl: 'https://github.com/advisories/123',
- affectedPackages: [
- AffectedPackage(name: 'foo', ecosystem: 'NotPub', versions: ['1.2.3']),
- ],
- );
- await ctx.run(['get']);
- });
-
- testWithGolden('several advisories, one of which has no pub packages',
- (ctx) async {
- final server = await servePackages();
- server
- ..serve('foo', '1.0.0')
- ..serve('foo', '1.2.3')
- ..serve('baz', '1.0.0');
-
- await d.dir(appPath, [
- d.pubspec({
- 'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'baz': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0', 'baz': '^1.0.0'},
}),
]).create();
@@ -112,10 +109,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'baz': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0', 'baz': '^1.0.0'},
}),
]).create();
@@ -138,10 +132,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'baz': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0', 'baz': '^1.0.0'},
}),
]).create();
@@ -172,10 +163,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'baz': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0', 'baz': '^1.0.0'},
}),
]).create();
@@ -241,10 +229,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'baz': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0', 'baz': '^1.0.0'},
}),
]).create();
server.addAdvisory(
@@ -267,10 +252,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'baz': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0', 'baz': '^1.0.0'},
}),
]).create();
@@ -295,10 +277,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'no_advisory_pkg': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0', 'no_advisory_pkg': '^1.0.0'},
}),
]).create();
@@ -331,10 +310,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'no_advisory_pkg': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0', 'no_advisory_pkg': '^1.0.0'},
}),
]).create();
@@ -364,10 +340,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'baz': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0', 'baz': '^1.0.0'},
}),
]).create();
@@ -396,16 +369,11 @@
..serve('baz', '1.0.0');
await d.dir(appPath, [
- d.pubspec(
- {
- 'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'baz': '^1.0.0',
- },
- 'ignored_advisories': ['123'],
- },
- ),
+ d.pubspec({
+ 'name': 'app',
+ 'dependencies': {'foo': '^1.0.0', 'baz': '^1.0.0'},
+ 'ignored_advisories': ['123'],
+ }),
]).create();
server.addAdvisory(
advisoryId: '123',
@@ -432,16 +400,11 @@
..serve('baz', '1.0.0');
await d.dir(appPath, [
- d.pubspec(
- {
- 'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'baz': '^1.0.0',
- },
- 'ignored_advisories': ['abc'],
- },
- ),
+ d.pubspec({
+ 'name': 'app',
+ 'dependencies': {'foo': '^1.0.0', 'baz': '^1.0.0'},
+ 'ignored_advisories': ['abc'],
+ }),
]).create();
server.addAdvisory(
diff --git a/test/get/hosted/do_not_upgrade_on_removed_constraints_test.dart b/test/get/hosted/do_not_upgrade_on_removed_constraints_test.dart
index 09f7b78..a1acb15 100644
--- a/test/get/hosted/do_not_upgrade_on_removed_constraints_test.dart
+++ b/test/get/hosted/do_not_upgrade_on_removed_constraints_test.dart
@@ -8,8 +8,7 @@
import '../../test_pub.dart';
void main() {
- test(
- "doesn't upgrade dependencies whose constraints have been "
+ test("doesn't upgrade dependencies whose constraints have been "
'removed', () async {
await servePackages()
..serve('foo', '1.0.0', deps: {'shared_dep': 'any'})
diff --git a/test/get/hosted/does_no_network_requests_when_possible_test.dart b/test/get/hosted/does_no_network_requests_when_possible_test.dart
index 0739f19..849cae4 100644
--- a/test/get/hosted/does_no_network_requests_when_possible_test.dart
+++ b/test/get/hosted/does_no_network_requests_when_possible_test.dart
@@ -9,10 +9,11 @@
void main() {
test('does not request versions if the lockfile is up to date', () async {
- final server = await servePackages()
- ..serve('foo', '1.0.0')
- ..serve('foo', '1.1.0')
- ..serve('foo', '1.2.0');
+ final server =
+ await servePackages()
+ ..serve('foo', '1.0.0')
+ ..serve('foo', '1.1.0')
+ ..serve('foo', '1.2.0');
await d.appDir(dependencies: {'foo': 'any'}).create();
diff --git a/test/get/hosted/explain_bad_hosted_url_test.dart b/test/get/hosted/explain_bad_hosted_url_test.dart
index fb44e52..f42d3b8 100644
--- a/test/get/hosted/explain_bad_hosted_url_test.dart
+++ b/test/get/hosted/explain_bad_hosted_url_test.dart
@@ -36,8 +36,6 @@
server.serve('foo', '1.0.0');
await d.appDir(dependencies: {'foo': 'any'}).create();
- await pubGet(
- environment: {'PUB_HOSTED_URL': '${globalServer.url}/'},
- );
+ await pubGet(environment: {'PUB_HOSTED_URL': '${globalServer.url}/'});
});
}
diff --git a/test/get/hosted/get_stress_test.dart b/test/get/hosted/get_stress_test.dart
index a8b89e5..f0a34f2 100644
--- a/test/get/hosted/get_stress_test.dart
+++ b/test/get/hosted/get_stress_test.dart
@@ -15,12 +15,14 @@
server.serve('pkg$i', '1.$i.0');
}
- await d.appDir(
- dependencies: {
- 'foo': '1.2.3',
- for (var i = 0; i < 20; i++) 'pkg$i': '^1.$i.0',
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': '1.2.3',
+ for (var i = 0; i < 20; i++) 'pkg$i': '^1.$i.0',
+ },
+ )
+ .create();
await pubGet();
diff --git a/test/get/hosted/get_test.dart b/test/get/hosted/get_test.dart
index eaa5879..983de03 100644
--- a/test/get/hosted/get_test.dart
+++ b/test/get/hosted/get_test.dart
@@ -14,44 +14,47 @@
import '../../test_pub.dart';
void main() {
- test('gets a package from a pub server and validates its CRC32C checksum',
- () async {
- final server = await servePackages();
- server.serve('foo', '1.2.3');
+ test(
+ 'gets a package from a pub server and validates its CRC32C checksum',
+ () async {
+ final server = await servePackages();
+ server.serve('foo', '1.2.3');
- expect(await server.peekArchiveChecksumHeader('foo', '1.2.3'), isNotNull);
+ expect(await server.peekArchiveChecksumHeader('foo', '1.2.3'), isNotNull);
- await d.appDir(dependencies: {'foo': '1.2.3'}).create();
+ await d.appDir(dependencies: {'foo': '1.2.3'}).create();
- await pubGet();
+ await pubGet();
- await d.cacheDir({'foo': '1.2.3'}).validate();
- await d.appPackageConfigFile([
- d.packageConfigEntry(name: 'foo', version: '1.2.3'),
- ]).validate();
- });
+ await d.cacheDir({'foo': '1.2.3'}).validate();
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(name: 'foo', version: '1.2.3'),
+ ]).validate();
+ },
+ );
group('gets a package from a pub server without validating its checksum', () {
late PackageServer server;
setUp(() async {
- server = await servePackages()
- ..serveChecksums = false
- ..serve('foo', '1.2.3')
- ..serve(
- 'bar',
- '1.2.3',
- headers: {
- 'x-goog-hash': [''],
- },
- )
- ..serve(
- 'baz',
- '1.2.3',
- headers: {
- 'x-goog-hash': ['md5=loremipsum'],
- },
- );
+ server =
+ await servePackages()
+ ..serveChecksums = false
+ ..serve('foo', '1.2.3')
+ ..serve(
+ 'bar',
+ '1.2.3',
+ headers: {
+ 'x-goog-hash': [''],
+ },
+ )
+ ..serve(
+ 'baz',
+ '1.2.3',
+ headers: {
+ 'x-goog-hash': ['md5=loremipsum'],
+ },
+ );
});
test('because of omitted checksum header', () async {
@@ -105,14 +108,19 @@
final server = await startPackageServer();
server.serve('foo', '1.2.3');
- await d.appDir(
- dependencies: {
- 'foo': {
- 'version': '1.2.3',
- 'hosted': {'name': 'foo', 'url': 'http://localhost:${server.port}'},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'version': '1.2.3',
+ 'hosted': {
+ 'name': 'foo',
+ 'url': 'http://localhost:${server.port}',
+ },
+ },
+ },
+ )
+ .create();
await pubGet();
@@ -122,156 +130,178 @@
]).validate();
});
- test('recognizes and retries a package with a CRC32C checksum mismatch',
- () async {
- final server = await startPackageServer();
+ test(
+ 'recognizes and retries a package with a CRC32C checksum mismatch',
+ () async {
+ final server = await startPackageServer();
- server.serve(
- 'foo',
- '1.2.3',
- headers: {
- 'x-goog-hash': PackageServer.composeChecksumHeader(crc32c: 3381945770),
- },
- );
-
- await d.appDir(
- dependencies: {
- 'foo': {
- 'version': '1.2.3',
- 'hosted': {'name': 'foo', 'url': 'http://localhost:${server.port}'},
+ server.serve(
+ 'foo',
+ '1.2.3',
+ headers: {
+ 'x-goog-hash': PackageServer.composeChecksumHeader(
+ crc32c: 3381945770,
+ ),
},
- },
- ).create();
+ );
- await pubGet(
- exitCode: exit_codes.TEMP_FAIL,
- error: RegExp(
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'version': '1.2.3',
+ 'hosted': {
+ 'name': 'foo',
+ 'url': 'http://localhost:${server.port}',
+ },
+ },
+ },
+ )
+ .create();
+
+ await pubGet(
+ exitCode: exit_codes.TEMP_FAIL,
+ error: RegExp(
r'''Package archive for foo 1.2.3 downloaded from "(.+)" has '''
r'''"x-goog-hash: crc32c=(\d+)", which doesn't match the checksum '''
- r'''of the archive downloaded\.'''),
- silent: contains('Attempt #2'),
- environment: {
- 'PUB_MAX_HTTP_RETRIES': '2',
- },
- );
- });
+ r'''of the archive downloaded\.''',
+ ),
+ silent: contains('Attempt #2'),
+ environment: {'PUB_MAX_HTTP_RETRIES': '2'},
+ );
+ },
+ );
group('recognizes bad checksum header and retries', () {
late PackageServer server;
setUp(() async {
- server = await servePackages()
- ..serve(
- 'foo',
- '1.2.3',
- headers: {
- 'x-goog-hash': ['crc32c=,md5='],
- },
- )
- ..serve(
- 'bar',
- '1.2.3',
- headers: {
- 'x-goog-hash': ['crc32c=loremipsum,md5=loremipsum'],
- },
- )
- ..serve(
- 'baz',
- '1.2.3',
- headers: {
- 'x-goog-hash': ['crc32c=MTIzNDU=,md5=NTQzMjE='],
- },
- );
+ server =
+ await servePackages()
+ ..serve(
+ 'foo',
+ '1.2.3',
+ headers: {
+ 'x-goog-hash': ['crc32c=,md5='],
+ },
+ )
+ ..serve(
+ 'bar',
+ '1.2.3',
+ headers: {
+ 'x-goog-hash': ['crc32c=loremipsum,md5=loremipsum'],
+ },
+ )
+ ..serve(
+ 'baz',
+ '1.2.3',
+ headers: {
+ 'x-goog-hash': ['crc32c=MTIzNDU=,md5=NTQzMjE='],
+ },
+ );
});
test('when the CRC32C checksum is empty', () async {
- await d.appDir(
- dependencies: {
- 'foo': {
- 'version': '1.2.3',
- 'hosted': {'name': 'foo', 'url': 'http://localhost:${server.port}'},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'version': '1.2.3',
+ 'hosted': {
+ 'name': 'foo',
+ 'url': 'http://localhost:${server.port}',
+ },
+ },
+ },
+ )
+ .create();
await pubGet(
exitCode: exit_codes.TEMP_FAIL,
error: contains(
- 'Package archive "foo-1.2.3.tar.gz" has a malformed CRC32C '
- 'checksum in its response headers'),
+ 'Package archive "foo-1.2.3.tar.gz" has a malformed CRC32C '
+ 'checksum in its response headers',
+ ),
silent: contains('Attempt #2'),
- environment: {
- 'PUB_MAX_HTTP_RETRIES': '2',
- },
+ environment: {'PUB_MAX_HTTP_RETRIES': '2'},
);
});
test('when the CRC32C checksum has bad encoding', () async {
- await d.appDir(
- dependencies: {
- 'bar': {
- 'version': '1.2.3',
- 'hosted': {'name': 'bar', 'url': 'http://localhost:${server.port}'},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'bar': {
+ 'version': '1.2.3',
+ 'hosted': {
+ 'name': 'bar',
+ 'url': 'http://localhost:${server.port}',
+ },
+ },
+ },
+ )
+ .create();
await pubGet(
exitCode: exit_codes.TEMP_FAIL,
error: contains(
- 'Package archive "bar-1.2.3.tar.gz" has a malformed CRC32C '
- 'checksum in its response headers'),
+ 'Package archive "bar-1.2.3.tar.gz" has a malformed CRC32C '
+ 'checksum in its response headers',
+ ),
silent: contains('Attempt #2'),
- environment: {
- 'PUB_MAX_HTTP_RETRIES': '2',
- },
+ environment: {'PUB_MAX_HTTP_RETRIES': '2'},
);
});
test('when the CRC32C checksum is malformed', () async {
- await d.appDir(
- dependencies: {
- 'baz': {
- 'version': '1.2.3',
- 'hosted': {'name': 'baz', 'url': 'http://localhost:${server.port}'},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'baz': {
+ 'version': '1.2.3',
+ 'hosted': {
+ 'name': 'baz',
+ 'url': 'http://localhost:${server.port}',
+ },
+ },
+ },
+ )
+ .create();
await pubGet(
exitCode: exit_codes.TEMP_FAIL,
error: contains(
- 'Package archive "baz-1.2.3.tar.gz" has a malformed CRC32C '
- 'checksum in its response headers'),
+ 'Package archive "baz-1.2.3.tar.gz" has a malformed CRC32C '
+ 'checksum in its response headers',
+ ),
silent: contains('Attempt #2'),
- environment: {
- 'PUB_MAX_HTTP_RETRIES': '2',
- },
+ environment: {'PUB_MAX_HTTP_RETRIES': '2'},
);
});
});
- test('gets a package from a pub server that uses gzip response compression',
- () async {
- final server = await servePackages();
- server.autoCompress = true;
- server.serveChecksums = false;
- server.serve('foo', '1.2.3');
-
- expect(await server.peekArchiveChecksumHeader('foo', '1.2.3'), isNull);
-
- await d.appDir(dependencies: {'foo': '1.2.3'}).create();
-
- await pubGet();
-
- await d.cacheDir({'foo': '1.2.3'}).validate();
- await d.appPackageConfigFile([
- d.packageConfigEntry(name: 'foo', version: '1.2.3'),
- ]).validate();
- });
-
test(
- 'gets a package from a pub server that uses gzip response compression '
+ 'gets a package from a pub server that uses gzip response compression',
+ () async {
+ final server = await servePackages();
+ server.autoCompress = true;
+ server.serveChecksums = false;
+ server.serve('foo', '1.2.3');
+
+ expect(await server.peekArchiveChecksumHeader('foo', '1.2.3'), isNull);
+
+ await d.appDir(dependencies: {'foo': '1.2.3'}).create();
+
+ await pubGet();
+
+ await d.cacheDir({'foo': '1.2.3'}).validate();
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(name: 'foo', version: '1.2.3'),
+ ]).validate();
+ },
+ );
+
+ test('gets a package from a pub server that uses gzip response compression '
'and validates its CRC32C checksum', () async {
final server = await servePackages();
server.autoCompress = true;
@@ -313,9 +343,7 @@
await pubGet();
final packages = dig<Map>(
- loadYaml(
- readTextFile(p.join(d.sandbox, appPath, 'pubspec.lock')),
- ),
+ loadYaml(readTextFile(p.join(d.sandbox, appPath, 'pubspec.lock'))),
['packages'],
);
expect(
@@ -357,9 +385,7 @@
await pubGet();
final packages = dig<Map>(
- loadYaml(
- readTextFile(p.join(d.sandbox, appPath, 'pubspec.lock')),
- ),
+ loadYaml(readTextFile(p.join(d.sandbox, appPath, 'pubspec.lock'))),
['packages'],
);
expect(
@@ -392,20 +418,19 @@
);
await d.appDir(dependencies: {'foo': 'any'}).create();
await pubGet(
- error:
- contains('Tar file contained duplicate path blah/myduplicatefile.'),
+ error: contains(
+ 'Tar file contained duplicate path blah/myduplicatefile.',
+ ),
exitCode: DATA,
);
});
test('Fails gracefully when downloading archive', () async {
final server = await servePackages();
- server.serve(
- 'foo',
- '1.0.0',
+ server.serve('foo', '1.0.0');
+ final downloadPattern = RegExp(
+ r'/packages/([^/]*)/versions/([^/]*).tar.gz',
);
- final downloadPattern =
- RegExp(r'/packages/([^/]*)/versions/([^/]*).tar.gz');
server.handle(
downloadPattern,
(request) => Response(403, body: 'Go away!'),
diff --git a/test/get/hosted/gets_a_package_with_busted_dev_dependencies_test.dart b/test/get/hosted/gets_a_package_with_busted_dev_dependencies_test.dart
index d1b90e6..791b748 100644
--- a/test/get/hosted/gets_a_package_with_busted_dev_dependencies_test.dart
+++ b/test/get/hosted/gets_a_package_with_busted_dev_dependencies_test.dart
@@ -9,8 +9,7 @@
void main() {
// Regression test for issue 22194.
- test(
- 'gets a dependency with broken dev dependencies from a pub '
+ test('gets a dependency with broken dev dependencies from a pub '
'server', () async {
final server = await servePackages();
server.serve(
diff --git a/test/get/hosted/resolve_constraints_test.dart b/test/get/hosted/resolve_constraints_test.dart
index 6eed388..32e196a 100644
--- a/test/get/hosted/resolve_constraints_test.dart
+++ b/test/get/hosted/resolve_constraints_test.dart
@@ -20,8 +20,11 @@
await pubGet();
- await d
- .cacheDir({'foo': '1.2.3', 'bar': '2.3.4', 'baz': '2.0.4'}).validate();
+ await d.cacheDir({
+ 'foo': '1.2.3',
+ 'bar': '2.3.4',
+ 'baz': '2.0.4',
+ }).validate();
await d.appPackageConfigFile([
d.packageConfigEntry(name: 'foo', version: '1.2.3'),
d.packageConfigEntry(name: 'bar', version: '2.3.4'),
diff --git a/test/get/hosted/resolve_with_retracted_package_versions_test.dart b/test/get/hosted/resolve_with_retracted_package_versions_test.dart
index bafe700..c32cdc6 100644
--- a/test/get/hosted/resolve_with_retracted_package_versions_test.dart
+++ b/test/get/hosted/resolve_with_retracted_package_versions_test.dart
@@ -13,10 +13,11 @@
void main() {
test('Do not consider retracted packages', () async {
- final server = await servePackages()
- ..serve('foo', '1.0.0', deps: {'bar': '^1.0.0'})
- ..serve('bar', '1.0.0')
- ..serve('bar', '1.1.0');
+ final server =
+ await servePackages()
+ ..serve('foo', '1.0.0', deps: {'bar': '^1.0.0'})
+ ..serve('bar', '1.0.0')
+ ..serve('bar', '1.1.0');
await d.appDir(dependencies: {'foo': '1.0.0'}).create();
server.retractPackageVersion('bar', '1.1.0');
@@ -30,9 +31,10 @@
});
test('Error when the only available package version is retracted', () async {
- final server = await servePackages()
- ..serve('foo', '1.0.0', deps: {'bar': '^1.0.0'})
- ..serve('bar', '1.0.0');
+ final server =
+ await servePackages()
+ ..serve('foo', '1.0.0', deps: {'bar': '^1.0.0'})
+ ..serve('bar', '1.0.0');
await d.appDir(dependencies: {'foo': '1.0.0'}).create();
server.retractPackageVersion('bar', '1.0.0');
@@ -49,10 +51,11 @@
// In this case we expect a newer version to be published at some point which
// will then cause pub upgrade to choose that one.
test('Allow retracted version when it was already in pubspec.lock', () async {
- final server = await servePackages()
- ..serve('foo', '1.0.0', deps: {'bar': '^1.0.0'})
- ..serve('bar', '1.0.0')
- ..serve('bar', '1.1.0');
+ final server =
+ await servePackages()
+ ..serve('foo', '1.0.0', deps: {'bar': '^1.0.0'})
+ ..serve('bar', '1.0.0')
+ ..serve('bar', '1.1.0');
await d.appDir(dependencies: {'foo': '1.0.0'}).create();
await pubGet();
@@ -87,62 +90,65 @@
]).validate();
});
- test('Offline versions of pub commands also handle retracted packages',
- () async {
- final server = await servePackages();
- await populateCache(
- {
+ test(
+ 'Offline versions of pub commands also handle retracted packages',
+ () async {
+ final server = await servePackages();
+ await populateCache({
'foo': ['1.0.0'],
'bar': ['1.0.0', '1.1.0'],
- },
- server,
- );
+ }, server);
- await d.cacheDir({
- 'foo': '1.0.0',
- 'bar': ['1.0.0', '1.1.0'],
- }).validate();
+ await d.cacheDir({
+ 'foo': '1.0.0',
+ 'bar': ['1.0.0', '1.1.0'],
+ }).validate();
- final barVersionsCache =
- p.join(globalServer.cachingPath, '.cache', 'bar-versions.json');
- expect(fileExists(barVersionsCache), isTrue);
- deleteEntry(barVersionsCache);
+ final barVersionsCache = p.join(
+ globalServer.cachingPath,
+ '.cache',
+ 'bar-versions.json',
+ );
+ expect(fileExists(barVersionsCache), isTrue);
+ deleteEntry(barVersionsCache);
- server.retractPackageVersion('bar', '1.1.0');
- await pubGet();
+ server.retractPackageVersion('bar', '1.1.0');
+ await pubGet();
- await d.cacheDir({'bar': '1.1.0'}).validate();
+ await d.cacheDir({'bar': '1.1.0'}).validate();
- // Now serve only errors - to validate we are truly offline.
- server.serveErrors();
+ // Now serve only errors - to validate we are truly offline.
+ server.serveErrors();
- await d.appDir(dependencies: {'foo': '1.0.0', 'bar': '^1.0.0'}).create();
+ await d.appDir(dependencies: {'foo': '1.0.0', 'bar': '^1.0.0'}).create();
- await pubUpgrade(args: ['--offline']);
+ await pubUpgrade(args: ['--offline']);
- // We choose bar 1.1.0 since we already have it in pubspec.lock
- await d.appPackageConfigFile([
- d.packageConfigEntry(name: 'foo', version: '1.0.0'),
- d.packageConfigEntry(name: 'bar', version: '1.1.0'),
- ]).validate();
+ // We choose bar 1.1.0 since we already have it in pubspec.lock
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(name: 'foo', version: '1.0.0'),
+ d.packageConfigEntry(name: 'bar', version: '1.1.0'),
+ ]).validate();
- // Delete lockfile so that retracted versions are not considered.
- final lockFile = p.join(d.sandbox, appPath, 'pubspec.lock');
- expect(fileExists(lockFile), isTrue);
- deleteEntry(lockFile);
+ // Delete lockfile so that retracted versions are not considered.
+ final lockFile = p.join(d.sandbox, appPath, 'pubspec.lock');
+ expect(fileExists(lockFile), isTrue);
+ deleteEntry(lockFile);
- await pubGet(args: ['--offline']);
- await d.appPackageConfigFile([
- d.packageConfigEntry(name: 'foo', version: '1.0.0'),
- d.packageConfigEntry(name: 'bar', version: '1.0.0'),
- ]).validate();
- });
+ await pubGet(args: ['--offline']);
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(name: 'foo', version: '1.0.0'),
+ d.packageConfigEntry(name: 'bar', version: '1.0.0'),
+ ]).validate();
+ },
+ );
test('Allow retracted version when pinned in dependency_overrides', () async {
- final server = await servePackages()
- ..serve('foo', '1.0.0')
- ..serve('foo', '2.0.0')
- ..serve('foo', '3.0.0');
+ final server =
+ await servePackages()
+ ..serve('foo', '1.0.0')
+ ..serve('foo', '2.0.0')
+ ..serve('foo', '3.0.0');
await d.dir(appPath, [
d.pubspec({
@@ -160,35 +166,38 @@
]).validate();
});
- test('Prefer retracted version in dependency_overrides over pubspec.lock',
- () async {
- final server = await servePackages()
- ..serve('foo', '1.0.0')
- ..serve('foo', '2.0.0')
- ..serve('foo', '3.0.0');
+ test(
+ 'Prefer retracted version in dependency_overrides over pubspec.lock',
+ () async {
+ final server =
+ await servePackages()
+ ..serve('foo', '1.0.0')
+ ..serve('foo', '2.0.0')
+ ..serve('foo', '3.0.0');
- await d.appDir(dependencies: {'foo': 'any'}).create();
- await pubGet();
+ await d.appDir(dependencies: {'foo': 'any'}).create();
+ await pubGet();
- server.retractPackageVersion('foo', '2.0.0');
- server.retractPackageVersion('foo', '3.0.0');
+ server.retractPackageVersion('foo', '2.0.0');
+ server.retractPackageVersion('foo', '3.0.0');
- await pubUpgrade();
- await d.appPackageConfigFile([
- d.packageConfigEntry(name: 'foo', version: '3.0.0'),
- ]).validate();
+ await pubUpgrade();
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(name: 'foo', version: '3.0.0'),
+ ]).validate();
- await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- 'dependencies': {'foo': '<=3.0.0'},
- 'dependency_overrides': {'foo': '2.0.0'},
- }),
- ]).create();
+ await d.dir(appPath, [
+ d.pubspec({
+ 'name': 'myapp',
+ 'dependencies': {'foo': '<=3.0.0'},
+ 'dependency_overrides': {'foo': '2.0.0'},
+ }),
+ ]).create();
- await pubUpgrade();
- await d.appPackageConfigFile([
- d.packageConfigEntry(name: 'foo', version: '2.0.0'),
- ]).validate();
- });
+ await pubUpgrade();
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(name: 'foo', version: '2.0.0'),
+ ]).validate();
+ },
+ );
}
diff --git a/test/get/hosted/stay_locked_if_compatible_test.dart b/test/get/hosted/stay_locked_if_compatible_test.dart
index 4adc728..fc4d0c9 100644
--- a/test/get/hosted/stay_locked_if_compatible_test.dart
+++ b/test/get/hosted/stay_locked_if_compatible_test.dart
@@ -8,8 +8,7 @@
import '../../test_pub.dart';
void main() {
- test(
- "doesn't upgrade a locked pub server package with a new "
+ test("doesn't upgrade a locked pub server package with a new "
'compatible constraint', () async {
final server = await servePackages();
server.serve('foo', '1.0.0');
diff --git a/test/get/hosted/stay_locked_if_new_is_satisfied_test.dart b/test/get/hosted/stay_locked_if_new_is_satisfied_test.dart
index cb431f0..da5597e 100644
--- a/test/get/hosted/stay_locked_if_new_is_satisfied_test.dart
+++ b/test/get/hosted/stay_locked_if_new_is_satisfied_test.dart
@@ -8,8 +8,7 @@
import '../../test_pub.dart';
void main() {
- test(
- "doesn't unlock dependencies if a new dependency is already "
+ test("doesn't unlock dependencies if a new dependency is already "
'satisfied', () async {
final server = await servePackages();
server.serve('foo', '1.0.0', deps: {'bar': '<2.0.0'});
diff --git a/test/get/hosted/stay_locked_test.dart b/test/get/hosted/stay_locked_test.dart
index 6fee254..142fd58 100644
--- a/test/get/hosted/stay_locked_test.dart
+++ b/test/get/hosted/stay_locked_test.dart
@@ -10,8 +10,7 @@
import '../../test_pub.dart';
void main() {
- test(
- 'keeps a hosted package locked to the version in the '
+ test('keeps a hosted package locked to the version in the '
'lockfile', () async {
final server = await servePackages();
server.serve('foo', '1.0.0');
diff --git a/test/get/hosted/unlock_if_incompatible_test.dart b/test/get/hosted/unlock_if_incompatible_test.dart
index a560ea7..0bbde0f 100644
--- a/test/get/hosted/unlock_if_incompatible_test.dart
+++ b/test/get/hosted/unlock_if_incompatible_test.dart
@@ -8,8 +8,7 @@
import '../../test_pub.dart';
void main() {
- test(
- 'upgrades a locked pub server package with a new incompatible '
+ test('upgrades a locked pub server package with a new incompatible '
'constraint', () async {
final server = await servePackages();
server.serve('foo', '1.0.0');
diff --git a/test/get/hosted/unlock_if_new_is_unsatisfied_test.dart b/test/get/hosted/unlock_if_new_is_unsatisfied_test.dart
index ce46e2b..62d40c9 100644
--- a/test/get/hosted/unlock_if_new_is_unsatisfied_test.dart
+++ b/test/get/hosted/unlock_if_new_is_unsatisfied_test.dart
@@ -8,8 +8,7 @@
import '../../test_pub.dart';
void main() {
- test(
- 'unlocks dependencies if necessary to ensure that a new '
+ test('unlocks dependencies if necessary to ensure that a new '
'dependency is satisfied', () async {
final server = await servePackages();
diff --git a/test/get/hosted/unlock_if_version_doesnt_exist_test.dart b/test/get/hosted/unlock_if_version_doesnt_exist_test.dart
index cdb4229..7d58d07 100644
--- a/test/get/hosted/unlock_if_version_doesnt_exist_test.dart
+++ b/test/get/hosted/unlock_if_version_doesnt_exist_test.dart
@@ -10,25 +10,27 @@
import '../../test_pub.dart';
void main() {
- test('upgrades a locked pub server package with a nonexistent version',
- () async {
- final server = await servePackages();
- server.serve('foo', '1.0.0');
+ test(
+ 'upgrades a locked pub server package with a nonexistent version',
+ () async {
+ final server = await servePackages();
+ server.serve('foo', '1.0.0');
- await d.appDir(dependencies: {'foo': 'any'}).create();
- await pubGet();
- await d.appPackageConfigFile([
- d.packageConfigEntry(name: 'foo', version: '1.0.0'),
- ]).validate();
+ await d.appDir(dependencies: {'foo': 'any'}).create();
+ await pubGet();
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(name: 'foo', version: '1.0.0'),
+ ]).validate();
- deleteEntry(p.join(d.sandbox, cachePath));
+ deleteEntry(p.join(d.sandbox, cachePath));
- server.clearPackages();
- server.serve('foo', '1.0.1');
+ server.clearPackages();
+ server.serve('foo', '1.0.1');
- await pubGet();
- await d.appPackageConfigFile([
- d.packageConfigEntry(name: 'foo', version: '1.0.1'),
- ]).validate();
- });
+ await pubGet();
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(name: 'foo', version: '1.0.1'),
+ ]).validate();
+ },
+ );
}
diff --git a/test/get/hosted/warn_about_discontinued_test.dart b/test/get/hosted/warn_about_discontinued_test.dart
index 96f998e..08a7c5a 100644
--- a/test/get/hosted/warn_about_discontinued_test.dart
+++ b/test/get/hosted/warn_about_discontinued_test.dart
@@ -26,10 +26,16 @@
// A pub get straight away will not trigger the warning, as we cache
// responses for a while.
await pubGet();
- final fooVersionsCache =
- p.join(globalServer.cachingPath, '.cache', 'foo-versions.json');
- final transitiveVersionsCache =
- p.join(globalServer.cachingPath, '.cache', 'transitive-versions.json');
+ final fooVersionsCache = p.join(
+ globalServer.cachingPath,
+ '.cache',
+ 'foo-versions.json',
+ );
+ final transitiveVersionsCache = p.join(
+ globalServer.cachingPath,
+ '.cache',
+ 'transitive-versions.json',
+ );
expect(fileExists(fooVersionsCache), isTrue);
expect(fileExists(transitiveVersionsCache), isTrue);
deleteEntry(fooVersionsCache);
@@ -130,8 +136,11 @@
// A pub get straight away will not trigger the warning, as we cache
// responses for a while.
await pubGet();
- final fooVersionsCache =
- p.join(globalServer.cachingPath, '.cache', 'foo-versions.json');
+ final fooVersionsCache = p.join(
+ globalServer.cachingPath,
+ '.cache',
+ 'foo-versions.json',
+ );
expect(fileExists(fooVersionsCache), isTrue);
deleteEntry(fooVersionsCache);
// We warn only about the direct dependency here:
@@ -207,8 +216,11 @@
server.serve('foo', '1.2.3');
await d.appDir(dependencies: {'foo': '1.2.3'}).create();
await pubGet();
- final fooVersionsCache =
- p.join(globalServer.cachingPath, '.cache', 'foo-versions.json');
+ final fooVersionsCache = p.join(
+ globalServer.cachingPath,
+ '.cache',
+ 'foo-versions.json',
+ );
expect(fileExists(fooVersionsCache), isTrue);
deleteEntry(fooVersionsCache);
// Serve 400 on all requests.
diff --git a/test/get/hosted/warn_about_retracted_package_test.dart b/test/get/hosted/warn_about_retracted_package_test.dart
index ba9b47d..67749d6 100644
--- a/test/get/hosted/warn_about_retracted_package_test.dart
+++ b/test/get/hosted/warn_about_retracted_package_test.dart
@@ -11,59 +11,73 @@
void main() {
test('Report retracted packages', () async {
- final server = await servePackages()
- ..serve('foo', '1.0.0', deps: {'bar': 'any'})
- ..serve('bar', '1.0.0');
+ final server =
+ await servePackages()
+ ..serve('foo', '1.0.0', deps: {'bar': 'any'})
+ ..serve('bar', '1.0.0');
await d.appDir(dependencies: {'foo': '1.0.0'}).create();
await pubGet();
server.retractPackageVersion('bar', '1.0.0');
// Delete the cache to trigger the report.
- final barVersionsCache =
- p.join(server.cachingPath, '.cache', 'bar-versions.json');
+ final barVersionsCache = p.join(
+ server.cachingPath,
+ '.cache',
+ 'bar-versions.json',
+ );
expect(fileExists(barVersionsCache), isTrue);
deleteEntry(barVersionsCache);
await pubGet(output: contains('bar 1.0.0 (retracted)'));
});
test('Report retracted packages with newer version available', () async {
- final server = await servePackages()
- ..serve('foo', '1.0.0', deps: {'bar': '^1.0.0'})
- ..serve('bar', '1.0.0')
- ..serve('bar', '2.0.0')
- ..serve('bar', '2.0.1-pre');
+ final server =
+ await servePackages()
+ ..serve('foo', '1.0.0', deps: {'bar': '^1.0.0'})
+ ..serve('bar', '1.0.0')
+ ..serve('bar', '2.0.0')
+ ..serve('bar', '2.0.1-pre');
await d.appDir(dependencies: {'foo': '1.0.0'}).create();
await pubGet();
server.retractPackageVersion('bar', '1.0.0');
// Delete the cache to trigger the report.
- final barVersionsCache =
- p.join(server.cachingPath, '.cache', 'bar-versions.json');
+ final barVersionsCache = p.join(
+ server.cachingPath,
+ '.cache',
+ 'bar-versions.json',
+ );
expect(fileExists(barVersionsCache), isTrue);
deleteEntry(barVersionsCache);
await pubGet(output: contains('bar 1.0.0 (retracted, 2.0.0 available)'));
});
- test('Report retracted packages with newer prerelease version available',
- () async {
- final server = await servePackages()
- ..serve('foo', '1.0.0', deps: {'bar': '^1.0.0-pre'})
- ..serve('bar', '1.0.0-pre')
- ..serve('bar', '2.0.1-pre');
- await d.appDir(dependencies: {'foo': '1.0.0'}).create();
+ test(
+ 'Report retracted packages with newer prerelease version available',
+ () async {
+ final server =
+ await servePackages()
+ ..serve('foo', '1.0.0', deps: {'bar': '^1.0.0-pre'})
+ ..serve('bar', '1.0.0-pre')
+ ..serve('bar', '2.0.1-pre');
+ await d.appDir(dependencies: {'foo': '1.0.0'}).create();
- await pubGet();
+ await pubGet();
- server.retractPackageVersion('bar', '1.0.0-pre');
- // Delete the cache to trigger the report.
- final barVersionsCache =
- p.join(server.cachingPath, '.cache', 'bar-versions.json');
- expect(fileExists(barVersionsCache), isTrue);
- deleteEntry(barVersionsCache);
- await pubGet(
- output: contains('bar 1.0.0-pre (retracted, 2.0.1-pre available)'),
- );
- });
+ server.retractPackageVersion('bar', '1.0.0-pre');
+ // Delete the cache to trigger the report.
+ final barVersionsCache = p.join(
+ server.cachingPath,
+ '.cache',
+ 'bar-versions.json',
+ );
+ expect(fileExists(barVersionsCache), isTrue);
+ deleteEntry(barVersionsCache);
+ await pubGet(
+ output: contains('bar 1.0.0-pre (retracted, 2.0.1-pre available)'),
+ );
+ },
+ );
}
diff --git a/test/get/package_name_test.dart b/test/get/package_name_test.dart
index 9672214..aa716cd 100644
--- a/test/get/package_name_test.dart
+++ b/test/get/package_name_test.dart
@@ -50,9 +50,9 @@
await pubGet();
await d.dir(appPath, [
- d.packageConfigFile(
- [d.packageConfigEntry(name: 'foo.bar.baz', path: '.')],
- ),
+ d.packageConfigFile([
+ d.packageConfigEntry(name: 'foo.bar.baz', path: '.'),
+ ]),
]).validate();
});
}
diff --git a/test/get/path/absolute_path_test.dart b/test/get/path/absolute_path_test.dart
index b871268..e23db26 100644
--- a/test/get/path/absolute_path_test.dart
+++ b/test/get/path/absolute_path_test.dart
@@ -10,8 +10,10 @@
void main() {
test('path dependency with absolute path', () async {
- await d
- .dir('foo', [d.libDir('foo'), d.libPubspec('foo', '0.0.1')]).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '0.0.1'),
+ ]).create();
await d.dir(appPath, [
d.appPubspec(
diff --git a/test/get/path/absolute_symlink_test.dart b/test/get/path/absolute_symlink_test.dart
index 1f373ea..1754892 100644
--- a/test/get/path/absolute_symlink_test.dart
+++ b/test/get/path/absolute_symlink_test.dart
@@ -9,11 +9,12 @@
import '../../test_pub.dart';
void main() {
- test(
- 'generates a symlink with an absolute path if the dependency '
+ test('generates a symlink with an absolute path if the dependency '
'path was absolute', () async {
- await d
- .dir('foo', [d.libDir('foo'), d.libPubspec('foo', '0.0.1')]).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '0.0.1'),
+ ]).create();
final fooPath = d.path('foo');
await d.dir(appPath, [
diff --git a/test/get/path/empty_pubspec_test.dart b/test/get/path/empty_pubspec_test.dart
index 9e3ff28..5f3ba9a 100644
--- a/test/get/path/empty_pubspec_test.dart
+++ b/test/get/path/empty_pubspec_test.dart
@@ -24,7 +24,8 @@
await pubGet(
exitCode: exit_codes.DATA,
- error: 'Error on line 1, column 1 of '
+ error:
+ 'Error on line 1, column 1 of '
'${p.join('..', 'foo', 'pubspec.yaml')}: '
'Missing the required "name" field.',
);
diff --git a/test/get/path/no_pubspec_test.dart b/test/get/path/no_pubspec_test.dart
index 1d08702..9fe1a82 100644
--- a/test/get/path/no_pubspec_test.dart
+++ b/test/get/path/no_pubspec_test.dart
@@ -24,7 +24,8 @@
]).create();
await pubGet(
- error: 'Because myapp depends on foo from path which doesn\'t exist '
+ error:
+ 'Because myapp depends on foo from path which doesn\'t exist '
'(No pubspec.yaml found for package foo in $fooPath.), '
'version solving failed.',
exitCode: exit_codes.NO_INPUT,
diff --git a/test/get/path/nonexistent_dir_test.dart b/test/get/path/nonexistent_dir_test.dart
index df942dd..cf5af32 100644
--- a/test/get/path/nonexistent_dir_test.dart
+++ b/test/get/path/nonexistent_dir_test.dart
@@ -22,7 +22,8 @@
]).create();
await pubGet(
- error: 'Because myapp depends on foo from path which doesn\'t exist '
+ error:
+ 'Because myapp depends on foo from path which doesn\'t exist '
'(could not find package foo at "$badPath"), version solving failed.',
exitCode: exit_codes.NO_INPUT,
);
diff --git a/test/get/path/path_is_file_test.dart b/test/get/path/path_is_file_test.dart
index f6f8c41..d586621 100644
--- a/test/get/path/path_is_file_test.dart
+++ b/test/get/path/path_is_file_test.dart
@@ -11,8 +11,10 @@
void main() {
test('path dependency when path is a file', () async {
- await d
- .dir('foo', [d.libDir('foo'), d.libPubspec('foo', '0.0.1')]).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '0.0.1'),
+ ]).create();
await d.file('dummy.txt', '').create();
final dummyPath = p.join(d.sandbox, 'dummy.txt');
@@ -26,7 +28,8 @@
]).create();
await pubGet(
- error: 'Because myapp depends on foo from path which doesn\'t exist '
+ error:
+ 'Because myapp depends on foo from path which doesn\'t exist '
'(Path dependency for package foo must refer to a directory, '
'not a file. Was "$dummyPath".), version solving failed.',
exitCode: exit_codes.NO_INPUT,
diff --git a/test/get/path/relative_path_test.dart b/test/get/path/relative_path_test.dart
index fa58c4a..7acb41f 100644
--- a/test/get/path/relative_path_test.dart
+++ b/test/get/path/relative_path_test.dart
@@ -16,8 +16,10 @@
void main() {
test('can use relative path', () async {
- await d
- .dir('foo', [d.libDir('foo'), d.libPubspec('foo', '0.0.1')]).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '0.0.1'),
+ ]).create();
await d.dir(appPath, [
d.appPubspec(
@@ -65,45 +67,49 @@
]).validate();
});
- test('path is relative to containing pubspec when using --directory',
- () async {
- await d.dir('relative', [
- d.dir('foo', [
- d.libDir('foo'),
- d.libPubspec(
- 'foo',
- '0.0.1',
- deps: {
- 'bar': {'path': '../bar'},
+ test(
+ 'path is relative to containing pubspec when using --directory',
+ () async {
+ await d.dir('relative', [
+ d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec(
+ 'foo',
+ '0.0.1',
+ deps: {
+ 'bar': {'path': '../bar'},
+ },
+ ),
+ ]),
+ d.dir('bar', [d.libDir('bar'), d.libPubspec('bar', '0.0.1')]),
+ ]).create();
+
+ await d.dir(appPath, [
+ d.appPubspec(
+ dependencies: {
+ 'foo': {'path': '../relative/foo'},
},
),
- ]),
- d.dir('bar', [d.libDir('bar'), d.libPubspec('bar', '0.0.1')]),
- ]).create();
+ ]).create();
- await d.dir(appPath, [
- d.appPubspec(
- dependencies: {
- 'foo': {'path': '../relative/foo'},
- },
- ),
- ]).create();
+ await pubGet(
+ args: ['--directory', appPath],
+ workingDirectory: d.sandbox,
+ output: contains('Changed 2 dependencies in `myapp`!'),
+ );
- await pubGet(
- args: ['--directory', appPath],
- workingDirectory: d.sandbox,
- output: contains('Changed 2 dependencies in `myapp`!'),
- );
-
- await d.appPackageConfigFile([
- d.packageConfigEntry(name: 'foo', path: '../relative/foo'),
- d.packageConfigEntry(name: 'bar', path: '../relative/bar'),
- ]).validate();
- });
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(name: 'foo', path: '../relative/foo'),
+ d.packageConfigEntry(name: 'bar', path: '../relative/bar'),
+ ]).validate();
+ },
+ );
test('relative path preserved in the lockfile', () async {
- await d
- .dir('foo', [d.libDir('foo'), d.libPubspec('foo', '0.0.1')]).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '0.0.1'),
+ ]).create();
await d.dir(appPath, [
d.appPubspec(
diff --git a/test/get/path/relative_symlink_test.dart b/test/get/path/relative_symlink_test.dart
index ef5b9ca..2723723 100644
--- a/test/get/path/relative_symlink_test.dart
+++ b/test/get/path/relative_symlink_test.dart
@@ -16,11 +16,12 @@
import '../../test_pub.dart';
void main() {
- test(
- 'generates a symlink with a relative path if the dependency '
+ test('generates a symlink with a relative path if the dependency '
'path was relative', () async {
- await d
- .dir('foo', [d.libDir('foo'), d.libPubspec('foo', '0.0.1')]).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '0.0.1'),
+ ]).create();
await d.dir(appPath, [
d.appPubspec(
diff --git a/test/get/path/shared_dependency_symlink_test.dart b/test/get/path/shared_dependency_symlink_test.dart
index e0174d4..65907f0 100644
--- a/test/get/path/shared_dependency_symlink_test.dart
+++ b/test/get/path/shared_dependency_symlink_test.dart
@@ -11,10 +11,10 @@
void main() {
test('shared dependency with symlink', () async {
- await d.dir(
- 'shared',
- [d.libDir('shared'), d.libPubspec('shared', '0.0.1')],
- ).create();
+ await d.dir('shared', [
+ d.libDir('shared'),
+ d.libPubspec('shared', '0.0.1'),
+ ]).create();
await d.dir('foo', [
d.libDir('foo'),
diff --git a/test/get/path/shared_dependency_test.dart b/test/get/path/shared_dependency_test.dart
index 3a3ce8d..a481452 100644
--- a/test/get/path/shared_dependency_test.dart
+++ b/test/get/path/shared_dependency_test.dart
@@ -9,10 +9,10 @@
void main() {
test('shared dependency with same path', () async {
- await d.dir(
- 'shared',
- [d.libDir('shared'), d.libPubspec('shared', '0.0.1')],
- ).create();
+ await d.dir('shared', [
+ d.libDir('shared'),
+ d.libPubspec('shared', '0.0.1'),
+ ]).create();
await d.dir('foo', [
d.libDir('foo'),
@@ -55,10 +55,10 @@
});
test('shared dependency with paths that normalize the same', () async {
- await d.dir(
- 'shared',
- [d.libDir('shared'), d.libPubspec('shared', '0.0.1')],
- ).create();
+ await d.dir('shared', [
+ d.libDir('shared'),
+ d.libPubspec('shared', '0.0.1'),
+ ]).create();
await d.dir('foo', [
d.libDir('foo'),
diff --git a/test/get/preserve_lock_file_line_endings_test.dart b/test/get/preserve_lock_file_line_endings_test.dart
index e4e941f..0435aed 100644
--- a/test/get/preserve_lock_file_line_endings_test.dart
+++ b/test/get/preserve_lock_file_line_endings_test.dart
@@ -10,19 +10,21 @@
import '../test_pub.dart';
Future<void> main() async {
- test('pub get creates lock file with unix line endings if none exist',
- () async {
- await d.appDir().create();
+ test(
+ 'pub get creates lock file with unix line endings if none exist',
+ () async {
+ await d.appDir().create();
- await pubGet();
+ await pubGet();
- await d
- .file(
- p.join(appPath, 'pubspec.lock'),
- allOf(contains('\n'), isNot(contains('\r\n'))),
- )
- .validate();
- });
+ await d
+ .file(
+ p.join(appPath, 'pubspec.lock'),
+ allOf(contains('\n'), isNot(contains('\r\n'))),
+ )
+ .validate();
+ },
+ );
test('pub get preserves line endings of lock file', () async {
await d.appDir().create();
diff --git a/test/get/sdk_constraint_required_test.dart b/test/get/sdk_constraint_required_test.dart
index 96b13a7..e91610a 100644
--- a/test/get/sdk_constraint_required_test.dart
+++ b/test/get/sdk_constraint_required_test.dart
@@ -11,9 +11,7 @@
void main() {
test('pub get fails without an SDK constraint', () async {
await d.dir(appPath, [
- d.rawPubspec({
- 'name': 'myapp',
- }),
+ d.rawPubspec({'name': 'myapp'}),
]).create();
await pubGet(
@@ -37,12 +35,10 @@
test('pub get fails with an non-null-safety SDK constraint', () async {
await d.dir(appPath, [
- d.rawPubspec(
- {
- 'name': 'myapp',
- 'environment': {'sdk': '>=2.9.0 <4.0.0'},
- },
- ),
+ d.rawPubspec({
+ 'name': 'myapp',
+ 'environment': {'sdk': '>=2.9.0 <4.0.0'},
+ }),
]).create();
await pubGet(
diff --git a/test/get/summary_only_environment_test.dart b/test/get/summary_only_environment_test.dart
index e9c39d8..e2f7284 100644
--- a/test/get/summary_only_environment_test.dart
+++ b/test/get/summary_only_environment_test.dart
@@ -13,7 +13,8 @@
await d.appDir(dependencies: {'foo': 'any'}).create();
await pubGet(
- output: 'Resolving dependencies...\n'
+ output:
+ 'Resolving dependencies...\n'
'Downloading packages...\n'
'Got dependencies.',
silent: contains('+ foo 1.0.0'),
diff --git a/test/get/switch_source_test.dart b/test/get/switch_source_test.dart
index 2cf8432..ebe33e2 100644
--- a/test/get/switch_source_test.dart
+++ b/test/get/switch_source_test.dart
@@ -12,16 +12,18 @@
final server = await servePackages();
server.serve('foo', '1.2.3');
- await d.dir(
- 'foo',
- [d.libDir('foo', 'foo 0.0.1'), d.libPubspec('foo', '0.0.1')],
- ).create();
+ await d.dir('foo', [
+ d.libDir('foo', 'foo 0.0.1'),
+ d.libPubspec('foo', '0.0.1'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'path': '../foo'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'path': '../foo'},
+ },
+ )
+ .create();
await pubGet();
diff --git a/test/get/with_empty_environment_test.dart b/test/get/with_empty_environment_test.dart
index 28c5b02..504faac 100644
--- a/test/get/with_empty_environment_test.dart
+++ b/test/get/with_empty_environment_test.dart
@@ -15,9 +15,7 @@
await d.appDir(dependencies: {'foo': 'any'}).create();
await pubGet(
- environment: {
- '_PUB_TEST_CONFIG_DIR': null,
- },
+ environment: {'_PUB_TEST_CONFIG_DIR': null},
includeParentHomeAndPath: false,
);
});
diff --git a/test/git_test.dart b/test/git_test.dart
index 3dae020..ca5f1f9 100644
--- a/test/git_test.dart
+++ b/test/git_test.dart
@@ -10,10 +10,9 @@
void main() {
test('splitZeroTerminated works', () {
expect(splitZeroTerminated(Uint8List.fromList([])), <Uint8List>[]);
- expect(
- splitZeroTerminated(Uint8List.fromList([0])),
- <Uint8List>[Uint8List.fromList([])],
- );
+ expect(splitZeroTerminated(Uint8List.fromList([0])), <Uint8List>[
+ Uint8List.fromList([]),
+ ]);
expect(splitZeroTerminated(Uint8List.fromList([1, 0, 1])), <Uint8List>[
Uint8List.fromList([1]),
diff --git a/test/global/activate/activate_casing_test.dart b/test/global/activate/activate_casing_test.dart
index 91d3fde..eed95e6 100644
--- a/test/global/activate/activate_casing_test.dart
+++ b/test/global/activate/activate_casing_test.dart
@@ -9,36 +9,33 @@
import '../../test_pub.dart';
void main() {
- test(
- 'We only allow activating lower-case package names',
- () async {
- final server = await servePackages();
- server.serve(
- 'Foo',
- '1.0.0',
- contents: [
- d.dir('bin', [d.file('foo.dart', 'main() => print("hi"); ')]),
- ],
- );
+ test('We only allow activating lower-case package names', () async {
+ final server = await servePackages();
+ server.serve(
+ 'Foo',
+ '1.0.0',
+ contents: [
+ d.dir('bin', [d.file('foo.dart', 'main() => print("hi"); ')]),
+ ],
+ );
- await d.dir('foo', [d.libPubspec('Foo', '1.0.0')]).create();
- await runPub(
- args: ['global', 'activate', 'Foo'],
- error: '''
+ await d.dir('foo', [d.libPubspec('Foo', '1.0.0')]).create();
+ await runPub(
+ args: ['global', 'activate', 'Foo'],
+ error: '''
You can only activate packages with lower-case names.
Did you mean `foo`?''',
- exitCode: 1,
- );
+ exitCode: 1,
+ );
- await runPub(
- args: ['global', 'activate', '-spath', p.join(d.sandbox, 'foo')],
- error: '''
+ await runPub(
+ args: ['global', 'activate', '-spath', p.join(d.sandbox, 'foo')],
+ error: '''
You can only activate packages with lower-case names.
Did you mean `foo`?''',
- exitCode: 1,
- );
- },
- );
+ exitCode: 1,
+ );
+ });
}
diff --git a/test/global/activate/activate_git_after_hosted_test.dart b/test/global/activate/activate_git_after_hosted_test.dart
index aa4689b..070fce8 100644
--- a/test/global/activate/activate_git_after_hosted_test.dart
+++ b/test/global/activate/activate_git_after_hosted_test.dart
@@ -31,14 +31,18 @@
await runPub(
args: ['global', 'activate', '-sgit', '../foo.git'],
output: allOf(
- startsWith('Package foo is currently active at version 1.0.0.\n'
- 'Resolving dependencies...\n'
- 'Downloading packages...\n'
- '* foo 1.0.0 from git ..${separator}foo.git at '),
+ startsWith(
+ 'Package foo is currently active at version 1.0.0.\n'
+ 'Resolving dependencies...\n'
+ 'Downloading packages...\n'
+ '* foo 1.0.0 from git ..${separator}foo.git at ',
+ ),
// Specific revision number goes here.
- endsWith('Building package executables...\n'
- 'Built foo:foo.\n'
- 'Activated foo 1.0.0 from Git repository "..${separator}foo.git".'),
+ endsWith(
+ 'Building package executables...\n'
+ 'Built foo:foo.\n'
+ 'Activated foo 1.0.0 from Git repository "..${separator}foo.git".',
+ ),
),
);
diff --git a/test/global/activate/custom_hosted_url_test.dart b/test/global/activate/custom_hosted_url_test.dart
index cd5c2dc..fe5053e 100644
--- a/test/global/activate/custom_hosted_url_test.dart
+++ b/test/global/activate/custom_hosted_url_test.dart
@@ -15,12 +15,9 @@
// The custom pub server.
final customServer = await startPackageServer();
Map<String, dynamic> hostedDep(String name, String constraint) => {
- 'hosted': {
- 'url': customServer.url,
- 'name': name,
- },
- 'version': constraint,
- };
+ 'hosted': {'url': customServer.url, 'name': name},
+ 'version': constraint,
+ };
customServer.serve('foo', '1.0.0', deps: {'bar': hostedDep('bar', 'any')});
customServer.serve('bar', '1.0.0', deps: {'baz': 'any'});
diff --git a/test/global/activate/different_version_test.dart b/test/global/activate/different_version_test.dart
index b57d983..d93df2b 100644
--- a/test/global/activate/different_version_test.dart
+++ b/test/global/activate/different_version_test.dart
@@ -8,8 +8,7 @@
import '../../test_pub.dart';
void main() {
- test(
- "discards the previous active version if it doesn't match the "
+ test("discards the previous active version if it doesn't match the "
'constraint', () async {
await servePackages()
..serve(
diff --git a/test/global/activate/doesnt_snapshot_path_executables_test.dart b/test/global/activate/doesnt_snapshot_path_executables_test.dart
index dbc4e25..a7f3973 100644
--- a/test/global/activate/doesnt_snapshot_path_executables_test.dart
+++ b/test/global/activate/doesnt_snapshot_path_executables_test.dart
@@ -21,10 +21,10 @@
await d.dir(cachePath, [
d.dir('global_packages', [
- d.dir(
- 'foo',
- [d.file('pubspec.lock', contains('1.0.0')), d.nothing('bin')],
- ),
+ d.dir('foo', [
+ d.file('pubspec.lock', contains('1.0.0')),
+ d.nothing('bin'),
+ ]),
]),
]).validate();
});
diff --git a/test/global/activate/fails_when_active_package_is_corrupt_test.dart b/test/global/activate/fails_when_active_package_is_corrupt_test.dart
index 004cc02..0f29db9 100644
--- a/test/global/activate/fails_when_active_package_is_corrupt_test.dart
+++ b/test/global/activate/fails_when_active_package_is_corrupt_test.dart
@@ -12,41 +12,47 @@
void main() {
test(
- 'Complains if the current lockfile does not contain the expected package',
- () async {
- final server = await servePackages();
- server.serve(
- 'foo',
- '1.0.0',
- contents: [
- d.dir('bin', [d.file('foo.dart', 'main() => print("hi"); ')]),
- ],
- );
+ 'Complains if the current lockfile does not contain the expected package',
+ () async {
+ final server = await servePackages();
+ server.serve(
+ 'foo',
+ '1.0.0',
+ contents: [
+ d.dir('bin', [d.file('foo.dart', 'main() => print("hi"); ')]),
+ ],
+ );
- await runPub(args: ['global', 'activate', 'foo']);
+ await runPub(args: ['global', 'activate', 'foo']);
- // Write a bad pubspec.lock file.
- final lockFilePath =
- p.join(d.sandbox, cachePath, 'global_packages', 'foo', 'pubspec.lock');
- File(lockFilePath).writeAsStringSync('''
+ // Write a bad pubspec.lock file.
+ final lockFilePath = p.join(
+ d.sandbox,
+ cachePath,
+ 'global_packages',
+ 'foo',
+ 'pubspec.lock',
+ );
+ File(lockFilePath).writeAsStringSync('''
packages: {}
sdks: {}
''');
- // Activating it again suggests deactivating the package.
- await runPub(
- args: ['global', 'activate', 'foo'],
- error: '''
+ // Activating it again suggests deactivating the package.
+ await runPub(
+ args: ['global', 'activate', 'foo'],
+ error: '''
Could not find `foo` in `$lockFilePath`.
Your Pub cache might be corrupted.
Consider `dart pub global deactivate foo`''',
- exitCode: 1,
- );
+ exitCode: 1,
+ );
- await runPub(
- args: ['global', 'deactivate', 'foo'],
- output: 'Removed package `foo`',
- );
- });
+ await runPub(
+ args: ['global', 'deactivate', 'foo'],
+ output: 'Removed package `foo`',
+ );
+ },
+ );
}
diff --git a/test/global/activate/git_package_test.dart b/test/global/activate/git_package_test.dart
index 3ad50b7..78cad08 100644
--- a/test/global/activate/git_package_test.dart
+++ b/test/global/activate/git_package_test.dart
@@ -20,14 +20,18 @@
await runPub(
args: ['global', 'activate', '-sgit', '../foo.git'],
output: allOf(
- startsWith('Resolving dependencies...\n'
- 'Downloading packages...\n'
- '+ foo 1.0.0 from git ..${p.separator}foo.git at '),
+ startsWith(
+ 'Resolving dependencies...\n'
+ 'Downloading packages...\n'
+ '+ foo 1.0.0 from git ..${p.separator}foo.git at ',
+ ),
// Specific revision number goes here.
- endsWith('Building package executables...\n'
- 'Built foo:foo.\n'
- 'Activated foo 1.0.0 from Git repository '
- '"..${p.separator}foo.git".'),
+ endsWith(
+ 'Building package executables...\n'
+ 'Built foo:foo.\n'
+ 'Activated foo 1.0.0 from Git repository '
+ '"..${p.separator}foo.git".',
+ ),
),
);
});
@@ -38,31 +42,22 @@
await d.git('foo.git', [
d.libPubspec('foo', '0.0.0'),
d.dir('bin', [d.file('foo.dart', "main() => print('0');")]),
- d.dir(
- 'sub',
- [
- d.libPubspec('foo', '1.0.0'),
- d.dir('bin', [d.file('sub.dart', "main() => print('1');")]),
- ],
- ),
+ d.dir('sub', [
+ d.libPubspec('foo', '1.0.0'),
+ d.dir('bin', [d.file('sub.dart', "main() => print('1');")]),
+ ]),
]).create();
await d.git('foo.git', [
- d.dir(
- 'sub',
- [
- d.libPubspec('sub', '2.0.0'),
- d.dir('bin', [d.file('sub.dart', "main() => print('2');")]),
- ],
- ),
+ d.dir('sub', [
+ d.libPubspec('sub', '2.0.0'),
+ d.dir('bin', [d.file('sub.dart', "main() => print('2');")]),
+ ]),
]).commit();
await d.git('foo.git', [
- d.dir(
- 'sub',
- [
- d.libPubspec('sub', '3.0.0'),
- d.dir('bin', [d.file('sub.dart', "main() => print('3');")]),
- ],
- ),
+ d.dir('sub', [
+ d.libPubspec('sub', '3.0.0'),
+ d.dir('bin', [d.file('sub.dart', "main() => print('3');")]),
+ ]),
]).commit();
await runPub(
@@ -78,24 +73,21 @@
'--git-path=sub/',
],
output: allOf(
- startsWith('Resolving dependencies...\n'
- 'Downloading packages...\n'
- '+ sub 2.0.0 from git ..${p.separator}foo.git at'),
+ startsWith(
+ 'Resolving dependencies...\n'
+ 'Downloading packages...\n'
+ '+ sub 2.0.0 from git ..${p.separator}foo.git at',
+ ),
// Specific revision number goes here.
contains('in sub'),
- endsWith('Building package executables...\n'
- 'Built sub:sub.\n'
- 'Activated sub 2.0.0 from Git repository '
- '"..${p.separator}foo.git".'),
+ endsWith(
+ 'Building package executables...\n'
+ 'Built sub:sub.\n'
+ 'Activated sub 2.0.0 from Git repository '
+ '"..${p.separator}foo.git".',
+ ),
),
);
- await runPub(
- args: [
- 'global',
- 'run',
- 'sub',
- ],
- output: contains('2'),
- );
+ await runPub(args: ['global', 'run', 'sub'], output: contains('2'));
});
}
diff --git a/test/global/activate/ignores_active_version_test.dart b/test/global/activate/ignores_active_version_test.dart
index b144480..0c5794f 100644
--- a/test/global/activate/ignores_active_version_test.dart
+++ b/test/global/activate/ignores_active_version_test.dart
@@ -10,10 +10,7 @@
void main() {
test('ignores previously activated version', () async {
await servePackages()
- ..serve(
- 'foo',
- '1.2.3',
- )
+ ..serve('foo', '1.2.3')
..serve(
'foo',
'1.3.0',
diff --git a/test/global/activate/installs_dependencies_for_path_test.dart b/test/global/activate/installs_dependencies_for_path_test.dart
index 556b6d2..dc8218e 100644
--- a/test/global/activate/installs_dependencies_for_path_test.dart
+++ b/test/global/activate/installs_dependencies_for_path_test.dart
@@ -18,8 +18,9 @@
d.dir('bin', [d.file('foo.dart', "main() => print('ok');")]),
]).create();
- final pub =
- await startPub(args: ['global', 'activate', '-spath', '../foo']);
+ final pub = await startPub(
+ args: ['global', 'activate', '-spath', '../foo'],
+ );
expect(pub.stdout, emitsThrough('Resolving dependencies in `../foo`...'));
expect(pub.stdout, emitsThrough(startsWith('Activated foo 0.0.0 at path')));
await pub.shouldExit();
diff --git a/test/global/activate/outdated_binstub_test.dart b/test/global/activate/outdated_binstub_test.dart
index 0b8c6ba..6b078d0 100644
--- a/test/global/activate/outdated_binstub_test.dart
+++ b/test/global/activate/outdated_binstub_test.dart
@@ -27,10 +27,9 @@
'executables': {'foo-script': 'script'},
},
contents: [
- d.dir(
- 'bin',
- [d.file('script.dart', "main(args) => print('ok \$args');")],
- ),
+ d.dir('bin', [
+ d.file('script.dart', "main(args) => print('ok \$args');"),
+ ]),
],
);
diff --git a/test/global/activate/path_package_test.dart b/test/global/activate/path_package_test.dart
index 29162bd..bdc1f65 100644
--- a/test/global/activate/path_package_test.dart
+++ b/test/global/activate/path_package_test.dart
@@ -24,42 +24,44 @@
});
// Regression test for #1751
- test('activates a package at a local path with a relative path dependency',
- () async {
- await d.dir('foo', [
- d.libPubspec(
- 'foo',
- '1.0.0',
- deps: {
- 'bar': {'path': '../bar'},
- },
- ),
- d.dir('bin', [
- d.file('foo.dart', """
+ test(
+ 'activates a package at a local path with a relative path dependency',
+ () async {
+ await d.dir('foo', [
+ d.libPubspec(
+ 'foo',
+ '1.0.0',
+ deps: {
+ 'bar': {'path': '../bar'},
+ },
+ ),
+ d.dir('bin', [
+ d.file('foo.dart', """
import 'package:bar/bar.dart';
main() => print(value);
"""),
- ]),
- ]).create();
+ ]),
+ ]).create();
- await d.dir('bar', [
- d.libPubspec('bar', '1.0.0'),
- d.dir('lib', [d.file('bar.dart', "final value = 'ok';")]),
- ]).create();
+ await d.dir('bar', [
+ d.libPubspec('bar', '1.0.0'),
+ d.dir('lib', [d.file('bar.dart', "final value = 'ok';")]),
+ ]).create();
- final path = canonicalize(p.join(d.sandbox, 'foo'));
- await runPub(
- args: ['global', 'activate', '--source', 'path', '../foo'],
- output: endsWith('Activated foo 1.0.0 at path "$path".'),
- );
+ final path = canonicalize(p.join(d.sandbox, 'foo'));
+ await runPub(
+ args: ['global', 'activate', '--source', 'path', '../foo'],
+ output: endsWith('Activated foo 1.0.0 at path "$path".'),
+ );
- await runPub(
- args: ['global', 'run', 'foo'],
- output: endsWith('ok'),
- workingDirectory: p.current,
- );
- });
+ await runPub(
+ args: ['global', 'run', 'foo'],
+ output: endsWith('ok'),
+ workingDirectory: p.current,
+ );
+ },
+ );
test("Doesn't precompile binaries when activating from path", () async {
final server = await servePackages();
@@ -78,9 +80,10 @@
await runPub(
args: ['global', 'activate', '--source', 'path', '../foo'],
- output: allOf(
- [contains('Activated foo 1.0.0 at path'), isNot(contains('Built'))],
- ),
+ output: allOf([
+ contains('Activated foo 1.0.0 at path'),
+ isNot(contains('Built')),
+ ]),
);
});
}
diff --git a/test/global/activate/reactivating_git_upgrades_test.dart b/test/global/activate/reactivating_git_upgrades_test.dart
index 50a85f1..709f631 100644
--- a/test/global/activate/reactivating_git_upgrades_test.dart
+++ b/test/global/activate/reactivating_git_upgrades_test.dart
@@ -20,13 +20,17 @@
await runPub(
args: ['global', 'activate', '-sgit', '../foo.git'],
output: allOf(
- startsWith('Resolving dependencies...\n'
- 'Downloading packages...\n'
- '+ foo 1.0.0 from git ..${separator}foo.git at '),
+ startsWith(
+ 'Resolving dependencies...\n'
+ 'Downloading packages...\n'
+ '+ foo 1.0.0 from git ..${separator}foo.git at ',
+ ),
// Specific revision number goes here.
- endsWith('Building package executables...\n'
- 'Built foo:foo.\n'
- 'Activated foo 1.0.0 from Git repository "..${separator}foo.git".'),
+ endsWith(
+ 'Building package executables...\n'
+ 'Built foo:foo.\n'
+ 'Activated foo 1.0.0 from Git repository "..${separator}foo.git".',
+ ),
),
);
@@ -36,15 +40,19 @@
await runPub(
args: ['global', 'activate', '-sgit', '../foo.git'],
output: allOf(
- startsWith('Package foo is currently active from Git repository '
- '"..${separator}foo.git".\n'
- 'Resolving dependencies...\n'
- 'Downloading packages...\n'
- '> foo 1.0.1 from git ..${separator}foo.git at '),
+ startsWith(
+ 'Package foo is currently active from Git repository '
+ '"..${separator}foo.git".\n'
+ 'Resolving dependencies...\n'
+ 'Downloading packages...\n'
+ '> foo 1.0.1 from git ..${separator}foo.git at ',
+ ),
// Specific revision number goes here.
- endsWith('Building package executables...\n'
- 'Built foo:foo.\n'
- 'Activated foo 1.0.1 from Git repository "..${separator}foo.git".'),
+ endsWith(
+ 'Building package executables...\n'
+ 'Built foo:foo.\n'
+ 'Activated foo 1.0.1 from Git repository "..${separator}foo.git".',
+ ),
),
);
});
diff --git a/test/global/activate/snapshots_git_executables_test.dart b/test/global/activate/snapshots_git_executables_test.dart
index 0f2bf5f..09c8143 100644
--- a/test/global/activate/snapshots_git_executables_test.dart
+++ b/test/global/activate/snapshots_git_executables_test.dart
@@ -23,9 +23,10 @@
await runPub(
args: ['global', 'activate', '-sgit', '../foo.git'],
- output: allOf(
- [contains('Built foo:hello.'), contains('Built foo:goodbye.')],
- ),
+ output: allOf([
+ contains('Built foo:hello.'),
+ contains('Built foo:goodbye.'),
+ ]),
);
await d.dir(cachePath, [
diff --git a/test/global/activate/snapshots_hosted_executables_test.dart b/test/global/activate/snapshots_hosted_executables_test.dart
index e8783f1..6ff32a9 100644
--- a/test/global/activate/snapshots_hosted_executables_test.dart
+++ b/test/global/activate/snapshots_hosted_executables_test.dart
@@ -18,19 +18,19 @@
d.file('hello.dart', "void main() => print('hello!');"),
d.file('goodbye.dart', "void main() => print('goodbye!');"),
d.file('shell.sh', 'echo shell'),
- d.dir(
- 'subdir',
- [d.file('sub.dart', "void main() => print('sub!');")],
- ),
+ d.dir('subdir', [
+ d.file('sub.dart', "void main() => print('sub!');"),
+ ]),
]),
],
);
await runPub(
args: ['global', 'activate', 'foo'],
- output: allOf(
- [contains('Built foo:hello.'), contains('Built foo:goodbye.')],
- ),
+ output: allOf([
+ contains('Built foo:hello.'),
+ contains('Built foo:goodbye.'),
+ ]),
);
await d.dir(cachePath, [
diff --git a/test/global/activate/unknown_package_test.dart b/test/global/activate/unknown_package_test.dart
index 59ae919..2dc1974 100644
--- a/test/global/activate/unknown_package_test.dart
+++ b/test/global/activate/unknown_package_test.dart
@@ -14,8 +14,10 @@
await runPub(
args: ['global', 'activate', 'foo'],
error: allOf([
- contains("Because pub global activate depends on foo any which doesn't "
- 'exist (could not find package foo at http://localhost:'),
+ contains(
+ "Because pub global activate depends on foo any which doesn't "
+ 'exist (could not find package foo at http://localhost:',
+ ),
contains('), version solving failed.'),
]),
exitCode: exit_codes.UNAVAILABLE,
diff --git a/test/global/binstubs/binstub_runs_executable_test.dart b/test/global/binstubs/binstub_runs_executable_test.dart
index fe631d9..9317775 100644
--- a/test/global/binstubs/binstub_runs_executable_test.dart
+++ b/test/global/binstubs/binstub_runs_executable_test.dart
@@ -20,10 +20,9 @@
'executables': {'foo-script': 'script'},
},
contents: [
- d.dir(
- 'bin',
- [d.file('script.dart', "main(args) => print('ok \$args');")],
- ),
+ d.dir('bin', [
+ d.file('script.dart', "main(args) => print('ok \$args');"),
+ ]),
],
);
@@ -46,10 +45,9 @@
'name': 'foo',
'executables': {'foo-script': 'script'},
}),
- d.dir(
- 'bin',
- [d.file('script.dart', "main(args) => print('ok \$args');")],
- ),
+ d.dir('bin', [
+ d.file('script.dart', "main(args) => print('ok \$args');"),
+ ]),
]).create();
await runPub(args: ['global', 'activate', '-spath', '../foo']);
diff --git a/test/global/binstubs/binstub_runs_global_run_if_no_snapshot_test.dart b/test/global/binstubs/binstub_runs_global_run_if_no_snapshot_test.dart
index 85b92c4..11eb4d2 100644
--- a/test/global/binstubs/binstub_runs_global_run_if_no_snapshot_test.dart
+++ b/test/global/binstubs/binstub_runs_global_run_if_no_snapshot_test.dart
@@ -28,16 +28,12 @@
await d.dir(cachePath, [
d.dir('bin', [
- d.file(
- binStubName('foo-script'),
- contains('global run foo:script'),
- ),
+ d.file(binStubName('foo-script'), contains('global run foo:script')),
]),
]).validate();
});
- test(
- 'the binstubs of hosted package runs pub global run '
+ test('the binstubs of hosted package runs pub global run '
'if there is no snapshot', () async {
final server = await servePackages();
server.serve(
@@ -59,10 +55,7 @@
await d.dir(cachePath, [
d.dir('bin', [
- d.file(
- binStubName('foo-script'),
- contains('global run foo:script'),
- ),
+ d.file(binStubName('foo-script'), contains('global run foo:script')),
]),
]).validate();
@@ -76,18 +69,18 @@
'bin',
'foo-script${Platform.isWindows ? '.bat' : ''}',
);
- final result =
- await Process.run(binstub, [], environment: getPubTestEnvironment());
+ final result = await Process.run(
+ binstub,
+ [],
+ environment: getPubTestEnvironment(),
+ );
expect(result.stderr, '');
expect(result.exitCode, 0);
expect(result.stdout, contains('ok'));
await d.dir(cachePath, [
d.dir('bin', [
- d.file(
- binStubName('foo-script'),
- contains('global run foo:script'),
- ),
+ d.file(binStubName('foo-script'), contains('global run foo:script')),
]),
]).validate();
});
diff --git a/test/global/binstubs/does_not_warn_if_no_executables_test.dart b/test/global/binstubs/does_not_warn_if_no_executables_test.dart
index 869a4d8..2acb8ea 100644
--- a/test/global/binstubs/does_not_warn_if_no_executables_test.dart
+++ b/test/global/binstubs/does_not_warn_if_no_executables_test.dart
@@ -14,10 +14,9 @@
'foo',
'1.0.0',
contents: [
- d.dir(
- 'bin',
- [d.file('script.dart', "main(args) => print('ok \$args');")],
- ),
+ d.dir('bin', [
+ d.file('script.dart', "main(args) => print('ok \$args');"),
+ ]),
],
);
diff --git a/test/global/binstubs/does_not_warn_if_on_path_test.dart b/test/global/binstubs/does_not_warn_if_on_path_test.dart
index dbe6654..ef72179 100644
--- a/test/global/binstubs/does_not_warn_if_on_path_test.dart
+++ b/test/global/binstubs/does_not_warn_if_on_path_test.dart
@@ -20,10 +20,9 @@
'executables': {'script': null},
},
contents: [
- d.dir(
- 'bin',
- [d.file('script.dart', "main(args) => print('ok \$args');")],
- ),
+ d.dir('bin', [
+ d.file('script.dart', "main(args) => print('ok \$args');"),
+ ]),
],
);
diff --git a/test/global/binstubs/missing_script_test.dart b/test/global/binstubs/missing_script_test.dart
index 61915dc..32fae7c 100644
--- a/test/global/binstubs/missing_script_test.dart
+++ b/test/global/binstubs/missing_script_test.dart
@@ -17,18 +17,23 @@
}),
]).create();
- final pub =
- await startPub(args: ['global', 'activate', '-spath', '../foo']);
+ final pub = await startPub(
+ args: ['global', 'activate', '-spath', '../foo'],
+ );
expect(
pub.stderr,
- emits('Warning: Executable "missing" runs '
- '"${p.join('bin', 'not_here.dart')}", which was not found in foo.'),
+ emits(
+ 'Warning: Executable "missing" runs '
+ '"${p.join('bin', 'not_here.dart')}", which was not found in foo.',
+ ),
);
expect(
pub.stderr,
- emits('Warning: Executable "nope" runs '
- '"${p.join('bin', 'nope.dart')}", which was not found in foo.'),
+ emits(
+ 'Warning: Executable "nope" runs '
+ '"${p.join('bin', 'nope.dart')}", which was not found in foo.',
+ ),
);
await pub.shouldExit();
});
diff --git a/test/global/binstubs/name_collision_test.dart b/test/global/binstubs/name_collision_test.dart
index 7e1c33f..e580041 100644
--- a/test/global/binstubs/name_collision_test.dart
+++ b/test/global/binstubs/name_collision_test.dart
@@ -27,8 +27,9 @@
await runPub(args: ['global', 'activate', '-spath', '../foo']);
- final pub =
- await startPub(args: ['global', 'activate', '-spath', '../bar']);
+ final pub = await startPub(
+ args: ['global', 'activate', '-spath', '../bar'],
+ );
expect(pub.stdout, emitsThrough('Installed executable bar.'));
expect(
pub.stderr,
@@ -40,8 +41,10 @@
);
expect(
pub.stderr,
- emits('Deactivate the other package(s) or activate bar using '
- '--overwrite.'),
+ emits(
+ 'Deactivate the other package(s) or activate bar using '
+ '--overwrite.',
+ ),
);
await pub.shouldExit();
diff --git a/test/global/binstubs/outdated_binstub_runs_pub_global_test.dart b/test/global/binstubs/outdated_binstub_runs_pub_global_test.dart
index 8325c16..5ca957b 100644
--- a/test/global/binstubs/outdated_binstub_runs_pub_global_test.dart
+++ b/test/global/binstubs/outdated_binstub_runs_pub_global_test.dart
@@ -22,117 +22,106 @@
}
void main() {
- test("an outdated binstub runs 'pub global run', which replaces old binstub",
- () async {
- final server = await servePackages();
- server.serve(
- 'foo',
- '1.0.0',
- pubspec: {
- 'executables': {
- 'foo-script': 'script',
- 'foo-script2': 'script',
- 'foo-script-not-installed': 'script',
- 'foo-another-script': 'another-script',
- 'foo-another-script-not-installed': 'another-script',
- },
- },
- contents: [
- d.dir('bin', [
- d.file('script.dart', r"main(args) => print('ok $args');"),
- d.file(
- 'another-script.dart',
- r"main(args) => print('not so good $args');",
- ),
- ]),
- ],
- );
-
- await runPub(
- args: [
- 'global',
- 'activate',
+ test(
+ "an outdated binstub runs 'pub global run', which replaces old binstub",
+ () async {
+ final server = await servePackages();
+ server.serve(
'foo',
- '--executable',
- 'foo-script',
- '--executable',
- 'foo-script2',
- '--executable',
- 'foo-another-script',
- ],
- environment: {'_PUB_TEST_SDK_VERSION': '3.0.0'},
- );
+ '1.0.0',
+ pubspec: {
+ 'executables': {
+ 'foo-script': 'script',
+ 'foo-script2': 'script',
+ 'foo-script-not-installed': 'script',
+ 'foo-another-script': 'another-script',
+ 'foo-another-script-not-installed': 'another-script',
+ },
+ },
+ contents: [
+ d.dir('bin', [
+ d.file('script.dart', r"main(args) => print('ok $args');"),
+ d.file(
+ 'another-script.dart',
+ r"main(args) => print('not so good $args');",
+ ),
+ ]),
+ ],
+ );
- expect(binStub('foo-script'), contains('script.dart-3.0.0.snapshot'));
+ await runPub(
+ args: [
+ 'global',
+ 'activate',
+ 'foo',
+ '--executable',
+ 'foo-script',
+ '--executable',
+ 'foo-script2',
+ '--executable',
+ 'foo-another-script',
+ ],
+ environment: {'_PUB_TEST_SDK_VERSION': '3.0.0'},
+ );
- expect(binStub('foo-script2'), contains('script.dart-3.0.0.snapshot'));
+ expect(binStub('foo-script'), contains('script.dart-3.0.0.snapshot'));
- expect(
- binStub('foo-script-not-installed'),
- null,
- );
+ expect(binStub('foo-script2'), contains('script.dart-3.0.0.snapshot'));
- expect(
- binStub('foo-another-script'),
- contains('another-script.dart-3.0.0.snapshot'),
- );
+ expect(binStub('foo-script-not-installed'), null);
- expect(
- binStub('foo-another-script-not-installed'),
- null,
- );
+ expect(
+ binStub('foo-another-script'),
+ contains('another-script.dart-3.0.0.snapshot'),
+ );
- // Replace the created snapshot with one that really doesn't work with the
- // current dart.
- await d.dir(cachePath, [
- d.dir('global_packages', [
- d.dir('foo', [
- d.dir(
- 'bin',
- [d.outOfDateSnapshot('script.dart-3.0.0.snapshot')],
- ),
+ expect(binStub('foo-another-script-not-installed'), null);
+
+ // Replace the created snapshot with one that really doesn't work with the
+ // current dart.
+ await d.dir(cachePath, [
+ d.dir('global_packages', [
+ d.dir('foo', [
+ d.dir('bin', [d.outOfDateSnapshot('script.dart-3.0.0.snapshot')]),
+ ]),
]),
- ]),
- ]).create();
+ ]).create();
- final process = await TestProcess.start(
- p.join(d.sandbox, cachePath, 'bin', binStubName('foo-script')),
- ['arg1', 'arg2'],
- environment: getEnvironment(),
- );
+ final process = await TestProcess.start(
+ p.join(d.sandbox, cachePath, 'bin', binStubName('foo-script')),
+ ['arg1', 'arg2'],
+ environment: getEnvironment(),
+ );
- expect(await process.stdout.rest.toList(), contains('ok [arg1, arg2]'));
+ expect(await process.stdout.rest.toList(), contains('ok [arg1, arg2]'));
- expect(
- binStub('foo-script'),
- contains('script.dart-3.1.2+3.snapshot'),
- );
+ expect(binStub('foo-script'), contains('script.dart-3.1.2+3.snapshot'));
- expect(
- binStub('foo-script2'),
- contains('script.dart-3.1.2+3.snapshot'),
- );
+ expect(binStub('foo-script2'), contains('script.dart-3.1.2+3.snapshot'));
- expect(
- binStub('foo-script-not-installed'),
- null,
- reason: 'global run recompile should not install new binstubs',
- );
+ expect(
+ binStub('foo-script-not-installed'),
+ null,
+ reason: 'global run recompile should not install new binstubs',
+ );
- expect(
- binStub('foo-another-script'),
- contains('another-script.dart-3.0.0.snapshot'),
- reason:
- 'global run recompile should not refresh binstubs for other scripts',
- );
+ expect(
+ binStub('foo-another-script'),
+ contains('another-script.dart-3.0.0.snapshot'),
+ reason:
+ 'global run recompile should not '
+ 'refresh binstubs for other scripts',
+ );
- expect(
- binStub('foo-another-script-not-installed'),
- null,
- reason:
- 'global run recompile should not install binstubs for other scripts',
- );
+ expect(
+ binStub('foo-another-script-not-installed'),
+ null,
+ reason:
+ 'global run recompile should not '
+ 'install binstubs for other scripts',
+ );
- await process.shouldExit();
- });
+ await process.shouldExit();
+ },
+ );
}
diff --git a/test/global/binstubs/outdated_snapshot_test.dart b/test/global/binstubs/outdated_snapshot_test.dart
index 36f9b50..5c09518 100644
--- a/test/global/binstubs/outdated_snapshot_test.dart
+++ b/test/global/binstubs/outdated_snapshot_test.dart
@@ -21,10 +21,9 @@
'executables': {'foo-script': 'script'},
},
contents: [
- d.dir(
- 'bin',
- [d.file('script.dart', "main(args) => print('ok \$args');")],
- ),
+ d.dir('bin', [
+ d.file('script.dart', "main(args) => print('ok \$args');"),
+ ]),
],
);
@@ -33,10 +32,9 @@
await d.dir(cachePath, [
d.dir('global_packages', [
d.dir('foo', [
- d.dir(
- 'bin',
- [d.outOfDateSnapshot('script.dart-$versionSuffix.snapshot-1')],
- ),
+ d.dir('bin', [
+ d.outOfDateSnapshot('script.dart-$versionSuffix.snapshot-1'),
+ ]),
]),
]),
]).create();
diff --git a/test/global/binstubs/path_package_test.dart b/test/global/binstubs/path_package_test.dart
index 6cdc37f..359b205 100644
--- a/test/global/binstubs/path_package_test.dart
+++ b/test/global/binstubs/path_package_test.dart
@@ -23,10 +23,9 @@
);
await d.dir(cachePath, [
- d.dir(
- 'bin',
- [d.file(binStubName('foo'), contains('global run foo:foo'))],
- ),
+ d.dir('bin', [
+ d.file(binStubName('foo'), contains('global run foo:foo')),
+ ]),
]).validate();
});
}
diff --git a/test/global/binstubs/removes_when_deactivated_test.dart b/test/global/binstubs/removes_when_deactivated_test.dart
index 664af05..4dabe2a 100644
--- a/test/global/binstubs/removes_when_deactivated_test.dart
+++ b/test/global/binstubs/removes_when_deactivated_test.dart
@@ -28,10 +28,10 @@
await runPub(args: ['global', 'deactivate', 'foo']);
await d.dir(cachePath, [
- d.dir(
- 'bin',
- [d.nothing(binStubName('one')), d.nothing(binStubName('two'))],
- ),
+ d.dir('bin', [
+ d.nothing(binStubName('one')),
+ d.nothing(binStubName('two')),
+ ]),
]).validate();
});
}
diff --git a/test/global/binstubs/runs_once_even_when_dart_is_batch_test.dart b/test/global/binstubs/runs_once_even_when_dart_is_batch_test.dart
index 777cc84..2498480 100644
--- a/test/global/binstubs/runs_once_even_when_dart_is_batch_test.dart
+++ b/test/global/binstubs/runs_once_even_when_dart_is_batch_test.dart
@@ -28,15 +28,12 @@
await runPub(args: ['global', 'activate', 'foo']);
- await d.dir(
- 'bin',
- [
- d.file('dart.bat', '''
+ await d.dir('bin', [
+ d.file('dart.bat', '''
@echo off
${Platform.resolvedExecutable} %*
'''),
- ],
- ).create();
+ ]).create();
final process = await Process.run(
p.join(d.sandbox, cachePath, 'bin', 'script.bat'),
diff --git a/test/global/binstubs/warns_if_not_on_path_test.dart b/test/global/binstubs/warns_if_not_on_path_test.dart
index d2cb042..3d87c79 100644
--- a/test/global/binstubs/warns_if_not_on_path_test.dart
+++ b/test/global/binstubs/warns_if_not_on_path_test.dart
@@ -17,10 +17,9 @@
'executables': {'some-dart-script': 'script'},
},
contents: [
- d.dir(
- 'bin',
- [d.file('script.dart', "main(args) => print('ok \$args');")],
- ),
+ d.dir('bin', [
+ d.file('script.dart', "main(args) => print('ok \$args');"),
+ ]),
],
);
diff --git a/test/global/deactivate/git_package_test.dart b/test/global/deactivate/git_package_test.dart
index 59dd322..f522f87 100644
--- a/test/global/deactivate/git_package_test.dart
+++ b/test/global/deactivate/git_package_test.dart
@@ -21,7 +21,8 @@
await runPub(
args: ['global', 'deactivate', 'foo'],
- output: 'Deactivated package foo 1.0.0 from Git repository '
+ output:
+ 'Deactivated package foo 1.0.0 from Git repository '
'"..${separator}foo.git".',
);
});
diff --git a/test/global/run/fails_if_sdk_constraint_is_unmet_test.dart b/test/global/run/fails_if_sdk_constraint_is_unmet_test.dart
index 5631fe3..77ac65f 100644
--- a/test/global/run/fails_if_sdk_constraint_is_unmet_test.dart
+++ b/test/global/run/fails_if_sdk_constraint_is_unmet_test.dart
@@ -28,8 +28,9 @@
await runPub(
args: ['global', 'run', 'foo:script'],
- error:
- contains("foo as globally activated doesn't support Dart 3.1.2+4."),
+ error: contains(
+ "foo as globally activated doesn't support Dart 3.1.2+4.",
+ ),
exitCode: exit_codes.DATA,
environment: {'_PUB_TEST_SDK_VERSION': '3.1.2+4'},
);
@@ -69,24 +70,19 @@
..serve(
'foo',
'1.0.0',
- deps: {
- 'bar': '^1.0.0',
- },
+ deps: {'bar': '^1.0.0'},
sdk: '^3.0.0',
contents: [
- d.dir(
- 'bin',
- [d.file('script.dart', "main(args) => print('123-OK');")],
- ),
+ d.dir('bin', [
+ d.file('script.dart', "main(args) => print('123-OK');"),
+ ]),
],
)
..serve(
'bar',
'1.0.0',
pubspec: {
- 'environment': {
- 'sdk': '^3.0.1',
- },
+ 'environment': {'sdk': '^3.0.1'},
},
);
@@ -100,13 +96,11 @@
await runPub(
environment: {'_PUB_TEST_SDK_VERSION': '3.0.0'},
args: ['global', 'run', 'foo:script'],
- error: contains(
- """
+ error: contains("""
foo as globally activated doesn't support Dart 3.0.0.
try:
-`dart pub global activate foo` to reactivate.""",
- ),
+`dart pub global activate foo` to reactivate."""),
exitCode: exit_codes.DATA,
);
});
@@ -118,10 +112,7 @@
'1.0.0',
sdk: '^2.19.0',
contents: [
- d.dir(
- 'bin',
- [d.file('script.dart', "main(args) => print('123-OK');")],
- ),
+ d.dir('bin', [d.file('script.dart', "main(args) => print('123-OK');")]),
],
);
diff --git a/test/global/run/nonexistent_script_test.dart b/test/global/run/nonexistent_script_test.dart
index 90e3fb0..4acc0c2 100644
--- a/test/global/run/nonexistent_script_test.dart
+++ b/test/global/run/nonexistent_script_test.dart
@@ -24,9 +24,7 @@
final pub = await pubRun(global: true, args: ['foo:script']);
expect(
pub.stderr,
- emits(
- "Could not find ${p.join("bin", "script.dart")} in package foo.",
- ),
+ emits("Could not find ${p.join("bin", "script.dart")} in package foo."),
);
await pub.shouldExit(exit_codes.NO_INPUT);
});
diff --git a/test/global/run/package_api_test.dart b/test/global/run/package_api_test.dart
index 6d849c3..4a61658 100644
--- a/test/global/run/package_api_test.dart
+++ b/test/global/run/package_api_test.dart
@@ -44,28 +44,33 @@
);
expect(pub.stdout, emits(p.toUri(packageConfigPath).toString()));
- final fooResourcePath =
- p.join(globalServer.pathInCache('foo', '1.0.0'), 'lib/resource.txt');
+ final fooResourcePath = p.join(
+ globalServer.pathInCache('foo', '1.0.0'),
+ 'lib/resource.txt',
+ );
expect(pub.stdout, emits(p.toUri(fooResourcePath).toString()));
- final barResourcePath =
- p.join(globalServer.pathInCache('bar', '1.0.0'), 'lib/resource.txt');
+ final barResourcePath = p.join(
+ globalServer.pathInCache('bar', '1.0.0'),
+ 'lib/resource.txt',
+ );
expect(pub.stdout, emits(p.toUri(barResourcePath).toString()));
await pub.shouldExit(0);
});
- test('a mutable untransformed application sees a file: package root',
- () async {
- await d.dir('foo', [d.libPubspec('foo', '1.0.0')]).create();
+ test(
+ 'a mutable untransformed application sees a file: package root',
+ () async {
+ await d.dir('foo', [d.libPubspec('foo', '1.0.0')]).create();
- await d.dir(appPath, [
- d.appPubspec(
- dependencies: {
- 'foo': {'path': '../foo'},
- },
- ),
- d.dir('bin', [
- d.file('script.dart', """
+ await d.dir(appPath, [
+ d.appPubspec(
+ dependencies: {
+ 'foo': {'path': '../foo'},
+ },
+ ),
+ d.dir('bin', [
+ d.file('script.dart', """
import 'dart:isolate';
main() async {
@@ -76,22 +81,25 @@
Uri.parse('package:foo/resource.txt')));
}
"""),
- ]),
- ]).create();
+ ]),
+ ]).create();
- await runPub(args: ['global', 'activate', '-s', 'path', '.']);
+ await runPub(args: ['global', 'activate', '-s', 'path', '.']);
- final pub = await pubRun(global: true, args: ['myapp:script']);
+ final pub = await pubRun(global: true, args: ['myapp:script']);
- final packageConfigPath =
- p.join(d.sandbox, 'myapp/.dart_tool/package_config.json');
- expect(pub.stdout, emitsThrough(p.toUri(packageConfigPath).toString()));
+ final packageConfigPath = p.join(
+ d.sandbox,
+ 'myapp/.dart_tool/package_config.json',
+ );
+ expect(pub.stdout, emitsThrough(p.toUri(packageConfigPath).toString()));
- final myappResourcePath = p.join(d.sandbox, 'myapp/lib/resource.txt');
- expect(pub.stdout, emits(p.toUri(myappResourcePath).toString()));
+ final myappResourcePath = p.join(d.sandbox, 'myapp/lib/resource.txt');
+ expect(pub.stdout, emits(p.toUri(myappResourcePath).toString()));
- final fooResourcePath = p.join(d.sandbox, 'foo/lib/resource.txt');
- expect(pub.stdout, emits(p.toUri(fooResourcePath).toString()));
- await pub.shouldExit(0);
- });
+ final fooResourcePath = p.join(d.sandbox, 'foo/lib/resource.txt');
+ expect(pub.stdout, emits(p.toUri(fooResourcePath).toString()));
+ await pub.shouldExit(0);
+ },
+ );
}
diff --git a/test/global/run/recompiles_if_snapshot_is_out_of_date_test.dart b/test/global/run/recompiles_if_snapshot_is_out_of_date_test.dart
index 01a03d6..caa2b02 100644
--- a/test/global/run/recompiles_if_snapshot_is_out_of_date_test.dart
+++ b/test/global/run/recompiles_if_snapshot_is_out_of_date_test.dart
@@ -56,10 +56,9 @@
await d.dir(cachePath, [
d.dir('global_packages', [
d.dir('foo', [
- d.dir(
- 'bin',
- [d.file('script.dart-$versionSuffix.snapshot', contains('ok'))],
- ),
+ d.dir('bin', [
+ d.file('script.dart-$versionSuffix.snapshot', contains('ok')),
+ ]),
]),
]),
]).validate();
@@ -70,9 +69,7 @@
server.serve(
'foo',
'1.0.0',
- deps: {
- 'bar': 'any',
- },
+ deps: {'bar': 'any'},
contents: [
d.dir('bin', [
d.file('foo.dart', 'import "package:bar/bar.dart"; main() => bar();'),
@@ -84,28 +81,19 @@
'bar',
'1.0.0',
contents: [
- d.dir('lib', [
- d.file('bar.dart', 'bar() => print("original");'),
- ]),
+ d.dir('lib', [d.file('bar.dart', 'bar() => print("original");')]),
],
);
- await runPub(
- args: ['global', 'activate', 'foo'],
- );
+ await runPub(args: ['global', 'activate', 'foo']);
- await runPub(
- args: ['global', 'run', 'foo'],
- output: 'original',
- );
+ await runPub(args: ['global', 'run', 'foo'], output: 'original');
server.serve(
'bar',
'1.0.0',
contents: [
- d.dir('lib', [
- d.file('foo.dart', 'foo() => print("updated");'),
- ]),
+ d.dir('lib', [d.file('foo.dart', 'foo() => print("updated");')]),
],
);
@@ -160,94 +148,92 @@
);
});
- test('validate resolution before recompilation - updated sdk package',
- () async {
- final server = await servePackages();
- server.serve(
- 'foo',
- '1.0.0',
- deps: {
- 'bar': {'sdk': 'dart', 'version': '^1.0.0'},
- },
- contents: [
- d.dir('bin', [
- d.file('foo.dart', 'main() => print("foo");'),
- ]),
- ],
- );
+ test(
+ 'validate resolution before recompilation - updated sdk package',
+ () async {
+ final server = await servePackages();
+ server.serve(
+ 'foo',
+ '1.0.0',
+ deps: {
+ 'bar': {'sdk': 'dart', 'version': '^1.0.0'},
+ },
+ contents: [
+ d.dir('bin', [d.file('foo.dart', 'main() => print("foo");')]),
+ ],
+ );
- await d.dir('dart', [
- d.dir('packages', [
- d.dir('bar', [
- d.libPubspec('bar', '1.0.0', deps: {}),
+ await d.dir('dart', [
+ d.dir('packages', [
+ d.dir('bar', [d.libPubspec('bar', '1.0.0', deps: {})]),
]),
- ]),
- d.sdkPackagesConfig(
- SdkPackageConfig(
- 'dart',
- {'bar': SdkPackage('bar', 'packages/bar')},
- 1,
+ d.sdkPackagesConfig(
+ SdkPackageConfig('dart', {
+ 'bar': SdkPackage('bar', 'packages/bar'),
+ }, 1),
),
- ),
- ]).create();
+ ]).create();
- await runPub(
- args: ['global', 'activate', 'foo'],
- environment: {'DART_ROOT': p.join(d.sandbox, 'dart')},
- );
+ await runPub(
+ args: ['global', 'activate', 'foo'],
+ environment: {'DART_ROOT': p.join(d.sandbox, 'dart')},
+ );
- await runPub(
- args: ['global', 'run', 'foo'],
- environment: {'DART_ROOT': p.join(d.sandbox, 'dart')},
- output: 'foo',
- );
+ await runPub(
+ args: ['global', 'run', 'foo'],
+ environment: {'DART_ROOT': p.join(d.sandbox, 'dart')},
+ output: 'foo',
+ );
- await d.dir('dart', [
- d.dir('packages', [
- d.dir('bar', [
- // Within constraint, but doesn't satisfy pubspec.lock.
- d.libPubspec('bar', '1.2.0', deps: {}),
+ await d.dir('dart', [
+ d.dir('packages', [
+ d.dir('bar', [
+ // Within constraint, but doesn't satisfy pubspec.lock.
+ d.libPubspec('bar', '1.2.0', deps: {}),
+ ]),
]),
- ]),
- ]).create();
+ ]).create();
- await runPub(
- args: ['global', 'run', 'foo'],
- environment: {
- 'DART_ROOT': p.join(d.sandbox, 'dart'),
- '_PUB_TEST_SDK_VERSION': '3.2.1+4',
- },
- output: contains('> bar 1.2.0 from sdk dart (was 1.0.0 from sdk dart)'),
- error: allOf(
- contains(
- 'The current activation of `foo` is not compatible with your '
- 'current SDK.',
+ await runPub(
+ args: ['global', 'run', 'foo'],
+ environment: {
+ 'DART_ROOT': p.join(d.sandbox, 'dart'),
+ '_PUB_TEST_SDK_VERSION': '3.2.1+4',
+ },
+ output: contains('> bar 1.2.0 from sdk dart (was 1.0.0 from sdk dart)'),
+ error: allOf(
+ contains(
+ 'The current activation of `foo` is not compatible with your '
+ 'current SDK.',
+ ),
+ contains('Try reactivating the package'),
),
- contains('Try reactivating the package'),
- ),
- exitCode: DATA,
- );
+ exitCode: DATA,
+ );
- await d.dir('dart', [
- d.dir('packages', [
- d.dir('bar', [
- // Doesn't fulfill constraint, but doesn't satisfy pubspec.lock.
- d.libPubspec('bar', '2.0.0', deps: {}),
+ await d.dir('dart', [
+ d.dir('packages', [
+ d.dir('bar', [
+ // Doesn't fulfill constraint, but doesn't satisfy pubspec.lock.
+ d.libPubspec('bar', '2.0.0', deps: {}),
+ ]),
]),
- ]),
- ]).create();
- await runPub(
- args: ['global', 'run', 'foo'],
- environment: {
- 'DART_ROOT': p.join(d.sandbox, 'dart'),
- '_PUB_TEST_SDK_VERSION': '3.2.1+4',
- },
- error: allOf(
- contains('Because every version of foo depends on bar ^1.0.0 from sdk'),
- contains('The package `foo` as currently activated cannot resolve.'),
- contains('Try reactivating the package'),
- ),
- exitCode: 1,
- );
- });
+ ]).create();
+ await runPub(
+ args: ['global', 'run', 'foo'],
+ environment: {
+ 'DART_ROOT': p.join(d.sandbox, 'dart'),
+ '_PUB_TEST_SDK_VERSION': '3.2.1+4',
+ },
+ error: allOf(
+ contains(
+ 'Because every version of foo depends on bar ^1.0.0 from sdk',
+ ),
+ contains('The package `foo` as currently activated cannot resolve.'),
+ contains('Try reactivating the package'),
+ ),
+ exitCode: 1,
+ );
+ },
+ );
}
diff --git a/test/global/run/runs_script_in_checked_mode_test.dart b/test/global/run/runs_script_in_checked_mode_test.dart
index 4d080c2..c8fc40d 100644
--- a/test/global/run/runs_script_in_checked_mode_test.dart
+++ b/test/global/run/runs_script_in_checked_mode_test.dart
@@ -20,8 +20,10 @@
await runPub(args: ['global', 'activate', 'foo']);
- final pub =
- await pubRun(global: true, args: ['--enable-asserts', 'foo:script']);
+ final pub = await pubRun(
+ global: true,
+ args: ['--enable-asserts', 'foo:script'],
+ );
expect(pub.stderr, emitsThrough(contains('Failed assertion')));
await pub.shouldExit(255);
});
diff --git a/test/golden_file.dart b/test/golden_file.dart
index 3090483..ad080cb 100644
--- a/test/golden_file.dart
+++ b/test/golden_file.dart
@@ -21,18 +21,19 @@
}();
/// Find the current `_test.dart` filename invoked from stack-trace.
-String _findCurrentTestFilename() => Trace.current()
- .frames
- .lastWhere(
- (frame) =>
- frame.uri.isScheme('file') &&
- p.basename(frame.uri.toFilePath()).endsWith('_test.dart'),
- )
- .uri
- .toFilePath();
+String _findCurrentTestFilename() =>
+ Trace.current().frames
+ .lastWhere(
+ (frame) =>
+ frame.uri.isScheme('file') &&
+ p.basename(frame.uri.toFilePath()).endsWith('_test.dart'),
+ )
+ .uri
+ .toFilePath();
class GoldenTestContext {
- static const _endOfSection = ''
+ static const _endOfSection =
+ ''
'--------------------------------'
' END OF OUTPUT '
'---------------------------------\n\n';
@@ -49,27 +50,29 @@
String currentTestFile,
String testName, {
required this.colors,
- }) : _goldenFile = File(
- p.join(
- 'test',
- 'testdata',
- 'goldens',
- p.relative(
- currentTestFile.replaceAll(RegExp(r'\.dart$'), ''),
- from: p.join(p.current, 'test'),
- ),
- // Sanitize the name, and add .ans or .txt.
- '${testName.replaceAll(RegExp(r'[<>:"/\|?*%#]'), '~')}.'
- '${colors ? 'ans' : 'txt'}',
- ),
- ),
- _header = '# GENERATED BY: ${p.relative(currentTestFile)}\n\n';
+ }) : _goldenFile = File(
+ p.join(
+ 'test',
+ 'testdata',
+ 'goldens',
+ p.relative(
+ currentTestFile.replaceAll(RegExp(r'\.dart$'), ''),
+ from: p.join(p.current, 'test'),
+ ),
+ // Sanitize the name, and add .ans or .txt.
+ '${testName.replaceAll(RegExp(r'[<>:"/\|?*%#]'), '~')}.'
+ '${colors ? 'ans' : 'txt'}',
+ ),
+ ),
+ _header = '# GENERATED BY: ${p.relative(currentTestFile)}\n\n';
String get _goldenFilePath => _goldenFile.path;
void _readGoldenFile() {
- if (RegExp(r'^1|(?:true)$', caseSensitive: false)
- .hasMatch(Platform.environment['_PUB_TEST_WRITE_GOLDEN'] ?? '') ||
+ if (RegExp(
+ r'^1|(?:true)$',
+ caseSensitive: false,
+ ).hasMatch(Platform.environment['_PUB_TEST_WRITE_GOLDEN'] ?? '') ||
!_goldenFile.existsSync()) {
_shouldRegenerateGolden = true;
} else {
@@ -126,8 +129,10 @@
// exist, or is missing entries.
// This typically happens if we forgot to commit a file to git.
if (_isCI) {
- fail('Missing golden file: "$_goldenFilePath", '
- 'try running tests again and commit the file');
+ fail(
+ 'Missing golden file: "$_goldenFilePath", '
+ 'try running tests again and commit the file',
+ );
} else {
// If not running in CI, then we consider the test as skipped, we've
// generated the file, but the user should run the tests again.
diff --git a/test/help_test.dart b/test/help_test.dart
index 3f27ff4..045bca0 100644
--- a/test/help_test.dart
+++ b/test/help_test.dart
@@ -30,10 +30,8 @@
yield* cmds
.where((sub) => !sub.hidden && names.add(sub.name))
.map(
- (sub) => _extractCommands(
- [...parents, sub.name],
- sub.subcommands.values,
- ),
+ (sub) =>
+ _extractCommands([...parents, sub.name], sub.subcommands.values),
)
.expand((cmds) => cmds);
}
diff --git a/test/hosted/fail_gracefully_on_bad_version_listing_response_test.dart b/test/hosted/fail_gracefully_on_bad_version_listing_response_test.dart
index d247245..839b612 100644
--- a/test/hosted/fail_gracefully_on_bad_version_listing_response_test.dart
+++ b/test/hosted/fail_gracefully_on_bad_version_listing_response_test.dart
@@ -14,10 +14,9 @@
void main() {
forBothPubGetAndUpgrade((command) {
- test(
- 'fails gracefully '
- 'if the package server responds with broken package listings',
- () async {
+ test('fails gracefully '
+ 'if the package server responds '
+ 'with broken package listings', () async {
final server = await servePackages();
server.serve('foo', '1.2.3');
server.expect(
diff --git a/test/hosted/fail_gracefully_on_invalid_url_test.dart b/test/hosted/fail_gracefully_on_invalid_url_test.dart
index 11cbccd..b92defc 100644
--- a/test/hosted/fail_gracefully_on_invalid_url_test.dart
+++ b/test/hosted/fail_gracefully_on_invalid_url_test.dart
@@ -25,9 +25,7 @@
command,
error: contains('url scheme must be https:// or http://'),
exitCode: exit_codes.DATA,
- environment: {
- 'PUB_MAX_HTTP_RETRIES': '2',
- },
+ environment: {'PUB_MAX_HTTP_RETRIES': '2'},
);
});
test('fails gracefully if the url has querystring', () async {
@@ -45,9 +43,7 @@
command,
error: contains('querystring'),
exitCode: exit_codes.DATA,
- environment: {
- 'PUB_MAX_HTTP_RETRIES': '2',
- },
+ environment: {'PUB_MAX_HTTP_RETRIES': '2'},
);
});
@@ -66,9 +62,7 @@
command,
error: contains('fragment'),
exitCode: exit_codes.DATA,
- environment: {
- 'PUB_MAX_HTTP_RETRIES': '2',
- },
+ environment: {'PUB_MAX_HTTP_RETRIES': '2'},
);
});
@@ -87,9 +81,7 @@
command,
error: contains('user-info'),
exitCode: exit_codes.DATA,
- environment: {
- 'PUB_MAX_HTTP_RETRIES': '2',
- },
+ environment: {'PUB_MAX_HTTP_RETRIES': '2'},
);
});
@@ -108,9 +100,7 @@
command,
error: contains('user-info'),
exitCode: exit_codes.DATA,
- environment: {
- 'PUB_MAX_HTTP_RETRIES': '2',
- },
+ environment: {'PUB_MAX_HTTP_RETRIES': '2'},
);
});
});
diff --git a/test/hosted/fail_gracefully_on_missing_package_test.dart b/test/hosted/fail_gracefully_on_missing_package_test.dart
index 5a6752a..f572380 100644
--- a/test/hosted/fail_gracefully_on_missing_package_test.dart
+++ b/test/hosted/fail_gracefully_on_missing_package_test.dart
@@ -19,8 +19,9 @@
command,
error: allOf([
contains(
- "Because myapp depends on foo any which doesn't exist (could "
- 'not find package foo at http://localhost:'),
+ "Because myapp depends on foo any which doesn't exist (could "
+ 'not find package foo at http://localhost:',
+ ),
contains('), version solving failed.'),
]),
exitCode: exit_codes.UNAVAILABLE,
@@ -29,25 +30,31 @@
});
forBothPubGetAndUpgrade((command) {
- test('fails gracefully if transitive dependencies does not exist',
- () async {
- final server = await servePackages();
- server.serve('foo', '1.2.3', deps: {'bar': '^1.0.0'});
+ test(
+ 'fails gracefully if transitive dependencies does not exist',
+ () async {
+ final server = await servePackages();
+ server.serve('foo', '1.2.3', deps: {'bar': '^1.0.0'});
- await d.appDir(dependencies: {'foo': '1.2.3'}).create();
+ await d.appDir(dependencies: {'foo': '1.2.3'}).create();
- await pubCommand(
- command,
- error: allOf(
- contains('Because every version of foo depends on bar any which '
+ await pubCommand(
+ command,
+ error: allOf(
+ contains(
+ 'Because every version of foo depends on bar any which '
'doesn\'t exist (could not find package bar at '
- 'http://localhost:'),
- contains('), foo is forbidden.\n'
+ 'http://localhost:',
+ ),
+ contains(
+ '), foo is forbidden.\n'
'So, because myapp depends on foo 1.2.3, '
- 'version solving failed.'),
- ),
- exitCode: exit_codes.UNAVAILABLE,
- );
- });
+ 'version solving failed.',
+ ),
+ ),
+ exitCode: exit_codes.UNAVAILABLE,
+ );
+ },
+ );
});
}
diff --git a/test/hosted/fail_gracefully_on_url_resolve_test.dart b/test/hosted/fail_gracefully_on_url_resolve_test.dart
index 938cc01..1e8e441 100644
--- a/test/hosted/fail_gracefully_on_url_resolve_test.dart
+++ b/test/hosted/fail_gracefully_on_url_resolve_test.dart
@@ -23,12 +23,11 @@
await pubCommand(
command,
- error: 'Got socket error trying to find package foo at '
+ error:
+ 'Got socket error trying to find package foo at '
'https://invalid-url.foo.',
exitCode: exit_codes.UNAVAILABLE,
- environment: {
- 'PUB_MAX_HTTP_RETRIES': '2',
- },
+ environment: {'PUB_MAX_HTTP_RETRIES': '2'},
);
});
});
diff --git a/test/hosted/metadata_test.dart b/test/hosted/metadata_test.dart
index 736fe0d..7764fb9 100644
--- a/test/hosted/metadata_test.dart
+++ b/test/hosted/metadata_test.dart
@@ -64,11 +64,13 @@
final server = await servePackages();
server.serve('bar', '1.0.0');
- await d.appDir(
- dependencies: {
- 'foo': {'path': '../foo'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'path': '../foo'},
+ },
+ )
+ .create();
await d.dir('foo', [
d.libPubspec('foo', '1.0.0', deps: {'bar': '1.0.0'}),
@@ -87,17 +89,23 @@
});
test("doesn't send metadata headers to a foreign server", () async {
- final server = await startPackageServer()
- ..serve('foo', '1.0.0');
+ final server =
+ await startPackageServer()
+ ..serve('foo', '1.0.0');
- await d.appDir(
- dependencies: {
- 'foo': {
- 'version': '1.0.0',
- 'hosted': {'name': 'foo', 'url': 'http://localhost:${server.port}'},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'version': '1.0.0',
+ 'hosted': {
+ 'name': 'foo',
+ 'url': 'http://localhost:${server.port}',
+ },
+ },
+ },
+ )
+ .create();
await pubCommand(command, silent: isNot(contains('X-Pub-')));
});
@@ -110,9 +118,7 @@
await pubCommand(
command,
silent: isNot(contains('X-Pub-')),
- environment: {
- 'CI': 'true',
- },
+ environment: {'CI': 'true'},
);
});
});
diff --git a/test/hosted/offline_test.dart b/test/hosted/offline_test.dart
index 90e2fd2..0436bc2 100644
--- a/test/hosted/offline_test.dart
+++ b/test/hosted/offline_test.dart
@@ -29,13 +29,10 @@
forBothPubGetAndUpgrade((command) {
test('upgrades a package using the cache', () async {
final server = await servePackages();
- await populateCache(
- {
- 'foo': ['1.2.2', '1.2.3'],
- 'bar': ['1.2.3'],
- },
- server,
- );
+ await populateCache({
+ 'foo': ['1.2.2', '1.2.3'],
+ 'bar': ['1.2.3'],
+ }, server);
// Now serve only errors - to validate we are truly offline.
server.serveErrors();
@@ -44,7 +41,8 @@
String? warning;
if (command == RunCommand.upgrade) {
- warning = 'Warning: Upgrading when offline may not update you '
+ warning =
+ 'Warning: Upgrading when offline may not update you '
'to the latest versions of your dependencies.';
}
@@ -57,12 +55,9 @@
test('supports prerelease versions', () async {
final server = await servePackages();
- await populateCache(
- {
- 'foo': ['1.2.3-alpha.1'],
- },
- server,
- );
+ await populateCache({
+ 'foo': ['1.2.3-alpha.1'],
+ }, server);
// Now serve only errors - to validate we are truly offline.
server.serveErrors();
@@ -70,7 +65,8 @@
String? warning;
if (command == RunCommand.upgrade) {
- warning = 'Warning: Upgrading when offline may not update you '
+ warning =
+ 'Warning: Upgrading when offline may not update you '
'to the latest versions of your dependencies.';
}
@@ -103,12 +99,9 @@
test('fails gracefully if no cached versions match', () async {
final server = await servePackages();
- await populateCache(
- {
- 'foo': ['1.2.2', '1.2.3'],
- },
- server,
- );
+ await populateCache({
+ 'foo': ['1.2.2', '1.2.3'],
+ }, server);
// Run the server so that we know what URL to use in the system cache.
server.serveErrors();
@@ -118,13 +111,14 @@
await pubCommand(
command,
args: ['--offline'],
- error: contains('''
-Because myapp depends on foo >2.0.0 which doesn't match any versions, version solving failed.'''),
+ error: contains(
+ '''
+Because myapp depends on foo >2.0.0 which doesn't match any versions, version solving failed.''',
+ ),
);
});
- test(
- 'fails gracefully if a dependency is not cached and a lockfile '
+ test('fails gracefully if a dependency is not cached and a lockfile '
'exists', () async {
final server = await servePackages();
@@ -151,12 +145,9 @@
test('downgrades to the version in the cache if necessary', () async {
final server = await servePackages();
- await populateCache(
- {
- 'foo': ['1.2.2', '1.2.3'],
- },
- server,
- );
+ await populateCache({
+ 'foo': ['1.2.2', '1.2.3'],
+ }, server);
// Run the server so that we know what URL to use in the system cache.
server.serveErrors();
@@ -174,12 +165,9 @@
test('skips invalid cached versions', () async {
final server = await servePackages();
- await populateCache(
- {
- 'foo': ['1.2.2', '1.2.3'],
- },
- server,
- );
+ await populateCache({
+ 'foo': ['1.2.2', '1.2.3'],
+ }, server);
// Run the server so that we know what URL to use in the system cache.
server.serveErrors();
@@ -200,12 +188,9 @@
test('skips invalid locked versions', () async {
final server = await servePackages();
- await populateCache(
- {
- 'foo': ['1.2.2', '1.2.3'],
- },
- server,
- );
+ await populateCache({
+ 'foo': ['1.2.2', '1.2.3'],
+ }, server);
// Run the server so that we know what URL to use in the system cache.
server.serveErrors();
diff --git a/test/hosted/remove_removed_transitive_dependency_test.dart b/test/hosted/remove_removed_transitive_dependency_test.dart
index 3f0c5d5..b3f99e8 100644
--- a/test/hosted/remove_removed_transitive_dependency_test.dart
+++ b/test/hosted/remove_removed_transitive_dependency_test.dart
@@ -9,8 +9,7 @@
void main() {
forBothPubGetAndUpgrade((command) {
- test(
- "removes a transitive dependency that's no longer depended "
+ test("removes a transitive dependency that's no longer depended "
'on', () async {
await servePackages()
..serve('foo', '1.0.0', deps: {'shared_dep': 'any'})
diff --git a/test/hosted/short_syntax_test.dart b/test/hosted/short_syntax_test.dart
index 38782a5..1f6da9c 100644
--- a/test/hosted/short_syntax_test.dart
+++ b/test/hosted/short_syntax_test.dart
@@ -25,12 +25,7 @@
forBothPubGetAndUpgrade((command) {
Future<void> testWith(dynamic dependency) async {
await d.dir(appPath, [
- d.libPubspec(
- 'app',
- '1.0.0',
- deps: {'foo': dependency},
- sdk: '^2.15.0',
- ),
+ d.libPubspec('app', '1.0.0', deps: {'foo': dependency}, sdk: '^2.15.0'),
]).create();
await pubCommand(
diff --git a/test/hosted/version_negotiation_test.dart b/test/hosted/version_negotiation_test.dart
index 657b435..56408ee 100644
--- a/test/hosted/version_negotiation_test.dart
+++ b/test/hosted/version_negotiation_test.dart
@@ -14,13 +14,15 @@
test('sends the correct Accept header', () async {
await servePackages();
- await d.appDir(
- dependencies: {
- 'foo': {
- 'hosted': {'name': 'foo', 'url': globalServer.url},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'hosted': {'name': 'foo', 'url': globalServer.url},
+ },
+ },
+ )
+ .create();
globalServer.expect('GET', '/api/packages/foo', (request) {
expect(
@@ -40,13 +42,15 @@
test('prints a friendly error if the version is out-of-date', () async {
await servePackages();
- await d.appDir(
- dependencies: {
- 'foo': {
- 'hosted': {'name': 'foo', 'url': globalServer.url},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'hosted': {'name': 'foo', 'url': globalServer.url},
+ },
+ },
+ )
+ .create();
final pub = await startPub(args: [command.name]);
diff --git a/test/hosted/will_normalize_hosted_url_test.dart b/test/hosted/will_normalize_hosted_url_test.dart
index 3eae81f..0103515 100644
--- a/test/hosted/will_normalize_hosted_url_test.dart
+++ b/test/hosted/will_normalize_hosted_url_test.dart
@@ -88,25 +88,18 @@
/// This is a bit of a hack, to easily test if hosted pub URLs with a path
/// segment works and if the slashes are normalized.
void proxyMyFolderToRoot() {
- globalServer.handle(
- RegExp('/my-folder/.*'),
- (r) async {
- if (r.method != 'GET' && r.method != 'HEAD') {
- return Response.forbidden(null);
- }
- final path = r.requestedUri.path.substring('/my-folder/'.length);
- final res = await http.get(
- Uri.parse('${globalServer.url}/$path'),
- );
- return Response(
- res.statusCode,
- body: res.bodyBytes,
- headers: {
- 'Content-Type': res.headers['content-type']!,
- },
- );
- },
- );
+ globalServer.handle(RegExp('/my-folder/.*'), (r) async {
+ if (r.method != 'GET' && r.method != 'HEAD') {
+ return Response.forbidden(null);
+ }
+ final path = r.requestedUri.path.substring('/my-folder/'.length);
+ final res = await http.get(Uri.parse('${globalServer.url}/$path'));
+ return Response(
+ res.statusCode,
+ body: res.bodyBytes,
+ headers: {'Content-Type': res.headers['content-type']!},
+ );
+ });
}
test('will use normalized url with path', () async {
diff --git a/test/ignore_test.dart b/test/ignore_test.dart
index 0563bdb..406525f 100644
--- a/test/ignore_test.dart
+++ b/test/ignore_test.dart
@@ -39,13 +39,14 @@
return [path.substring(0, nextSlash == -1 ? path.length : nextSlash)];
}
- Ignore? ignoreForDir(String dir) => c.patterns[dir] == null
- ? null
- : Ignore(
- c.patterns[dir]!,
- onInvalidPattern: (_, __) => hasWarning = true,
- ignoreCase: ignoreCase,
- );
+ Ignore? ignoreForDir(String dir) =>
+ c.patterns[dir] == null
+ ? null
+ : Ignore(
+ c.patterns[dir]!,
+ onInvalidPattern: (_, __) => hasWarning = true,
+ ignoreCase: ignoreCase,
+ );
bool isDir(String candidate) =>
candidate == '.' ||
@@ -65,11 +66,9 @@
reason: 'Expected "$path" to be ignored, it was NOT!',
);
} else {
- expect(
- r,
- [pathWithoutSlash],
- reason: 'Expected "$path" to NOT be ignored, it was IGNORED!',
- );
+ expect(r, [
+ pathWithoutSlash,
+ ], reason: 'Expected "$path" to NOT be ignored, it was IGNORED!');
}
// Also test that the logic of walking the tree works.
@@ -129,7 +128,8 @@
expect(
ret.exitCode,
equals(0),
- reason: 'Running "git init" failed. '
+ reason:
+ 'Running "git init" failed. '
'StdErr: ${ret.stderr} '
'StdOut: ${ret.stdout}',
);
@@ -156,10 +156,12 @@
'${c.name}: git check-ignore "$path" is $result $casing',
() async {
expect(
- runGit(
- ['config', '--local', 'core.ignoreCase', ignoreCase.toString()],
- workingDirectory: tmp!.path,
- ).exitCode,
+ runGit([
+ 'config',
+ '--local',
+ 'core.ignoreCase',
+ ignoreCase.toString(),
+ ], workingDirectory: tmp!.path).exitCode,
anyOf(0, 1),
reason: 'Running "git config --local core.ignoreCase ..." failed',
);
@@ -168,16 +170,20 @@
final resolvedDirectory =
directory == '' ? tmp!.uri : tmp!.uri.resolve('$directory/');
Directory.fromUri(resolvedDirectory).createSync(recursive: true);
- final gitIgnore =
- File.fromUri(resolvedDirectory.resolve('.gitignore'));
+ final gitIgnore = File.fromUri(
+ resolvedDirectory.resolve('.gitignore'),
+ );
gitIgnore.writeAsStringSync(
'${c.patterns[directory]!.join('\n')}\n',
);
}
- final process = runGit(
- ['-C', tmp!.path, 'check-ignore', '--no-index', path],
- workingDirectory: tmp!.path,
- );
+ final process = runGit([
+ '-C',
+ tmp!.path,
+ 'check-ignore',
+ '--no-index',
+ path,
+ ], workingDirectory: tmp!.path);
expect(
process.exitCode,
anyOf(0, 1),
@@ -191,7 +197,8 @@
fail('Expected "$path" to NOT be ignored, it was IGNORED!');
}
},
- skip: Platform.isMacOS || // System `git` on mac has issues...
+ skip:
+ Platform.isMacOS || // System `git` on mac has issues...
c.skipOnWindows && Platform.isWindows,
);
}
@@ -244,69 +251,80 @@
this.hasWarning = false,
this.skipOnWindows = false,
this.ignoreCase,
- }) : name = '"${pattern.replaceAll('\n', '\\n')}"',
- patterns = {
- '.': [pattern],
- };
+ }) : name = '"${pattern.replaceAll('\n', '\\n')}"',
+ patterns = {
+ '.': [pattern],
+ };
}
final testData = [
// Simple test case
- TestData('simple', {
- '.': [
- '/.git/',
- '*.o',
- ],
- }, {
- '.git/config': true,
- '.git/': true,
- 'README.md': false,
- 'main.c': false,
- 'main.o': true,
- }),
+ TestData(
+ 'simple',
+ {
+ '.': ['/.git/', '*.o'],
+ },
+ {
+ '.git/config': true,
+ '.git/': true,
+ 'README.md': false,
+ 'main.c': false,
+ 'main.o': true,
+ },
+ ),
// Test empty lines
- TestData('empty', {
- '.': [''],
- }, {
- 'README.md': false,
- }),
+ TestData(
+ 'empty',
+ {
+ '.': [''],
+ },
+ {'README.md': false},
+ ),
// Patterns given in multiple lines with comments
- TestData('multiple lines LF', {
- '.': [
- '#comment\n/.git/ \n*.o\n',
- // Using CR CR LF doesn't work
- '#comment\n*.md\r\r\n',
- // Tab is not ignored
- '#comment\nLICENSE\t\n',
- // Trailing comments not allowed
- '#comment\nLICENSE # ignore license\n',
- ],
- }, {
- '.git/config': true,
- '.git/': true,
- 'README.md': false,
- 'LICENSE': false,
- 'main.c': false,
- 'main.o': true,
- }),
- TestData('multiple lines CR LF', {
- '.': [
- '#comment\r\n/.git/ \r\n*.o\r\n',
- // Using CR CR LF doesn't work
- '#comment\r\n*.md\r\r\n',
- // Tab is not ignored
- '#comment\r\nLICENSE\t\r\n',
- // Trailing comments not allowed
- '#comment\r\nLICENSE # ignore license\r\n',
- ],
- }, {
- '.git/config': true,
- '.git/': true,
- 'README.md': false,
- 'LICENSE': false,
- 'main.c': false,
- 'main.o': true,
- }),
+ TestData(
+ 'multiple lines LF',
+ {
+ '.': [
+ '#comment\n/.git/ \n*.o\n',
+ // Using CR CR LF doesn't work
+ '#comment\n*.md\r\r\n',
+ // Tab is not ignored
+ '#comment\nLICENSE\t\n',
+ // Trailing comments not allowed
+ '#comment\nLICENSE # ignore license\n',
+ ],
+ },
+ {
+ '.git/config': true,
+ '.git/': true,
+ 'README.md': false,
+ 'LICENSE': false,
+ 'main.c': false,
+ 'main.o': true,
+ },
+ ),
+ TestData(
+ 'multiple lines CR LF',
+ {
+ '.': [
+ '#comment\r\n/.git/ \r\n*.o\r\n',
+ // Using CR CR LF doesn't work
+ '#comment\r\n*.md\r\r\n',
+ // Tab is not ignored
+ '#comment\r\nLICENSE\t\r\n',
+ // Trailing comments not allowed
+ '#comment\r\nLICENSE # ignore license\r\n',
+ ],
+ },
+ {
+ '.git/config': true,
+ '.git/': true,
+ 'README.md': false,
+ 'LICENSE': false,
+ 'main.c': false,
+ 'main.o': true,
+ },
+ ),
// Test simple patterns
TestData.single('file.txt', {
'file.txt': true,
@@ -323,10 +341,7 @@
'sub/folder/file.txt': false,
}),
// Test comments and escaping
- TestData.single('#file.txt', {
- 'file.txt': false,
- '#file.txt': false,
- }),
+ TestData.single('#file.txt', {'file.txt': false, '#file.txt': false}),
TestData.single(r'\#file.txt', {
'#file.txt': true,
'other.txt': false,
@@ -335,20 +350,13 @@
'sub/folder/#file.txt': true,
}),
// Test ! and escaping
- TestData.single('!file.txt', {
- 'file.txt': false,
- '!file.txt': false,
- }),
+ TestData.single('!file.txt', {'file.txt': false, '!file.txt': false}),
TestData(
'negation',
{
'.': ['f*', '!file.txt'],
},
- {
- 'file.txt': false,
- '!file.txt': false,
- 'filter.txt': true,
- },
+ {'file.txt': false, '!file.txt': false, 'filter.txt': true},
),
TestData.single(r'\!file.txt', {
'!file.txt': true,
@@ -454,112 +462,68 @@
}),
// Special characters from RegExp that are not special in .gitignore
for (final c in r'(){}+.^$|'.split('')) ...[
- TestData.single(
- '${c}file.txt',
- {
- '${c}file.txt': true,
- 'file.txt': false,
- 'file.txt$c': false,
- },
- skipOnWindows: c == '^' || c == '|',
- ),
- TestData.single(
- 'file.txt$c',
- {
- 'file.txt$c': true,
- 'file.txt': false,
- '${c}file.txt': false,
- },
- skipOnWindows: c == '^' || c == '|',
- ),
- TestData.single(
- 'fi${c}l)e.txt',
- {
- 'fi${c}l)e.txt': true,
- 'f${c}il)e.txt': false,
- 'fil)e.txt': false,
- },
- skipOnWindows: c == '^' || c == '|',
- ),
- TestData.single(
- 'fi${c}l}e.txt',
- {
- 'fi${c}l}e.txt': true,
- 'f${c}il}e.txt': false,
- 'fil}e.txt': false,
- },
- skipOnWindows: c == '^' || c == '|',
- ),
+ TestData.single('${c}file.txt', {
+ '${c}file.txt': true,
+ 'file.txt': false,
+ 'file.txt$c': false,
+ }, skipOnWindows: c == '^' || c == '|'),
+ TestData.single('file.txt$c', {
+ 'file.txt$c': true,
+ 'file.txt': false,
+ '${c}file.txt': false,
+ }, skipOnWindows: c == '^' || c == '|'),
+ TestData.single('fi${c}l)e.txt', {
+ 'fi${c}l)e.txt': true,
+ 'f${c}il)e.txt': false,
+ 'fil)e.txt': false,
+ }, skipOnWindows: c == '^' || c == '|'),
+ TestData.single('fi${c}l}e.txt', {
+ 'fi${c}l}e.txt': true,
+ 'f${c}il}e.txt': false,
+ 'fil}e.txt': false,
+ }, skipOnWindows: c == '^' || c == '|'),
],
// Special characters from RegExp that are also special in .gitignore
// can be escaped.
for (final c in r'[]*?\'.split('')) ...[
- TestData.single(
- '\\${c}file.txt',
- {
- '${c}file.txt': true,
- 'file.txt': false,
- 'file.txt$c': false,
- },
- skipOnWindows: c == r'\',
- ),
- TestData.single(
- 'file.txt\\$c',
- {
- 'file.txt$c': true,
- 'file.txt': false,
- '${c}file.txt': false,
- },
- skipOnWindows: c == r'\',
- ),
- TestData.single(
- 'fi\\${c}l)e.txt',
- {
- 'fi${c}l)e.txt': true,
- 'f${c}il)e.txt': false,
- 'fil)e.txt': false,
- },
- skipOnWindows: c == r'\',
- ),
- TestData.single(
- 'fi\\${c}l}e.txt',
- {
- 'fi${c}l}e.txt': true,
- 'f${c}il}e.txt': false,
- 'fil}e.txt': false,
- },
- skipOnWindows: c == r'\',
- ),
+ TestData.single('\\${c}file.txt', {
+ '${c}file.txt': true,
+ 'file.txt': false,
+ 'file.txt$c': false,
+ }, skipOnWindows: c == r'\'),
+ TestData.single('file.txt\\$c', {
+ 'file.txt$c': true,
+ 'file.txt': false,
+ '${c}file.txt': false,
+ }, skipOnWindows: c == r'\'),
+ TestData.single('fi\\${c}l)e.txt', {
+ 'fi${c}l)e.txt': true,
+ 'f${c}il)e.txt': false,
+ 'fil)e.txt': false,
+ }, skipOnWindows: c == r'\'),
+ TestData.single('fi\\${c}l}e.txt', {
+ 'fi${c}l}e.txt': true,
+ 'f${c}il}e.txt': false,
+ 'fil}e.txt': false,
+ }, skipOnWindows: c == r'\'),
],
// Special characters from RegExp can always be escaped
for (final c in r'()[]{}*+?.^$|\'.split('')) ...[
- TestData.single(
- '\\${c}file.txt',
- {
- '${c}file.txt': true,
- 'file.txt': false,
- 'file.txt$c': false,
- },
- skipOnWindows: c == '^' || c == '|' || c == r'\',
- ),
- TestData.single(
- 'file.txt\\$c',
- {
- 'file.txt$c': true,
- 'file.txt': false,
- '${c}file.txt': false,
- },
- skipOnWindows: c == '^' || c == '|' || c == r'\',
- ),
- TestData.single(
- 'file\\$c.txt',
- {
- 'file$c.txt': true,
- 'file.txt': false,
- '${c}file.txt': false,
- },
- skipOnWindows: c == '^' || c == '|' || c == r'\',
- ),
+ TestData.single('\\${c}file.txt', {
+ '${c}file.txt': true,
+ 'file.txt': false,
+ 'file.txt$c': false,
+ }, skipOnWindows: c == '^' || c == '|' || c == r'\'),
+ TestData.single('file.txt\\$c', {
+ 'file.txt$c': true,
+ 'file.txt': false,
+ '${c}file.txt': false,
+ }, skipOnWindows: c == '^' || c == '|' || c == r'\'),
+ TestData.single('file\\$c.txt', {
+ 'file$c.txt': true,
+ 'file.txt': false,
+ '${c}file.txt': false,
+ }, skipOnWindows: c == '^' || c == '|' || c == r'\'),
],
// Ending in backslash (unescaped)
TestData.single(
@@ -579,31 +543,20 @@
'file.txt\n': false,
'file.txt': false,
}),
- TestData.single(
- '**\\',
- {
- 'file.txt\\\n': false,
- 'file.txt ': false,
- 'file.txt\n': false,
- 'file.txt': false,
- },
- hasWarning: true,
- ),
- TestData.single(
- '*\\',
- {
- 'file.txt\\\n': false,
- 'file.txt ': false,
- 'file.txt\n': false,
- 'file.txt': false,
- },
- hasWarning: true,
- ),
- // ? matches anything except /
- TestData.single('?', {
- 'f': true,
+ TestData.single('**\\', {
+ 'file.txt\\\n': false,
+ 'file.txt ': false,
+ 'file.txt\n': false,
'file.txt': false,
- }),
+ }, hasWarning: true),
+ TestData.single('*\\', {
+ 'file.txt\\\n': false,
+ 'file.txt ': false,
+ 'file.txt\n': false,
+ 'file.txt': false,
+ }, hasWarning: true),
+ // ? matches anything except /
+ TestData.single('?', {'f': true, 'file.txt': false}),
TestData.single('a?c', {
'abc': true,
'abcd': false,
@@ -667,28 +620,20 @@
'abc/file.txt': true,
}),
// Empty character classes
- TestData.single(
- 'a[]c',
- {
- 'abc': false,
- 'ac': false,
- 'a': false,
- 'a[]c': false,
- 'c': false,
- },
- hasWarning: true,
- ),
- TestData.single(
- 'a[]',
- {
- 'abc': false,
- 'ac': false,
- 'a': false,
- 'a[]': false,
- 'c': false,
- },
- hasWarning: true,
- ),
+ TestData.single('a[]c', {
+ 'abc': false,
+ 'ac': false,
+ 'a': false,
+ 'a[]c': false,
+ 'c': false,
+ }, hasWarning: true),
+ TestData.single('a[]', {
+ 'abc': false,
+ 'ac': false,
+ 'a': false,
+ 'a[]': false,
+ 'c': false,
+ }, hasWarning: true),
// Invalid character classes
TestData.single(
r'a[\]',
@@ -720,57 +665,30 @@
skipOnWindows: true,
),
// Character classes with special characters
- TestData.single(
- r'a[\\]',
- {
- 'a': false,
- 'ab': false,
- 'a[]': false,
- 'a[': false,
- 'a\\': true,
- },
- skipOnWindows: true,
- ),
- TestData.single(
- r'a[^b]',
- {
- 'a': false,
- 'ab': false,
- 'ac': true,
- 'a[': true,
- 'a\\': true,
- },
- skipOnWindows: true,
- ),
- TestData.single(
- r'a[!b]',
- {
- 'a': false,
- 'ab': false,
- 'ac': true,
- 'a[': true,
- 'a\\': true,
- },
- skipOnWindows: true,
- ),
- TestData.single(r'a[[]', {
+ TestData.single(r'a[\\]', {
'a': false,
'ab': false,
- 'a[': true,
- 'a]': false,
- }),
- TestData.single(r'a[]]', {
- 'a': false,
- 'ab': false,
+ 'a[]': false,
'a[': false,
- 'a]': true,
- }),
- TestData.single(r'a[?]', {
+ 'a\\': true,
+ }, skipOnWindows: true),
+ TestData.single(r'a[^b]', {
'a': false,
'ab': false,
- 'a??': false,
- 'a?': true,
- }),
+ 'ac': true,
+ 'a[': true,
+ 'a\\': true,
+ }, skipOnWindows: true),
+ TestData.single(r'a[!b]', {
+ 'a': false,
+ 'ab': false,
+ 'ac': true,
+ 'a[': true,
+ 'a\\': true,
+ }, skipOnWindows: true),
+ TestData.single(r'a[[]', {'a': false, 'ab': false, 'a[': true, 'a]': false}),
+ TestData.single(r'a[]]', {'a': false, 'ab': false, 'a[': false, 'a]': true}),
+ TestData.single(r'a[?]', {'a': false, 'ab': false, 'a??': false, 'a?': true}),
// Character classes with characters
TestData.single(r'a[abc]', {
'a': false,
@@ -879,20 +797,9 @@
'ad': false,
}),
// Character classes with dashes
- TestData.single(r'a[-]', {
- 'a-': true,
- 'a': false,
- }),
- TestData.single(r'a[a-]', {
- 'a-': true,
- 'aa': true,
- 'ab': false,
- }),
- TestData.single(r'a[-a]', {
- 'a-': true,
- 'aa': true,
- 'ab': false,
- }),
+ TestData.single(r'a[-]', {'a-': true, 'a': false}),
+ TestData.single(r'a[a-]', {'a-': true, 'aa': true, 'ab': false}),
+ TestData.single(r'a[-a]', {'a-': true, 'aa': true, 'ab': false}),
// TODO: test slashes in character classes
// Test **, *, [, and [...] cases
TestData.single('x[a-c-e]', {
@@ -945,11 +852,7 @@
'sub/bolder/other.paf': false,
'subblob/file.txt': false,
}),
- TestData.single('sub/', {
- 'sub/': true,
- 'mop/': false,
- 'sup': false,
- }),
+ TestData.single('sub/', {'sub/': true, 'mop/': false, 'sup': false}),
TestData.single('sub/**/', {
'file.txt': false,
'otherf.txt': false,
@@ -988,58 +891,66 @@
'sub/bolder/other.paf': true,
'subblob/file.txt': false,
}),
- TestData('ignores in subfolders only target those', {
- '.': ['a.txt'],
- 'folder': ['b.txt'],
- 'folder/sub': ['c.txt'],
- }, {
- 'a.txt': true,
- 'b.txt': false,
- 'c.txt': false,
- 'folder/a.txt': true,
- 'folder/b.txt': true,
- 'folder/c.txt': false,
- 'folder/sub/a.txt': true,
- 'folder/sub/b.txt': true,
- 'folder/sub/c.txt': true,
- }),
- TestData('Cannot negate folders that were excluded', {
- '.': ['sub/', '!sub/foo.txt'],
- }, {
- 'sub/a.txt': true,
- 'sub/foo.txt': true,
- }),
- TestData('Can negate the exclusion of folders', {
- '.': ['*.txt', 'sub', '!sub', '!foo.txt'],
- }, {
- 'sub/a.txt': true,
- 'sub/foo.txt': false,
- }),
- TestData('Can negate the exclusion of folders 2', {
- '.': ['sub/', '*.txt'],
- 'folder': ['!sub/', '!foo.txt'],
- }, {
- 'folder/sub/a.txt': true,
- 'folder/sub/foo.txt': false,
- 'folder/foo.txt': false,
- 'folder/a.txt': true,
- }),
+ TestData(
+ 'ignores in subfolders only target those',
+ {
+ '.': ['a.txt'],
+ 'folder': ['b.txt'],
+ 'folder/sub': ['c.txt'],
+ },
+ {
+ 'a.txt': true,
+ 'b.txt': false,
+ 'c.txt': false,
+ 'folder/a.txt': true,
+ 'folder/b.txt': true,
+ 'folder/c.txt': false,
+ 'folder/sub/a.txt': true,
+ 'folder/sub/b.txt': true,
+ 'folder/sub/c.txt': true,
+ },
+ ),
+ TestData(
+ 'Cannot negate folders that were excluded',
+ {
+ '.': ['sub/', '!sub/foo.txt'],
+ },
+ {'sub/a.txt': true, 'sub/foo.txt': true},
+ ),
+ TestData(
+ 'Can negate the exclusion of folders',
+ {
+ '.': ['*.txt', 'sub', '!sub', '!foo.txt'],
+ },
+ {'sub/a.txt': true, 'sub/foo.txt': false},
+ ),
+ TestData(
+ 'Can negate the exclusion of folders 2',
+ {
+ '.': ['sub/', '*.txt'],
+ 'folder': ['!sub/', '!foo.txt'],
+ },
+ {
+ 'folder/sub/a.txt': true,
+ 'folder/sub/foo.txt': false,
+ 'folder/foo.txt': false,
+ 'folder/a.txt': true,
+ },
+ ),
- TestData('folder/* does not ignore `folder` itself', {
- '.': ['folder/*', '!folder/a.txt'],
- }, {
- 'folder/a.txt': false,
- 'folder/b.txt': true,
- }),
+ TestData(
+ 'folder/* does not ignore `folder` itself',
+ {
+ '.': ['folder/*', '!folder/a.txt'],
+ },
+ {'folder/a.txt': false, 'folder/b.txt': true},
+ ),
// Case sensitivity
TestData(
'simple',
{
- '.': [
- '/.git/',
- '*.o',
- ],
+ '.': ['/.git/', '*.o'],
},
{
'.git/config': true,
@@ -1052,31 +963,24 @@
ignoreCase: false,
),
// Test simple patterns
- TestData.single(
- 'file.txt',
- {
- 'file.TXT': false,
- 'file.txT': false,
- 'file.txt': true,
- 'other.txt': false,
- 'src/file.txt': true,
- '.obj/file.txt': true,
- 'sub/folder/file.txt': true,
- 'src/file.TXT': false,
- '.obj/file.TXT': false,
- 'sub/folder/file.TXT': false,
- },
- ignoreCase: false,
- ),
+ TestData.single('file.txt', {
+ 'file.TXT': false,
+ 'file.txT': false,
+ 'file.txt': true,
+ 'other.txt': false,
+ 'src/file.txt': true,
+ '.obj/file.txt': true,
+ 'sub/folder/file.txt': true,
+ 'src/file.TXT': false,
+ '.obj/file.TXT': false,
+ 'sub/folder/file.TXT': false,
+ }, ignoreCase: false),
// Case insensitivity
TestData(
'simple',
{
- '.': [
- '/.git/',
- '*.o',
- ],
+ '.': ['/.git/', '*.o'],
},
{
'.git/config': true,
@@ -1088,20 +992,16 @@
},
ignoreCase: true,
),
- TestData.single(
- 'file.txt',
- {
- 'file.TXT': true,
- 'file.txT': true,
- 'file.txt': true,
- 'other.txt': false,
- 'src/file.txt': true,
- '.obj/file.txt': true,
- 'sub/folder/file.txt': true,
- 'src/file.TXT': true,
- '.obj/file.TXT': true,
- 'sub/folder/file.TXT': true,
- },
- ignoreCase: true,
- ),
+ TestData.single('file.txt', {
+ 'file.TXT': true,
+ 'file.txT': true,
+ 'file.txt': true,
+ 'other.txt': false,
+ 'src/file.txt': true,
+ '.obj/file.txt': true,
+ 'sub/folder/file.txt': true,
+ 'src/file.TXT': true,
+ '.obj/file.TXT': true,
+ 'sub/folder/file.TXT': true,
+ }, ignoreCase: true),
];
diff --git a/test/io_test.dart b/test/io_test.dart
index c4ed470..bc6275a 100644
--- a/test/io_test.dart
+++ b/test/io_test.dart
@@ -59,9 +59,10 @@
expect(
listDir(temp, recursive: true),
- unorderedEquals(
- [p.join(temp, 'file1.txt'), p.join(temp, 'file2.txt')],
- ),
+ unorderedEquals([
+ p.join(temp, 'file1.txt'),
+ p.join(temp, 'file2.txt'),
+ ]),
);
}),
completes,
@@ -298,21 +299,23 @@
);
});
- test('resolves a symlink that links to a path that needs more resolving',
- () {
- expect(
- _withCanonicalTempDir((temp) {
- final dir = p.join(temp, 'dir');
- final linkdir = p.join(temp, 'linkdir');
- final linkfile = p.join(dir, 'link');
- _createDir(dir);
- createSymlink(dir, linkdir);
- createSymlink(p.join(linkdir, 'file'), linkfile);
- expect(canonicalize(linkfile), equals(p.join(dir, 'file')));
- }),
- completes,
- );
- });
+ test(
+ 'resolves a symlink that links to a path that needs more resolving',
+ () {
+ expect(
+ _withCanonicalTempDir((temp) {
+ final dir = p.join(temp, 'dir');
+ final linkdir = p.join(temp, 'linkdir');
+ final linkfile = p.join(dir, 'link');
+ _createDir(dir);
+ createSymlink(dir, linkdir);
+ createSymlink(p.join(linkdir, 'file'), linkfile);
+ expect(canonicalize(linkfile), equals(p.join(dir, 'file')));
+ }),
+ completes,
+ );
+ },
+ );
test('resolves a pair of pathologically-recursive symlinks', () {
expect(
@@ -337,25 +340,25 @@
test('decompresses simple archive', () async {
await withTempDir((tempDir) async {
await extractTarGz(
- Stream.fromIterable(
- [
- base64Decode('H4sIAP2weF4AA+3S0QqCMBiG4V2KeAE1nfuF7m'
- 'aViNBqzDyQ6N4z6yCIogOtg97ncAz2wTvfuxCW'
- 'alZ6UFqttIiUYpXObWlzM57fqcyIkcxoU2ZKZy'
- 'YvtErsvLNuuvboYpKotqm7uPUv74XYeBf7Oh66'
- '8I1dX+LH/qFbt6HaLHrnd9O/cQ0sxZv++UP/Qo'
- 'b+1srQX08/5dmf9z+le+erdJWOHyE9/3oPAAAA'
- 'AAAAAAAAAAAAgM9dALkoaRMAKAAA'),
- ],
- ),
+ Stream.fromIterable([
+ base64Decode(
+ 'H4sIAP2weF4AA+3S0QqCMBiG4V2KeAE1nfuF7m'
+ 'aViNBqzDyQ6N4z6yCIogOtg97ncAz2wTvfuxCW'
+ 'alZ6UFqttIiUYpXObWlzM57fqcyIkcxoU2ZKZy'
+ 'YvtErsvLNuuvboYpKotqm7uPUv74XYeBf7Oh66'
+ '8I1dX+LH/qFbt6HaLHrnd9O/cQ0sxZv++UP/Qo'
+ 'b+1srQX08/5dmf9z+le+erdJWOHyE9/3oPAAAA'
+ 'AAAAAAAAAAAAgM9dALkoaRMAKAAA',
+ ),
+ ]),
tempDir,
);
- await d.dir(appPath, [
- d.rawPubspec({
- 'name': 'myapp',
- }),
- ]).validate(tempDir);
+ await d
+ .dir(appPath, [
+ d.rawPubspec({'name': 'myapp'}),
+ ])
+ .validate(tempDir);
});
});
@@ -363,16 +366,15 @@
await withTempDir((tempDir) async {
await expectLater(
() async => await extractTarGz(
- Stream.fromIterable(
- [
- base64Decode(
- // Correct Gzip of a faulty tar archive.
- 'H4sICBKyeF4AA215YXBwLnRhcgDt0sEKgjAAh/GdewrxAWpzbkJvs0pEaDVmHiR699Q6BBJ00Dr0'
- '/Y5jsD98850LYSMWJXuFkUJaITNTmEyPR09Caaut0lIXSkils1yKxCy76KFtLi4miWjqqo0H//Ze'
- 'iLV3saviuQ3f2PUlfkwf2l0Tyv26c/44/xtDYJsP6a0trJn2z1765/3/UMbYvr+cf8rUn/e/pifn'
- 'y3Sbjh8hvf16DwAAAAAAAAAAAAAAAIDPre4CU/3q/CcAAA=='),
- ],
- ),
+ Stream.fromIterable([
+ base64Decode(
+ // Correct Gzip of a faulty tar archive.
+ 'H4sICBKyeF4AA215YXBwLnRhcgDt0sEKgjAAh/GdewrxAWpzbkJvs0pEaDVmHiR699Q6BBJ00Dr0'
+ '/Y5jsD98850LYSMWJXuFkUJaITNTmEyPR09Caaut0lIXSkils1yKxCy76KFtLi4miWjqqo0H//Ze'
+ 'iLV3saviuQ3f2PUlfkwf2l0Tyv26c/44/xtDYJsP6a0trJn2z1765/3/UMbYvr+cf8rUn/e/pifn'
+ 'y3Sbjh8hvf16DwAAAAAAAAAAAAAAAIDPre4CU/3q/CcAAA==',
+ ),
+ ]),
tempDir,
),
throwsA(isA<TarException>()),
@@ -384,11 +386,9 @@
await withTempDir((tempDir) async {
await expectLater(
() async => await extractTarGz(
- Stream.fromIterable(
- [
- [10, 20, 30], // Not a good gz stream.
- ],
- ),
+ Stream.fromIterable([
+ [10, 20, 30], // Not a good gz stream.
+ ]),
tempDir,
),
throwsA(
@@ -463,14 +463,13 @@
tempDir,
);
- await d.dir(
- '.',
- [
- d.file('lib/main.txt', 'text content'),
- d.file('bin/main.txt', 'text content'),
- d.file('test/main.txt', 'text content'),
- ],
- ).validate(tempDir);
+ await d
+ .dir('.', [
+ d.file('lib/main.txt', 'text content'),
+ d.file('bin/main.txt', 'text content'),
+ d.file('test/main.txt', 'text content'),
+ ])
+ .validate(tempDir);
});
});
@@ -495,8 +494,11 @@
await expectLater(
Directory(tempDir).list(),
emits(
- isA<Directory>()
- .having((e) => p.basename(e.path), 'basename', 'bin'),
+ isA<Directory>().having(
+ (e) => p.basename(e.path),
+ 'basename',
+ 'bin',
+ ),
),
);
});
@@ -675,8 +677,7 @@
);
});
- test(
- 'returns $forMultiLevelDirectorySymlink for a multi-level symlink to '
+ test('returns $forMultiLevelDirectorySymlink for a multi-level symlink to '
'a directory', () {
expect(
withTempDir((temp) {
@@ -709,22 +710,24 @@
);
});
- test('returns $forMultiLevelBrokenSymlink for a multi-level broken symlink',
- () {
- expect(
- withTempDir((temp) {
- final targetPath = p.join(temp, 'dir');
- final symlink1Path = p.join(temp, 'link1dir');
- final symlink2Path = p.join(temp, 'link2dir');
- _createDir(targetPath);
- createSymlink(targetPath, symlink1Path);
- createSymlink(symlink1Path, symlink2Path);
- deleteEntry(targetPath);
- expect(predicate(symlink2Path), equals(forMultiLevelBrokenSymlink));
- }),
- completes,
- );
- });
+ test(
+ 'returns $forMultiLevelBrokenSymlink for a multi-level broken symlink',
+ () {
+ expect(
+ withTempDir((temp) {
+ final targetPath = p.join(temp, 'dir');
+ final symlink1Path = p.join(temp, 'link1dir');
+ final symlink2Path = p.join(temp, 'link2dir');
+ _createDir(targetPath);
+ createSymlink(targetPath, symlink1Path);
+ createSymlink(symlink1Path, symlink2Path);
+ deleteEntry(targetPath);
+ expect(predicate(symlink2Path), equals(forMultiLevelBrokenSymlink));
+ }),
+ completes,
+ );
+ },
+ );
// Windows doesn't support symlinking to files.
if (!Platform.isWindows) {
@@ -741,8 +744,7 @@
);
});
- test(
- 'returns $forMultiLevelFileSymlink for a multi-level symlink to a '
+ test('returns $forMultiLevelFileSymlink for a multi-level symlink to a '
'file', () {
expect(
withTempDir((temp) {
diff --git a/test/lish/archive_contents_test.dart b/test/lish/archive_contents_test.dart
index 29499e5..3f9e7c4 100644
--- a/test/lish/archive_contents_test.dart
+++ b/test/lish/archive_contents_test.dart
@@ -22,8 +22,7 @@
const _executableMask = 0x49; // 001 001 001
void main() {
- test(
- 'archives and uploads empty directories in package. '
+ test('archives and uploads empty directories in package. '
'Maintains the executable bit', () async {
await d.validPackage().create();
await d.dir(appPath, [
@@ -32,10 +31,10 @@
]).create();
if (!Platform.isWindows) {
- Process.runSync(
- 'chmod',
- ['+x', p.join(d.sandbox, appPath, 'tool', 'tool.sh')],
- );
+ Process.runSync('chmod', [
+ '+x',
+ p.join(d.sandbox, appPath, 'tool', 'tool.sh'),
+ ]);
}
await servePackages();
diff --git a/test/lish/cloud_storage_upload_provides_an_error_test.dart b/test/lish/cloud_storage_upload_provides_an_error_test.dart
index 071ca45..179b9bc 100644
--- a/test/lish/cloud_storage_upload_provides_an_error_test.dart
+++ b/test/lish/cloud_storage_upload_provides_an_error_test.dart
@@ -29,12 +29,7 @@
});
});
- expect(
- pub.stderr,
- emits(
- 'Server error code: EntityTooLarge',
- ),
- );
+ expect(pub.stderr, emits('Server error code: EntityTooLarge'));
expect(
pub.stderr,
emits(
diff --git a/test/lish/does_not_include_dot_file.dart b/test/lish/does_not_include_dot_file.dart
index 8ddbcca..e1378e5 100644
--- a/test/lish/does_not_include_dot_file.dart
+++ b/test/lish/does_not_include_dot_file.dart
@@ -15,15 +15,15 @@
/// Describes a package with dot-files in tree.
td.DirectoryDescriptor get validPackageWithDotFiles => d.dir(appPath, [
- d.libPubspec('test_pkg', '1.0.0', sdk: '>=1.8.0 <=2.0.0'),
- td.dir('.dart_tool', [td.file('package_config.json')]),
- td.dir('.github', [td.file('ignored.yml')]),
- td.file('.gitignore'),
- td.file('LICENSE', 'Eh, do what you want.'),
- td.file('README.md', "This package isn't real."),
- td.file('CHANGELOG.md', '# 1.0.0\nFirst version\n'),
- td.dir('lib', [td.file('test_pkg.dart', 'int i = 1;')]),
- ]);
+ d.libPubspec('test_pkg', '1.0.0', sdk: '>=1.8.0 <=2.0.0'),
+ td.dir('.dart_tool', [td.file('package_config.json')]),
+ td.dir('.github', [td.file('ignored.yml')]),
+ td.file('.gitignore'),
+ td.file('LICENSE', 'Eh, do what you want.'),
+ td.file('README.md', "This package isn't real."),
+ td.file('CHANGELOG.md', '# 1.0.0\nFirst version\n'),
+ td.dir('lib', [td.file('test_pkg.dart', 'int i = 1;')]),
+]);
void main() {
setUp(validPackageWithDotFiles.create);
diff --git a/test/lish/does_not_publish_if_private_with_server_arg_test.dart b/test/lish/does_not_publish_if_private_with_server_arg_test.dart
index c4fe7b9..0950dde 100644
--- a/test/lish/does_not_publish_if_private_with_server_arg_test.dart
+++ b/test/lish/does_not_publish_if_private_with_server_arg_test.dart
@@ -9,8 +9,7 @@
import '../test_pub.dart';
void main() {
- test(
- 'does not publish if the package is private even if a server '
+ test('does not publish if the package is private even if a server '
'argument is provided', () async {
await d.validPackage(pubspecExtras: {'publish_to': 'none'}).create();
diff --git a/test/lish/dry_run_package_validation_has_a_warning_test.dart b/test/lish/dry_run_package_validation_has_a_warning_test.dart
index c748a91..7d48115 100644
--- a/test/lish/dry_run_package_validation_has_a_warning_test.dart
+++ b/test/lish/dry_run_package_validation_has_a_warning_test.dart
@@ -14,22 +14,15 @@
(await servePackages()).serve('foo', '1.0.0');
await d.validPackage().create();
- final pkg = packageMap(
- 'test_pkg',
- '1.0.0',
- null,
- null,
- {'sdk': defaultSdkConstraint},
- );
+ final pkg = packageMap('test_pkg', '1.0.0', null, null, {
+ 'sdk': defaultSdkConstraint,
+ });
pkg['dependencies'] = {'foo': 'any'};
await d.dir(appPath, [d.pubspec(pkg)]).create();
final pub = await startPublish(globalServer, args: ['--dry-run']);
await pub.shouldExit(exit_codes.DATA);
- expect(
- pub.stdout,
- emitsThrough('Package has 1 warning.'),
- );
+ expect(pub.stdout, emitsThrough('Package has 1 warning.'));
});
}
diff --git a/test/lish/dry_run_package_validation_has_no_warnings_test.dart b/test/lish/dry_run_package_validation_has_no_warnings_test.dart
index fabef41..af13a1a 100644
--- a/test/lish/dry_run_package_validation_has_no_warnings_test.dart
+++ b/test/lish/dry_run_package_validation_has_no_warnings_test.dart
@@ -10,14 +10,16 @@
import '../test_pub.dart';
void main() {
- test('--dry-run package validation on valid package has no warnings',
- () async {
- await d.validPackage().create();
+ test(
+ '--dry-run package validation on valid package has no warnings',
+ () async {
+ await d.validPackage().create();
- await servePackages();
- final pub = await startPublish(globalServer, args: ['--dry-run']);
+ await servePackages();
+ final pub = await startPublish(globalServer, args: ['--dry-run']);
- await pub.shouldExit(exit_codes.SUCCESS);
- expect(pub.stdout, emitsThrough('Package has 0 warnings.'));
- });
+ await pub.shouldExit(exit_codes.SUCCESS);
+ expect(pub.stdout, emitsThrough('Package has 0 warnings.'));
+ },
+ );
}
diff --git a/test/lish/force_publishes_if_there_are_warnings_test.dart b/test/lish/force_publishes_if_there_are_warnings_test.dart
index 49fca21..9587e8e 100644
--- a/test/lish/force_publishes_if_there_are_warnings_test.dart
+++ b/test/lish/force_publishes_if_there_are_warnings_test.dart
@@ -15,13 +15,9 @@
void main() {
test('--force publishes if there are warnings', () async {
await d.validPackage().create();
- final pkg = packageMap(
- 'test_pkg',
- '1.0.0',
- null,
- null,
- {'sdk': defaultSdkConstraint},
- );
+ final pkg = packageMap('test_pkg', '1.0.0', null, null, {
+ 'sdk': defaultSdkConstraint,
+ });
pkg['dependencies'] = {'foo': 'any'};
await d.dir(appPath, [d.pubspec(pkg)]).create();
diff --git a/test/lish/many_files_test.dart b/test/lish/many_files_test.dart
index c9b4691..8a9f700 100644
--- a/test/lish/many_files_test.dart
+++ b/test/lish/many_files_test.dart
@@ -30,15 +30,9 @@
void main() {
testWithGolden('displays all files', (context) async {
await d.validPackage().create();
- await d.dir(
- appPath,
- [
- d.dir(
- 'lib',
- List.generate(20, (i) => d.file('file_$i.dart')),
- ),
- ],
- ).create();
+ await d.dir(appPath, [
+ d.dir('lib', List.generate(20, (i) => d.file('file_$i.dart'))),
+ ]).create();
await servePackages();
await d.credentialsFile(globalServer, 'access-token').create();
final pub = await startPublish(globalServer);
@@ -61,8 +55,7 @@
);
});
- test(
- 'archives and uploads a package with more files than can fit on '
+ test('archives and uploads a package with more files than can fit on '
'the command line', () async {
await d.validPackage().create();
@@ -75,8 +68,10 @@
// automatically.
final result = Process.runSync('getconf', ['ARG_MAX']);
if (result.exitCode != 0) {
- fail('getconf failed with exit code ${result.exitCode}:\n'
- '${result.stderr}');
+ fail(
+ 'getconf failed with exit code ${result.exitCode}:\n'
+ '${result.stderr}',
+ );
}
argMax = int.parse(result.stdout as String);
diff --git a/test/lish/package_creation_provides_invalid_json_test.dart b/test/lish/package_creation_provides_invalid_json_test.dart
index a4b2fe8..a8b3ced 100644
--- a/test/lish/package_creation_provides_invalid_json_test.dart
+++ b/test/lish/package_creation_provides_invalid_json_test.dart
@@ -26,8 +26,10 @@
expect(
pub.stderr,
- emitsLines('Invalid server response:\n'
- '{not json'),
+ emitsLines(
+ 'Invalid server response:\n'
+ '{not json',
+ ),
);
await pub.shouldExit(1);
});
diff --git a/test/lish/package_validation_has_a_warning_and_is_canceled_test.dart b/test/lish/package_validation_has_a_warning_and_is_canceled_test.dart
index ccd221e..5a7e862 100644
--- a/test/lish/package_validation_has_a_warning_and_is_canceled_test.dart
+++ b/test/lish/package_validation_has_a_warning_and_is_canceled_test.dart
@@ -11,17 +11,11 @@
void main() {
test('package validation has a warning and is canceled', () async {
await d.validPackage().create();
- final pkg = packageMap(
- 'test_pkg',
- '1.0.0',
- null,
- null,
- {'sdk': defaultSdkConstraint},
- );
+ final pkg = packageMap('test_pkg', '1.0.0', null, null, {
+ 'sdk': defaultSdkConstraint,
+ });
pkg['author'] = 'Natalie Weizenbaum';
- await d.dir(appPath, [
- d.pubspec(pkg),
- ]).create();
+ await d.dir(appPath, [d.pubspec(pkg)]).create();
await servePackages();
final pub = await startPublish(globalServer);
diff --git a/test/lish/package_validation_has_an_error_test.dart b/test/lish/package_validation_has_an_error_test.dart
index 46c75d7..1ee0eb8 100644
--- a/test/lish/package_validation_has_an_error_test.dart
+++ b/test/lish/package_validation_has_an_error_test.dart
@@ -25,8 +25,10 @@
await pub.shouldExit(exit_codes.DATA);
expect(
pub.stderr,
- emitsThrough('Sorry, your package is missing some '
- "requirements and can't be published yet."),
+ emitsThrough(
+ 'Sorry, your package is missing some '
+ "requirements and can't be published yet.",
+ ),
);
});
}
diff --git a/test/lish/skip_validation_test.dart b/test/lish/skip_validation_test.dart
index 9788814..c636aae 100644
--- a/test/lish/skip_validation_test.dart
+++ b/test/lish/skip_validation_test.dart
@@ -15,8 +15,7 @@
import 'utils.dart';
void main() {
- test(
- 'with --skip-validation dependency resolution '
+ test('with --skip-validation dependency resolution '
'and validations are skipped.', () async {
await servePackages();
await d.validPackage().create();
diff --git a/test/lish/symlinks_test.dart b/test/lish/symlinks_test.dart
index b2b3ab7..ca22454 100644
--- a/test/lish/symlinks_test.dart
+++ b/test/lish/symlinks_test.dart
@@ -44,9 +44,9 @@
await runPub(args: ['publish', '--to-archive=archive.tar.gz']);
final reader = TarReader(
- File(p.join(d.sandbox, appPath, 'archive.tar.gz'))
- .openRead()
- .transform(GZipCodec().decoder),
+ File(
+ p.join(d.sandbox, appPath, 'archive.tar.gz'),
+ ).openRead().transform(GZipCodec().decoder),
);
while (await reader.moveNext()) {
@@ -56,25 +56,23 @@
await runPub(args: ['cache', 'preload', 'archive.tar.gz']);
- await d.dir('test_pkg-1.0.0', [
- ...d.validPackage().contents,
- d.dir('symlink_to_dir_outside_package', [
- d.file('aa', 'aaa'),
- ]),
- d.dir('symlink_to_dir_outside_package_relative', [
- d.file('aa', 'aaa'),
- ]),
- d.dir('b', [d.file('bb', 'bbb')]),
- d.dir('symlink_to_dir_inside_package', [
- d.file('bb', 'bbb'),
- d.file('l', 'ttt'),
- ]),
- d.dir('symlink_to_dir_inside_package_relative', [
- d.file('bb', 'bbb'),
- d.file('l', 'ttt'),
- ]),
- ]).validate(
- p.join(d.sandbox, cachePath, 'hosted', 'pub.dev'),
- );
+ await d
+ .dir('test_pkg-1.0.0', [
+ ...d.validPackage().contents,
+ d.dir('symlink_to_dir_outside_package', [d.file('aa', 'aaa')]),
+ d.dir('symlink_to_dir_outside_package_relative', [
+ d.file('aa', 'aaa'),
+ ]),
+ d.dir('b', [d.file('bb', 'bbb')]),
+ d.dir('symlink_to_dir_inside_package', [
+ d.file('bb', 'bbb'),
+ d.file('l', 'ttt'),
+ ]),
+ d.dir('symlink_to_dir_inside_package_relative', [
+ d.file('bb', 'bbb'),
+ d.file('l', 'ttt'),
+ ]),
+ ])
+ .validate(p.join(d.sandbox, cachePath, 'hosted', 'pub.dev'));
});
}
diff --git a/test/lish/upload_form_provides_invalid_json_test.dart b/test/lish/upload_form_provides_invalid_json_test.dart
index 1c12f0c..ba50d56 100644
--- a/test/lish/upload_form_provides_invalid_json_test.dart
+++ b/test/lish/upload_form_provides_invalid_json_test.dart
@@ -26,8 +26,10 @@
expect(
pub.stderr,
- emitsLines('Invalid server response:\n'
- '{not json'),
+ emitsLines(
+ 'Invalid server response:\n'
+ '{not json',
+ ),
);
await pub.shouldExit(1);
});
diff --git a/test/lock_file_test.dart b/test/lock_file_test.dart
index 93b2364..85c3dab 100644
--- a/test/lock_file_test.dart
+++ b/test/lock_file_test.dart
@@ -27,8 +27,7 @@
});
test('parses a series of package descriptions', () {
- final lockFile = LockFile.parse(
- '''
+ final lockFile = LockFile.parse('''
packages:
bar:
version: 1.2.3
@@ -42,9 +41,7 @@
description:
name: foo
url: https://foo.com
-''',
- cache.sources,
- );
+''', cache.sources);
expect(lockFile.packages.length, equals(2));
@@ -68,27 +65,21 @@
});
test('allows an unknown source', () {
- final lockFile = LockFile.parse(
- '''
+ final lockFile = LockFile.parse('''
packages:
foo:
source: bad
version: 1.2.3
description: foo desc
-''',
- cache.sources,
- );
+''', cache.sources);
final foo = lockFile.packages['foo']!;
expect(foo.source, equals(sources('bad')));
});
test('allows an empty dependency map', () {
- final lockFile = LockFile.parse(
- '''
+ final lockFile = LockFile.parse('''
packages:
-''',
- sources,
- );
+''', sources);
expect(lockFile.packages, isEmpty);
});
@@ -103,15 +94,12 @@
});
test('allows new-style SDK constraints', () {
- final lockFile = LockFile.parse(
- '''
+ final lockFile = LockFile.parse('''
sdks:
dart: ">=1.2.3 <4.0.0"
flutter: ^0.1.2
fuchsia: ^5.6.7
-''',
- sources,
- );
+''', sources);
expect(
lockFile.sdkConstraints['dart']!.effectiveConstraint,
VersionConstraint.parse('>=1.2.3 <4.0.0'),
@@ -127,118 +115,76 @@
});
test('throws if the top level is not a map', () {
- expect(
- () {
- LockFile.parse(
- '''
+ expect(() {
+ LockFile.parse('''
not a map
-''',
- sources,
- );
- },
- throwsSourceSpanException,
- );
+''', sources);
+ }, throwsSourceSpanException);
});
test("throws if the contents of 'packages' is not a map", () {
- expect(
- () {
- LockFile.parse(
- '''
+ expect(() {
+ LockFile.parse('''
packages: not a map
-''',
- sources,
- );
- },
- throwsSourceSpanException,
- );
+''', sources);
+ }, throwsSourceSpanException);
});
test('throws if the version is missing', () {
- expect(
- () {
- LockFile.parse(
- '''
+ expect(() {
+ LockFile.parse('''
packages:
foo:
source: fake
description: foo desc
-''',
- sources,
- );
- },
- throwsSourceSpanException,
- );
+''', sources);
+ }, throwsSourceSpanException);
});
test('throws if the version is invalid', () {
- expect(
- () {
- LockFile.parse(
- '''
+ expect(() {
+ LockFile.parse('''
packages:
foo:
version: vorpal
source: fake
description: foo desc
-''',
- sources,
- );
- },
- throwsSourceSpanException,
- );
+''', sources);
+ }, throwsSourceSpanException);
});
test('throws if the source is missing', () {
- expect(
- () {
- LockFile.parse(
- '''
+ expect(() {
+ LockFile.parse('''
packages:
foo:
version: 1.2.3
description: foo desc
-''',
- sources,
- );
- },
- throwsSourceSpanException,
- );
+''', sources);
+ }, throwsSourceSpanException);
});
test('throws if the description is missing', () {
- expect(
- () {
- LockFile.parse(
- '''
+ expect(() {
+ LockFile.parse('''
packages:
foo:
version: 1.2.3
source: fake
-''',
- sources,
- );
- },
- throwsSourceSpanException,
- );
+''', sources);
+ }, throwsSourceSpanException);
});
test('throws if the description is invalid', () {
- expect(
- () {
- LockFile.parse(
- '''
+ expect(() {
+ LockFile.parse('''
packages:
foo:
version: 1.2.3
source: hosted
description: foam
-''',
- sources,
- );
- },
- throwsSourceSpanException,
- );
+''', sources);
+ }, throwsSourceSpanException);
});
test("throws if the old-style SDK constraint isn't a string", () {
@@ -267,12 +213,9 @@
() => LockFile.parse('sdks: {dart: 1.0}', sources),
throwsSourceSpanException,
);
- expect(
- () {
- LockFile.parse('sdks: {dart: 1.0.0, flutter: 1.0}', sources);
- },
- throwsSourceSpanException,
- );
+ expect(() {
+ LockFile.parse('sdks: {dart: 1.0.0, flutter: 1.0}', sources);
+ }, throwsSourceSpanException);
});
test('throws if an sdk constraint is invalid', () {
@@ -280,17 +223,13 @@
() => LockFile.parse('sdks: {dart: oops}', sources),
throwsSourceSpanException,
);
- expect(
- () {
- LockFile.parse('sdks: {dart: 1.0.0, flutter: oops}', sources);
- },
- throwsSourceSpanException,
- );
+ expect(() {
+ LockFile.parse('sdks: {dart: 1.0.0, flutter: oops}', sources);
+ }, throwsSourceSpanException);
});
test('Reads pub.dartlang.org as pub.dev in hosted descriptions', () {
- final lockfile = LockFile.parse(
- '''
+ final lockfile = LockFile.parse('''
packages:
characters:
dependency: transitive
@@ -307,16 +246,12 @@
sha256:
source: hosted
version: "1.0.0"
-''',
- sources,
- );
+''', sources);
void expectComesFromPubDev(String name) {
- final description = lockfile.packages[name]!.description.description
- as HostedDescription;
- expect(
- description.url,
- 'https://pub.dev',
- );
+ final description =
+ lockfile.packages[name]!.description.description
+ as HostedDescription;
+ expect(description.url, 'https://pub.dev');
}
expectComesFromPubDev('characters');
@@ -325,8 +260,7 @@
test('Complains about malformed content-hashes', () {
expect(
- () => LockFile.parse(
- '''
+ () => LockFile.parse('''
packages:
retry:
dependency: transitive
@@ -336,9 +270,7 @@
sha256: abc # Not long enough
source: hosted
version: "1.0.0"
-''',
- sources,
- ),
+''', sources),
throwsA(
isA<FormatException>().having(
(e) => e.message,
@@ -350,8 +282,7 @@
});
test('ignores extra stuff in file', () {
- LockFile.parse(
- '''
+ LockFile.parse('''
extra:
some: stuff
packages:
@@ -360,9 +291,7 @@
version: 1.2.3
source: fake
description: foo desc
-''',
- sources,
- );
+''', sources);
});
});
diff --git a/test/oauth2/utils.dart b/test/oauth2/utils.dart
index dc9da93..8d0fd81 100644
--- a/test/oauth2/utils.dart
+++ b/test/oauth2/utils.dart
@@ -20,8 +20,10 @@
]) async {
await expectLater(
pub.stdout,
- emits('Pub needs your authorization to upload packages on your '
- 'behalf.'),
+ emits(
+ 'Pub needs your authorization to upload packages on your '
+ 'behalf.',
+ ),
);
final line = await pub.stdout.next;
@@ -67,13 +69,16 @@
final pairs = <List<String?>>[];
map.forEach((key, value) {
key = Uri.encodeQueryComponent(key);
- value = (value == null || value.isEmpty)
- ? null
- : Uri.encodeQueryComponent(value);
+ value =
+ (value == null || value.isEmpty)
+ ? null
+ : Uri.encodeQueryComponent(value);
pairs.add([key, value]);
});
- return pairs.map((pair) {
- if (pair[1] == null) return pair[0];
- return '${pair[0]}=${pair[1]}';
- }).join('&');
+ return pairs
+ .map((pair) {
+ if (pair[1] == null) return pair[0];
+ return '${pair[0]}=${pair[1]}';
+ })
+ .join('&');
}
diff --git a/test/oauth2/with_a_malformed_credentials_authenticates_again_test.dart b/test/oauth2/with_a_malformed_credentials_authenticates_again_test.dart
index d4e62ac..b55d415 100644
--- a/test/oauth2/with_a_malformed_credentials_authenticates_again_test.dart
+++ b/test/oauth2/with_a_malformed_credentials_authenticates_again_test.dart
@@ -10,8 +10,7 @@
import 'utils.dart';
void main() {
- test(
- 'with a malformed credentials.json, authenticates again and '
+ test('with a malformed credentials.json, authenticates again and '
'saves credentials.json', () async {
await d.validPackage().create();
diff --git a/test/oauth2/with_a_server_rejected_refresh_token_authenticates_again_test.dart b/test/oauth2/with_a_server_rejected_refresh_token_authenticates_again_test.dart
index e712967..71ea476 100644
--- a/test/oauth2/with_a_server_rejected_refresh_token_authenticates_again_test.dart
+++ b/test/oauth2/with_a_server_rejected_refresh_token_authenticates_again_test.dart
@@ -14,8 +14,7 @@
void main() {
// Regression test for issue 8849.
- test(
- 'with a server-rejected refresh token, authenticates again and '
+ test('with a server-rejected refresh token, authenticates again and '
'saves credentials.json', () async {
await d.validPackage().create();
diff --git a/test/oauth2/with_an_expired_credentials_refreshes_and_saves_test.dart b/test/oauth2/with_an_expired_credentials_refreshes_and_saves_test.dart
index aeeacf0..b8d69d9 100644
--- a/test/oauth2/with_an_expired_credentials_refreshes_and_saves_test.dart
+++ b/test/oauth2/with_an_expired_credentials_refreshes_and_saves_test.dart
@@ -11,8 +11,7 @@
import '../test_pub.dart';
void main() {
- test(
- 'with an expired credentials.json, refreshes and saves the '
+ test('with an expired credentials.json, refreshes and saves the '
'refreshed access token to credentials.json', () async {
await d.validPackage().create();
@@ -37,9 +36,10 @@
);
return shelf.Response.ok(
- jsonEncode(
- {'access_token': 'new access token', 'token_type': 'bearer'},
- ),
+ jsonEncode({
+ 'access_token': 'new access token',
+ 'token_type': 'bearer',
+ }),
headers: {'content-type': 'application/json'},
);
});
diff --git a/test/oauth2/with_an_expired_credentials_without_a_refresh_token_authenticates_again_test.dart b/test/oauth2/with_an_expired_credentials_without_a_refresh_token_authenticates_again_test.dart
index 8428bb0..3f1c1c3 100644
--- a/test/oauth2/with_an_expired_credentials_without_a_refresh_token_authenticates_again_test.dart
+++ b/test/oauth2/with_an_expired_credentials_without_a_refresh_token_authenticates_again_test.dart
@@ -10,8 +10,7 @@
import 'utils.dart';
void main() {
- test(
- 'with an expired credentials.json without a refresh token, '
+ test('with an expired credentials.json without a refresh token, '
'authenticates again and saves credentials.json', () async {
await servePackages();
await d.validPackage().create();
@@ -29,8 +28,10 @@
await expectLater(
pub.stderr,
- emits("Pub's authorization to upload packages has expired and "
- "can't be automatically refreshed."),
+ emits(
+ "Pub's authorization to upload packages has expired and "
+ "can't be automatically refreshed.",
+ ),
);
await authorizePub(pub, globalServer, 'new access token');
diff --git a/test/oauth2/with_no_credentials_authenticates_and_saves_credentials_test.dart b/test/oauth2/with_no_credentials_authenticates_and_saves_credentials_test.dart
index 9562345..a39ff86 100644
--- a/test/oauth2/with_no_credentials_authenticates_and_saves_credentials_test.dart
+++ b/test/oauth2/with_no_credentials_authenticates_and_saves_credentials_test.dart
@@ -10,8 +10,7 @@
import 'utils.dart';
void main() {
- test(
- 'with no credentials.json, authenticates and saves '
+ test('with no credentials.json, authenticates and saves '
'credentials.json', () async {
await d.validPackage().create();
await servePackages();
diff --git a/test/oauth2/with_server_rejected_credentials_authenticates_again_test.dart b/test/oauth2/with_server_rejected_credentials_authenticates_again_test.dart
index 8deb238..1080772 100644
--- a/test/oauth2/with_server_rejected_credentials_authenticates_again_test.dart
+++ b/test/oauth2/with_server_rejected_credentials_authenticates_again_test.dart
@@ -11,8 +11,7 @@
import '../test_pub.dart';
void main() {
- test(
- 'with server-rejected credentials, authenticates again and saves '
+ test('with server-rejected credentials, authenticates again and saves '
'credentials.json', () async {
await d.validPackage().create();
await servePackages();
@@ -28,7 +27,8 @@
'error': {'message': 'your token sucks'},
}),
headers: {
- 'www-authenticate': 'Bearer error="invalid_token",'
+ 'www-authenticate':
+ 'Bearer error="invalid_token",'
' error_description="your token sucks"',
},
);
diff --git a/test/outdated/outdated_test.dart b/test/outdated/outdated_test.dart
index 26a81a6..7899a28 100644
--- a/test/outdated/outdated_test.dart
+++ b/test/outdated/outdated_test.dart
@@ -65,10 +65,7 @@
..serve(
'builder',
'1.2.3',
- deps: {
- 'transitive': '^1.0.0',
- 'dev_trans': '^1.0.0',
- },
+ deps: {'transitive': '^1.0.0', 'dev_trans': '^1.0.0'},
)
..serve('transitive', '1.2.3')
..serve('dev_trans', '1.0.0')
@@ -134,11 +131,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'bar': '^1.0.0',
- 'baz': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0', 'bar': '^1.0.0', 'baz': '^1.0.0'},
}),
]).create();
await pubGet();
@@ -158,11 +151,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'bar': '^1.0.0',
- 'baz': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0', 'bar': '^1.0.0', 'baz': '^1.0.0'},
}),
]).create();
await pubGet();
@@ -181,9 +170,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0'},
}),
]).create();
await pubGet();
@@ -201,9 +188,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0'},
}),
]).create();
await pubGet();
@@ -223,10 +208,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'bar': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0', 'bar': '^1.0.0'},
}),
]).create();
await pubGet();
@@ -247,9 +229,7 @@
d.pubspec({
'name': 'app',
'version': '1.0.1',
- 'dependencies': {
- 'foo': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0'},
}),
]).create();
@@ -264,10 +244,7 @@
d.pubspec({
'name': 'app',
'version': '1.0.1',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'bar': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0', 'bar': '^1.0.0'},
}),
]).create();
@@ -291,23 +268,15 @@
..serve('baz', '1.0.0')
..serve('baz', '2.0.0');
- await d.git('foo.git', [
- d.libPubspec('foo', '1.0.1'),
- ]).create();
+ await d.git('foo.git', [d.libPubspec('foo', '1.0.1')]).create();
- await d.dir('bar', [
- d.libPubspec('bar', '1.0.1'),
- ]).create();
+ await d.dir('bar', [d.libPubspec('bar', '1.0.1')]).create();
await d.dir(appPath, [
d.pubspec({
'name': 'app',
'version': '1.0.1',
- 'dependencies': {
- 'foo': '^1.0.0',
- 'bar': '^2.0.0',
- 'baz': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0', 'bar': '^2.0.0', 'baz': '^1.0.0'},
'dependency_overrides': {
'foo': {
'git': {'url': '../foo.git'},
@@ -335,14 +304,8 @@
d.pubspec({
'name': 'app',
'version': '1.0.1',
- 'dependencies': {
- 'foo': 'any',
- 'bar': 'any',
- },
- 'dependency_overrides': {
- 'foo': '1.0.0',
- 'bar': '1.0.0',
- },
+ 'dependencies': {'foo': 'any', 'bar': 'any'},
+ 'dependency_overrides': {'foo': '1.0.0', 'bar': '1.0.0'},
}),
]).create();
@@ -351,27 +314,23 @@
await ctx.runOutdatedTests();
});
- testWithGolden('overridden dependencies with retraction- no resolution ',
- (ctx) async {
+ testWithGolden('overridden dependencies with retraction- no resolution ', (
+ ctx,
+ ) async {
ensureGit();
- final builder = await servePackages()
- ..serve('foo', '1.0.0', deps: {'bar': '^2.0.0'})
- ..serve('foo', '2.0.0', deps: {'bar': '^1.0.0'})
- ..serve('bar', '1.0.0', deps: {'foo': '^1.0.0'})
- ..serve('bar', '2.0.0', deps: {'foo': '^2.0.0'});
+ final builder =
+ await servePackages()
+ ..serve('foo', '1.0.0', deps: {'bar': '^2.0.0'})
+ ..serve('foo', '2.0.0', deps: {'bar': '^1.0.0'})
+ ..serve('bar', '1.0.0', deps: {'foo': '^1.0.0'})
+ ..serve('bar', '2.0.0', deps: {'foo': '^2.0.0'});
await d.dir(appPath, [
d.pubspec({
'name': 'app',
'version': '1.0.1',
- 'dependencies': {
- 'foo': 'any',
- 'bar': 'any',
- },
- 'dependency_overrides': {
- 'foo': '1.0.0',
- 'bar': '1.0.0',
- },
+ 'dependencies': {'foo': 'any', 'bar': 'any'},
+ 'dependency_overrides': {'foo': '1.0.0', 'bar': '1.0.0'},
}),
]).create();
@@ -390,9 +349,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0'},
'ignored_advisories': ['ABCD-1234-5678-9101', '1234-ABCD-EFGH-IJKL'],
}),
]).create();
@@ -402,10 +359,7 @@
advisoryId: 'ABCD-1234-5678-9101',
displayUrl: 'https://github.com/advisories/ABCD-1234-5678-9101',
affectedPackages: [
- AffectedPackage(
- name: 'foo',
- versions: ['1.0.0'],
- ),
+ AffectedPackage(name: 'foo', versions: ['1.0.0']),
],
);
@@ -414,10 +368,7 @@
displayUrl: 'https://github.com/advisories/EFGH-0000-1111-2222',
aliases: ['1234-ABCD-EFGH-IJKL'],
affectedPackages: [
- AffectedPackage(
- name: 'foo',
- versions: ['1.0.0'],
- ),
+ AffectedPackage(name: 'foo', versions: ['1.0.0']),
],
);
@@ -434,9 +385,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0'},
'ignored_advisories': ['ABCD-1234-5678-9101', '1234-ABCD-EFGH-IJKL'],
}),
]).create();
@@ -446,10 +395,7 @@
advisoryId: 'ABCD-1234-5678-9101',
displayUrl: 'https://github.com/advisories/ABCD-1234-5678-9101',
affectedPackages: [
- AffectedPackage(
- name: 'foo',
- versions: ['1.0.0'],
- ),
+ AffectedPackage(name: 'foo', versions: ['1.0.0']),
],
);
@@ -458,10 +404,7 @@
aliases: ['1234-ABCD-EFGH-IJKL'],
displayUrl: 'https://github.com/advisories/EFGH-0000-1111-2222',
affectedPackages: [
- AffectedPackage(
- name: 'foo',
- versions: ['1.0.0'],
- ),
+ AffectedPackage(name: 'foo', versions: ['1.0.0']),
],
);
@@ -469,10 +412,7 @@
advisoryId: 'VXYZ-1234-5678-9101',
displayUrl: 'https://github.com/advisories/VXYZ-1234-5678-9101',
affectedPackages: [
- AffectedPackage(
- name: 'foo',
- versions: ['1.0.0'],
- ),
+ AffectedPackage(name: 'foo', versions: ['1.0.0']),
],
);
@@ -480,8 +420,9 @@
await ctx.runOutdatedTests();
});
- testWithGolden('do not show advisories if no version is affected',
- (ctx) async {
+ testWithGolden('do not show advisories if no version is affected', (
+ ctx,
+ ) async {
final builder = await servePackages();
builder
..serve('foo', '1.0.0', deps: {'transitive': '^1.0.0'})
@@ -490,9 +431,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0'},
}),
]).create();
await pubGet();
@@ -501,10 +440,7 @@
advisoryId: 'ABCD-1234-5678-9101',
displayUrl: 'https://github.com/advisories/ABCD-1234-5678-9101',
affectedPackages: [
- AffectedPackage(
- name: 'foo',
- versions: ['0.1.0'],
- ),
+ AffectedPackage(name: 'foo', versions: ['0.1.0']),
],
);
@@ -521,9 +457,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0'},
}),
]).create();
await pubGet();
@@ -532,10 +466,7 @@
advisoryId: 'ABCD-1234-5678-9101',
displayUrl: 'https://github.com/advisories/ABCD-1234-5678-9101',
affectedPackages: [
- AffectedPackage(
- name: 'foo',
- versions: ['1.0.0'],
- ),
+ AffectedPackage(name: 'foo', versions: ['1.0.0']),
],
);
@@ -543,8 +474,9 @@
await ctx.runOutdatedTests();
});
- testWithGolden('show advisory - current, same package mentioned twice',
- (ctx) async {
+ testWithGolden('show advisory - current, same package mentioned twice', (
+ ctx,
+ ) async {
final builder = await servePackages();
builder
..serve('foo', '1.0.0', deps: {'transitive': '^1.0.0'})
@@ -553,9 +485,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0'},
}),
]).create();
await pubGet();
@@ -564,14 +494,8 @@
advisoryId: 'ABCD-1234-5678-9101',
displayUrl: 'https://github.com/advisories/ABCD-1234-5678-9101',
affectedPackages: [
- AffectedPackage(
- name: 'foo',
- versions: ['0.0.1'],
- ),
- AffectedPackage(
- name: 'foo',
- versions: ['1.0.0'],
- ),
+ AffectedPackage(name: 'foo', versions: ['0.0.1']),
+ AffectedPackage(name: 'foo', versions: ['1.0.0']),
],
);
@@ -588,9 +512,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0'},
}),
]).create();
await pubGet();
@@ -601,10 +523,7 @@
advisoryId: 'ABCD-1234-5678-9101',
displayUrl: 'https://github.com/advisories/ABCD-1234-5678-9101',
affectedPackages: [
- AffectedPackage(
- name: 'foo',
- versions: ['1.0.0'],
- ),
+ AffectedPackage(name: 'foo', versions: ['1.0.0']),
],
);
@@ -621,9 +540,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0'},
}),
]).create();
await pubGet();
@@ -632,10 +549,7 @@
advisoryId: 'ABCD-1234-5678-9101',
displayUrl: 'https://github.com/advisories/ABCD-1234-5678-9101',
affectedPackages: [
- AffectedPackage(
- name: 'foo',
- versions: ['1.2.0'],
- ),
+ AffectedPackage(name: 'foo', versions: ['1.2.0']),
],
);
@@ -652,9 +566,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0'},
}),
]).create();
await pubGet();
@@ -664,10 +576,7 @@
advisoryId: 'ABCD-1234-5678-9101',
displayUrl: 'https://github.com/advisories/ABCD-1234-5678-9101',
affectedPackages: [
- AffectedPackage(
- name: 'foo',
- versions: ['1.2.0'],
- ),
+ AffectedPackage(name: 'foo', versions: ['1.2.0']),
],
);
@@ -684,9 +593,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0'},
}),
]).create();
await pubGet();
@@ -695,10 +602,7 @@
advisoryId: 'ABCD-1234-5678-9101',
displayUrl: 'https://github.com/advisories/ABCD-1234-5678-9101',
affectedPackages: [
- AffectedPackage(
- name: 'foo',
- versions: ['1.0.0', '1.2.0'],
- ),
+ AffectedPackage(name: 'foo', versions: ['1.0.0', '1.2.0']),
],
);
@@ -715,9 +619,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'app',
- 'dependencies': {
- 'foo': '^1.0.0',
- },
+ 'dependencies': {'foo': '^1.0.0'},
}),
]).create();
await pubGet();
@@ -726,10 +628,7 @@
advisoryId: 'ABCD-1234-5678-9101',
displayUrl: 'https://github.com/advisories/ABCD-1234-5678-9101',
affectedPackages: [
- AffectedPackage(
- name: 'foo',
- versions: ['1.0.0', '1.2.0'],
- ),
+ AffectedPackage(name: 'foo', versions: ['1.0.0', '1.2.0']),
],
);
@@ -737,10 +636,7 @@
advisoryId: 'VXYZ-1234-5678-9101',
displayUrl: 'https://github.com/advisories/VXYZ-1234-5678-9101',
affectedPackages: [
- AffectedPackage(
- name: 'foo',
- versions: ['1.0.0'],
- ),
+ AffectedPackage(name: 'foo', versions: ['1.0.0']),
],
);
@@ -748,8 +644,7 @@
await ctx.runOutdatedTests();
});
- testWithGolden(
- 'latest version reported while locked on a prerelease '
+ testWithGolden('latest version reported while locked on a prerelease '
'can be a prerelease', (ctx) async {
await servePackages()
..serve('foo', '0.9.0')
@@ -815,17 +710,13 @@
]),
]).create();
- await pubGet(
- environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- );
+ await pubGet(environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'});
server.serve('dep', '0.9.5');
server.serve('dep_a', '0.9.5');
server.serve('dev_dep_a', '0.9.5');
- await ctx.runOutdatedTests(
- environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- );
+ await ctx.runOutdatedTests(environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'});
});
testWithGolden('Handles SDK dependencies', (ctx) async {
@@ -871,15 +762,11 @@
'environment': {'sdk': '>=2.12.0 <3.0.0'},
'dependencies': {
'foo': '^1.0.0',
- 'flutter': {
- 'sdk': 'flutter',
- },
+ 'flutter': {'sdk': 'flutter'},
},
'dev_dependencies': {
'foo': '^1.0.0',
- 'flutter_test': {
- 'sdk': 'flutter',
- },
+ 'flutter_test': {'sdk': 'flutter'},
},
}),
]).create();
@@ -908,14 +795,16 @@
testWithGolden('Handles packages that are not found on server', (ctx) async {
await servePackages();
- await d.appDir(
- dependencies: {'foo': 'any'},
- pubspec: {
- 'dependency_overrides': {
- 'foo': {'path': '../foo'},
- },
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {'foo': 'any'},
+ pubspec: {
+ 'dependency_overrides': {
+ 'foo': {'path': '../foo'},
+ },
+ },
+ )
+ .create();
await d.dir('foo', [d.libPubspec('foo', '1.0.0')]).create();
await ctx.run(['outdated']);
});
diff --git a/test/package_config_file_test.dart b/test/package_config_file_test.dart
index c92d37f..c4c59c0 100644
--- a/test/package_config_file_test.dart
+++ b/test/package_config_file_test.dart
@@ -61,20 +61,20 @@
]).validate();
});
- test('package_config.json uses relative paths if PUB_CACHE is relative',
- () async {
- final server = await servePackages();
- server.serve('foo', '1.2.3');
+ test(
+ 'package_config.json uses relative paths if PUB_CACHE is relative',
+ () async {
+ final server = await servePackages();
+ server.serve('foo', '1.2.3');
- await d.dir(appPath, [
- d.appPubspec(dependencies: {'foo': '1.2.3'}),
- ]).create();
+ await d.dir(appPath, [
+ d.appPubspec(dependencies: {'foo': '1.2.3'}),
+ ]).create();
- await pubCommand(command, environment: {'PUB_CACHE': './pub_cache'});
+ await pubCommand(command, environment: {'PUB_CACHE': './pub_cache'});
- await d.dir(appPath, [
- d.packageConfigFile(
- [
+ await d.dir(appPath, [
+ d.packageConfigFile([
PackageConfigEntry(
name: 'foo',
rootUri: p.toUri(
@@ -87,11 +87,10 @@
path: '.',
languageVersion: '3.0',
),
- ],
- pubCache: p.join(d.sandbox, appPath, 'pub_cache'),
- ),
- ]).validate();
- });
+ ], pubCache: p.join(d.sandbox, appPath, 'pub_cache')),
+ ]).validate();
+ },
+ );
test('package_config.json file is overwritten', () async {
await servePackages()
@@ -178,60 +177,56 @@
});
test(
- '.dart_tool/package_config.json file has relative path to path dependency',
- () async {
- await servePackages()
- ..serve(
- 'foo',
- '1.2.3',
- deps: {'baz': 'any'},
- contents: [d.dir('lib', [])],
- )
- ..serve('baz', '9.9.9', deps: {}, contents: [d.dir('lib', [])]);
+ '.dart_tool/package_config.json file has relative path to path dependency',
+ () async {
+ await servePackages()
+ ..serve(
+ 'foo',
+ '1.2.3',
+ deps: {'baz': 'any'},
+ contents: [d.dir('lib', [])],
+ )
+ ..serve('baz', '9.9.9', deps: {}, contents: [d.dir('lib', [])]);
- await d.dir('local_baz', [
- d.libDir('baz', 'baz 3.2.1'),
- d.pubspec({
- 'name': 'baz',
- 'version': '3.2.1',
- }),
- ]).create();
+ await d.dir('local_baz', [
+ d.libDir('baz', 'baz 3.2.1'),
+ d.pubspec({'name': 'baz', 'version': '3.2.1'}),
+ ]).create();
- await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- 'dependencies': {
- 'foo': '^1.2.3',
- },
- 'dependency_overrides': {
- 'baz': {'path': '../local_baz'},
- },
- }),
- d.dir('lib'),
- ]).create();
+ await d.dir(appPath, [
+ d.pubspec({
+ 'name': 'myapp',
+ 'dependencies': {'foo': '^1.2.3'},
+ 'dependency_overrides': {
+ 'baz': {'path': '../local_baz'},
+ },
+ }),
+ d.dir('lib'),
+ ]).create();
- await pubCommand(command);
+ await pubCommand(command);
- await d.dir(appPath, [
- d.packageConfigFile([
- d.packageConfigEntry(
- name: 'foo',
- version: '1.2.3',
- languageVersion: '3.0',
- ),
- d.packageConfigEntry(
- name: 'baz',
- path: '../local_baz',
- languageVersion: '3.0',
- ),
- d.packageConfigEntry(
- name: 'myapp',
- path: '.',
- languageVersion: '3.0',
- ),
- ]),
- ]).validate();
- });
+ await d.dir(appPath, [
+ d.packageConfigFile([
+ d.packageConfigEntry(
+ name: 'foo',
+ version: '1.2.3',
+ languageVersion: '3.0',
+ ),
+ d.packageConfigEntry(
+ name: 'baz',
+ path: '../local_baz',
+ languageVersion: '3.0',
+ ),
+ d.packageConfigEntry(
+ name: 'myapp',
+ path: '.',
+ languageVersion: '3.0',
+ ),
+ ]),
+ ]).validate();
+ },
+ );
test('package_config.json has language version', () async {
final server = await servePackages();
@@ -249,9 +244,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'myapp',
- 'dependencies': {
- 'foo': '^1.2.3',
- },
+ 'dependencies': {'foo': '^1.2.3'},
'environment': {
'sdk': '>=3.1.0 <=3.2.2+2', // tests runs with '3.1.2+3'
},
@@ -284,9 +277,7 @@
'foo',
'1.2.3',
pubspec: {
- 'environment': {
- 'sdk': '<4.0.0',
- },
+ 'environment': {'sdk': '<4.0.0'},
},
contents: [d.dir('lib', [])],
);
@@ -294,9 +285,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'myapp',
- 'dependencies': {
- 'foo': '^1.2.3',
- },
+ 'dependencies': {'foo': '^1.2.3'},
}),
d.dir('lib'),
]).create();
diff --git a/test/package_graph_file_test.dart b/test/package_graph_file_test.dart
index 823ba00..efd3a71 100644
--- a/test/package_graph_file_test.dart
+++ b/test/package_graph_file_test.dart
@@ -25,11 +25,7 @@
'dev_dependencies': {'test': '^1.0.0'},
},
)
- ..serve(
- 'bar',
- '3.2.1',
- sdk: '^3.5.0',
- )
+ ..serve('bar', '3.2.1', sdk: '^3.5.0')
..serve(
'baz',
'2.2.2',
@@ -37,16 +33,8 @@
deps: {'bar': '3.2.1'},
contents: [d.dir('lib', [])],
)
- ..serve(
- 'test',
- '1.0.0',
- sdk: '^3.5.0',
- )
- ..serve(
- 'test',
- '2.0.0',
- sdk: '^3.5.0',
- );
+ ..serve('test', '1.0.0', sdk: '^3.5.0')
+ ..serve('test', '2.0.0', sdk: '^3.5.0');
await d.dir('boo', [
d.libPubspec(
@@ -65,25 +53,17 @@
'boo': {'path': '../boo'},
},
extras: {
- 'environment': {
- 'sdk': '^3.5.0',
- },
+ 'environment': {'sdk': '^3.5.0'},
'dev_dependencies': {'test': '^2.0.0'},
'workspace': ['helper/'],
},
),
d.dir('helper', [
- d.libPubspec(
- 'helper',
- '2.0.0',
- resolutionWorkspace: true,
- ),
+ d.libPubspec('helper', '2.0.0', resolutionWorkspace: true),
]),
]).create();
- await pubGet(
- environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- );
+ await pubGet(environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'});
final packageGraph = jsonDecode(
File(p.join(d.sandbox, packageGraphFilePath)).readAsStringSync(),
@@ -119,7 +99,7 @@
'name': 'baz',
'version': '2.2.2',
'dependencies': ['bar'],
- }
+ },
],
'configVersion': 1,
});
diff --git a/test/package_list_files_test.dart b/test/package_list_files_test.dart
index 8aa1e99..4df3d2a 100644
--- a/test/package_list_files_test.dart
+++ b/test/package_list_files_test.dart
@@ -136,9 +136,7 @@
isA<DataException>().having(
(e) => e.message,
'message',
- contains(
- 'Pub does not support symlink cycles.',
- ),
+ contains('Pub does not support symlink cycles.'),
),
),
);
@@ -253,39 +251,37 @@
);
});
- test('throws on link to subdirectory of loop back to parent directory',
- () async {
- await d.dir('src', [
- d.dir(appPath, [
- d.pubspec({'name': 'myapp'}),
- d.link(
- 'symlink',
- p.join(d.sandbox, 'source'),
- forceDirectory: true,
- ),
- ]),
- ]).create();
- await d
- .link(
- 'source',
- p.join(d.sandbox, 'src'),
- forceDirectory: true,
- )
- .create();
+ test(
+ 'throws on link to subdirectory of loop back to parent directory',
+ () async {
+ await d.dir('src', [
+ d.dir(appPath, [
+ d.pubspec({'name': 'myapp'}),
+ d.link(
+ 'symlink',
+ p.join(d.sandbox, 'source'),
+ forceDirectory: true,
+ ),
+ ]),
+ ]).create();
+ await d
+ .link('source', p.join(d.sandbox, 'src'), forceDirectory: true)
+ .create();
- createEntrypoint(p.join('source', appPath));
+ createEntrypoint(p.join('source', appPath));
- expect(
- () => entrypoint!.workspaceRoot.listFiles(),
- throwsA(
- isA<DataException>().having(
- (e) => e.message,
- 'message',
- contains('Pub does not support symlink cycles.'),
+ expect(
+ () => entrypoint!.workspaceRoot.listFiles(),
+ throwsA(
+ isA<DataException>().having(
+ (e) => e.message,
+ 'message',
+ contains('Pub does not support symlink cycles.'),
+ ),
),
- ),
- );
- });
+ );
+ },
+ );
test('Does not throw when publishing via symlink', () async {
await d.dir('src', [
@@ -294,11 +290,7 @@
]),
]).create();
await d
- .link(
- 'source',
- p.join(d.sandbox, 'src'),
- forceDirectory: true,
- )
+ .link('source', p.join(d.sandbox, 'src'), forceDirectory: true)
.create();
createEntrypoint(p.join('source', appPath));
@@ -403,9 +395,7 @@
isA<DataException>().having(
(e) => e.message,
'message',
- contains(
- 'Could not resolve symbolic link',
- ),
+ contains('Could not resolve symbolic link'),
),
),
);
@@ -430,9 +420,7 @@
isA<DataException>().having(
(e) => e.message,
'message',
- contains(
- 'Could not resolve symbolic link',
- ),
+ contains('Could not resolve symbolic link'),
),
),
);
@@ -447,10 +435,12 @@
d.dir('a', [d.file('file')]),
]),
]).create();
- Link(p.join(d.sandbox, appPath, 'subdir', 'symlink1'))
- .createSync('symlink2');
- Link(p.join(d.sandbox, appPath, 'subdir', 'symlink2'))
- .createSync('symlink1');
+ Link(
+ p.join(d.sandbox, appPath, 'subdir', 'symlink1'),
+ ).createSync('symlink2');
+ Link(
+ p.join(d.sandbox, appPath, 'subdir', 'symlink2'),
+ ).createSync('symlink1');
createEntrypoint();
expect(
@@ -522,8 +512,7 @@
});
});
- test(
- "ignores files that are gitignored even if the package isn't "
+ test("ignores files that are gitignored even if the package isn't "
'the repo root', () async {
await d.dir(appPath, [
d.file('.gitignore', '*.bak'),
@@ -591,17 +580,14 @@
d.dir('subdir', [d.file('pubspec.lock')]),
]).create();
- expect(
- entrypoint!.workspaceRoot.listFiles(),
- {p.join(root, 'pubspec.yaml')},
- );
+ expect(entrypoint!.workspaceRoot.listFiles(), {
+ p.join(root, 'pubspec.yaml'),
+ });
});
test('allows pubspec.lock directories', () async {
await d.dir(appPath, [
- d.dir('pubspec.lock', [
- d.file('file.txt', 'contents'),
- ]),
+ d.dir('pubspec.lock', [d.file('file.txt', 'contents')]),
]).create();
expect(entrypoint!.workspaceRoot.listFiles(), {
@@ -719,9 +705,7 @@
d.dir('nested', [
d.file('.gitignore', '/bin/'),
d.appPubspec(),
- d.dir('bin', [
- d.file('run.dart'),
- ]),
+ d.dir('bin', [d.file('run.dart')]),
]),
]),
]);
@@ -780,9 +764,7 @@
d.dir('bin', [
d.file('.gitignore', '/run.dart'),
d.file('run.dart'),
- d.dir('nested_again', [
- d.file('run.dart'),
- ]),
+ d.dir('nested_again', [d.file('run.dart')]),
]),
]),
]),
@@ -805,9 +787,7 @@
d.dir('bin', [
d.file('.gitignore', '!/run.dart'),
d.file('run.dart'),
- d.dir('nested_again', [
- d.file('run.dart'),
- ]),
+ d.dir('nested_again', [d.file('run.dart')]),
]),
]),
]),
@@ -832,9 +812,7 @@
d.appPubspec(),
d.dir('bin', [
d.file('run.dart'),
- d.dir('nested_again', [
- d.file('run.dart'),
- ]),
+ d.dir('nested_again', [d.file('run.dart')]),
]),
]),
]),
@@ -856,9 +834,7 @@
d.appPubspec(),
d.dir('bin', [
d.file('run.dart'),
- d.dir('nested_again', [
- d.file('run.dart'),
- ]),
+ d.dir('nested_again', [d.file('run.dart')]),
]),
]),
]),
diff --git a/test/package_server.dart b/test/package_server.dart
index 7967aa4..6c2e775 100644
--- a/test/package_server.dart
+++ b/test/package_server.dart
@@ -70,153 +70,138 @@
}
static final _versionInfoPattern = RegExp(r'/api/packages/([a-zA-Z_0-9]*)');
- static final _advisoriesPattern =
- RegExp(r'/api/packages/([a-zA-Z_0-9]*)/advisories');
+ static final _advisoriesPattern = RegExp(
+ r'/api/packages/([a-zA-Z_0-9]*)/advisories',
+ );
- static final _downloadPattern =
- RegExp(r'/packages/([^/]*)/versions/([^/]*).tar.gz');
+ static final _downloadPattern = RegExp(
+ r'/packages/([^/]*)/versions/([^/]*).tar.gz',
+ );
static Future<PackageServer> start() async {
final server = PackageServer._(
await shelf_io.IOServer.bind(InternetAddress.loopbackIPv4, 0),
);
- server.handle(
- _versionInfoPattern,
- (shelf.Request request) async {
- final parts = request.url.pathSegments;
- assert(parts[0] == 'api');
- assert(parts[1] == 'packages');
- final name = parts[2];
+ server.handle(_versionInfoPattern, (shelf.Request request) async {
+ final parts = request.url.pathSegments;
+ assert(parts[0] == 'api');
+ assert(parts[1] == 'packages');
+ final name = parts[2];
- final package = server._packages[name];
- if (package == null) {
- return shelf.Response.notFound('No package named $name');
- }
+ final package = server._packages[name];
+ if (package == null) {
+ return shelf.Response.notFound('No package named $name');
+ }
- return shelf.Response.ok(
- jsonEncode({
- 'name': name,
- 'uploaders': ['nweiz@google.com'],
- 'versions': [
- for (final version in package.versions.values)
- {
- 'pubspec': version.pubspec,
- 'version': version.version.toString(),
- 'archive_url':
- '${server.url}/packages/$name/versions/${version.version}.tar.gz',
- if (version.isRetracted) 'retracted': true,
- if (version.sha256 != null || server.serveContentHashes)
- 'archive_sha256': version.sha256 ??
- hexEncode(
- (await sha256.bind(version.contents()).first).bytes,
- ),
- },
- ],
- if (package.isDiscontinued) 'isDiscontinued': true,
- if (package.advisoriesUpdated != null)
- 'advisoriesUpdated': package.advisoriesUpdated!.toIso8601String(),
- if (package.discontinuedReplacementText != null)
- 'replacedBy': package.discontinuedReplacementText,
- }),
- headers: {
- HttpHeaders.contentTypeHeader: 'application/vnd.pub.v2+json',
- },
- );
- },
- );
+ return shelf.Response.ok(
+ jsonEncode({
+ 'name': name,
+ 'uploaders': ['nweiz@google.com'],
+ 'versions': [
+ for (final version in package.versions.values)
+ {
+ 'pubspec': version.pubspec,
+ 'version': version.version.toString(),
+ 'archive_url':
+ '${server.url}/packages/$name/versions/${version.version}.tar.gz',
+ if (version.isRetracted) 'retracted': true,
+ if (version.sha256 != null || server.serveContentHashes)
+ 'archive_sha256':
+ version.sha256 ??
+ hexEncode(
+ (await sha256.bind(version.contents()).first).bytes,
+ ),
+ },
+ ],
+ if (package.isDiscontinued) 'isDiscontinued': true,
+ if (package.advisoriesUpdated != null)
+ 'advisoriesUpdated': package.advisoriesUpdated!.toIso8601String(),
+ if (package.discontinuedReplacementText != null)
+ 'replacedBy': package.discontinuedReplacementText,
+ }),
+ headers: {HttpHeaders.contentTypeHeader: 'application/vnd.pub.v2+json'},
+ );
+ });
- server.handle(
- _advisoriesPattern,
- (shelf.Request request) async {
- final parts = request.url.pathSegments;
- assert(parts[0] == 'api');
- assert(parts[1] == 'packages');
- final name = parts[2];
- assert(parts[3] == 'advisories');
+ server.handle(_advisoriesPattern, (shelf.Request request) async {
+ final parts = request.url.pathSegments;
+ assert(parts[0] == 'api');
+ assert(parts[1] == 'packages');
+ final name = parts[2];
+ assert(parts[3] == 'advisories');
- final package = server._packages[name];
- if (package == null) {
- return shelf.Response.notFound('No package named $name');
- }
+ final package = server._packages[name];
+ if (package == null) {
+ return shelf.Response.notFound('No package named $name');
+ }
- return shelf.Response.ok(
- jsonEncode({
- 'advisoriesUpdated': defaultAdvisoriesUpdated.toIso8601String(),
- 'advisories': [
- for (final advisory in package.advisories.values)
- {
- 'id': advisory.id,
- 'summary': 'Example',
- 'aliases': [...advisory.aliases],
- 'details': 'This is a dummy example.',
- 'modified': defaultAdvisoriesUpdated.toIso8601String(),
- 'published': defaultAdvisoriesUpdated.toIso8601String(),
- 'affected': [
- for (final package in advisory.affectedPackages)
- {
- 'package': {
- 'name': package.name,
- 'ecosystem': package.ecosystem,
- },
- 'versions': [...package.versions],
+ return shelf.Response.ok(
+ jsonEncode({
+ 'advisoriesUpdated': defaultAdvisoriesUpdated.toIso8601String(),
+ 'advisories': [
+ for (final advisory in package.advisories.values)
+ {
+ 'id': advisory.id,
+ 'summary': 'Example',
+ 'aliases': [...advisory.aliases],
+ 'details': 'This is a dummy example.',
+ 'modified': defaultAdvisoriesUpdated.toIso8601String(),
+ 'published': defaultAdvisoriesUpdated.toIso8601String(),
+ 'affected': [
+ for (final package in advisory.affectedPackages)
+ {
+ 'package': {
+ 'name': package.name,
+ 'ecosystem': package.ecosystem,
},
- ],
- if (advisory.displayUrl != null)
- 'database_specific': {
- 'pub_display_url': advisory.displayUrl,
+ 'versions': [...package.versions],
},
- },
- ],
- }),
- headers: {
- HttpHeaders.contentTypeHeader: 'application/vnd.pub.v2+json',
- },
- );
- },
- );
+ ],
+ if (advisory.displayUrl != null)
+ 'database_specific': {'pub_display_url': advisory.displayUrl},
+ },
+ ],
+ }),
+ headers: {HttpHeaders.contentTypeHeader: 'application/vnd.pub.v2+json'},
+ );
+ });
- server.handle(
- _downloadPattern,
- (shelf.Request request) async {
- final parts = request.url.pathSegments;
- assert(parts[0] == 'packages');
- final name = parts[1];
- assert(parts[2] == 'versions');
- final package = server._packages[name];
- if (package == null) {
- return shelf.Response.notFound('No package $name');
- }
+ server.handle(_downloadPattern, (shelf.Request request) async {
+ final parts = request.url.pathSegments;
+ assert(parts[0] == 'packages');
+ final name = parts[1];
+ assert(parts[2] == 'versions');
+ final package = server._packages[name];
+ if (package == null) {
+ return shelf.Response.notFound('No package $name');
+ }
- final version = Version.parse(
- parts[3].substring(0, parts[3].length - '.tar.gz'.length),
- );
- assert(parts[3].endsWith('.tar.gz'));
+ final version = Version.parse(
+ parts[3].substring(0, parts[3].length - '.tar.gz'.length),
+ );
+ assert(parts[3].endsWith('.tar.gz'));
- for (final packageVersion in package.versions.values) {
- if (packageVersion.version == version) {
- final headers = packageVersion.headers ?? {};
- headers[HttpHeaders.contentTypeHeader] ??= [
- 'application/octet-stream',
- ];
+ for (final packageVersion in package.versions.values) {
+ if (packageVersion.version == version) {
+ final headers = packageVersion.headers ?? {};
+ headers[HttpHeaders.contentTypeHeader] ??= [
+ 'application/octet-stream',
+ ];
- // This gate enables tests to validate the CRC32C parser by
- // passing in arbitrary values for the checksum header.
- if (server.serveChecksums &&
- !headers.containsKey(checksumHeaderName)) {
- headers[checksumHeaderName] = composeChecksumHeader(
- crc32c: await packageVersion.computeArchiveCrc32c(),
- );
- }
-
- return shelf.Response.ok(
- packageVersion.contents(),
- headers: headers,
+ // This gate enables tests to validate the CRC32C parser by
+ // passing in arbitrary values for the checksum header.
+ if (server.serveChecksums &&
+ !headers.containsKey(checksumHeaderName)) {
+ headers[checksumHeaderName] = composeChecksumHeader(
+ crc32c: await packageVersion.computeArchiveCrc32c(),
);
}
+
+ return shelf.Response.ok(packageVersion.contents(), headers: headers);
}
- return shelf.Response.notFound('No version $version of $name');
- },
- );
+ }
+ return shelf.Response.notFound('No version $version of $name');
+ });
return server;
}
@@ -231,25 +216,20 @@
String get url => _inner.url.toString();
/// From now on report errors on any request.
- void serveErrors() => _handlers
- ..clear()
- ..add(
- _PatternAndHandler(
- RegExp('.*'),
- (request) {
- fail('The HTTP server received an unexpected request:\n'
- '${request.method} ${request.requestedUri}');
- },
- ),
- );
+ void serveErrors() =>
+ _handlers
+ ..clear()
+ ..add(
+ _PatternAndHandler(RegExp('.*'), (request) {
+ fail(
+ 'The HTTP server received an unexpected request:\n'
+ '${request.method} ${request.requestedUri}',
+ );
+ }),
+ );
void handle(Pattern pattern, shelf.Handler handler) {
- _handlers.add(
- _PatternAndHandler(
- pattern,
- handler,
- ),
- );
+ _handlers.add(_PatternAndHandler(pattern, handler));
}
// Installs a handler at [pattern] that expects to be called exactly once with
@@ -260,12 +240,10 @@
void expect(String method, Pattern pattern, shelf.Handler handler) {
handle(
pattern,
- expectAsync1(
- (request) {
- test.expect(request.method, method);
- return handler(request);
- },
- ),
+ expectAsync1((request) {
+ test.expect(request.method, method);
+ return handler(request);
+ }),
);
}
@@ -330,8 +308,9 @@
..discontinuedReplacementText = replacementText;
}
- static final defaultAdvisoriesUpdated =
- DateTime.fromMicrosecondsSinceEpoch(0);
+ static final defaultAdvisoriesUpdated = DateTime.fromMicrosecondsSinceEpoch(
+ 0,
+ );
/// Add a security advisory which affects versions in [affectedPackages].
void addAdvisory({
@@ -345,14 +324,10 @@
_packages[package.name]!.advisoriesUpdated =
advisoriesUpdated ?? defaultAdvisoriesUpdated;
_packages[package.name]!.advisories.putIfAbsent(
- advisoryId,
- () => _ServedAdvisory(
- advisoryId,
- affectedPackages,
- aliases,
- displayUrl,
- ),
- );
+ advisoryId,
+ () =>
+ _ServedAdvisory(advisoryId, affectedPackages, aliases, displayUrl),
+ );
}
}
@@ -383,8 +358,9 @@
// Otherwise, compute from package contents.
if (serveChecksums) {
- checksumHeader ??=
- composeChecksumHeader(crc32c: await v.computeArchiveCrc32c());
+ checksumHeader ??= composeChecksumHeader(
+ crc32c: await v.computeArchiveCrc32c(),
+ );
}
return checksumHeader?.join(',');
diff --git a/test/pinned_dependency_hint_test.dart b/test/pinned_dependency_hint_test.dart
index d076c1f..de883b0 100644
--- a/test/pinned_dependency_hint_test.dart
+++ b/test/pinned_dependency_hint_test.dart
@@ -9,68 +9,70 @@
import 'test_pub.dart';
void main() {
- test('Gives hint when solve failure concerns a pinned flutter package',
- () async {
- await d.dir('flutter', [
- d.dir('packages', [
- d.dir(
- 'flutter_foo',
- [
+ test(
+ 'Gives hint when solve failure concerns a pinned flutter package',
+ () async {
+ await d.dir('flutter', [
+ d.dir('packages', [
+ d.dir('flutter_foo', [
d.libPubspec('flutter_foo', '0.0.1', deps: {'tool': '1.0.0'}),
- ],
+ ]),
+ ]),
+ d.flutterVersion('1.2.3'),
+ ]).create();
+ await servePackages()
+ ..serve('bar', '1.0.0', deps: {'tool': '^2.0.0'})
+ ..serve('tool', '1.0.0')
+ ..serve('tool', '2.0.0');
+
+ await d
+ .appDir(
+ dependencies: {
+ 'bar': 'any',
+ 'flutter_foo': {'sdk': 'flutter'},
+ },
+ )
+ .create();
+ await pubGet(
+ environment: {'FLUTTER_ROOT': p.join(d.sandbox, 'flutter')},
+ error: contains(
+ 'Note: tool is pinned to version 1.0.0 by '
+ 'flutter_foo from the flutter SDK.',
),
- ]),
- d.flutterVersion('1.2.3'),
- ]).create();
- await servePackages()
- ..serve('bar', '1.0.0', deps: {'tool': '^2.0.0'})
- ..serve('tool', '1.0.0')
- ..serve('tool', '2.0.0');
+ );
+ },
+ );
- await d.appDir(
- dependencies: {
- 'bar': 'any',
- 'flutter_foo': {'sdk': 'flutter'},
- },
- ).create();
- await pubGet(
- environment: {'FLUTTER_ROOT': p.join(d.sandbox, 'flutter')},
- error: contains(
- 'Note: tool is pinned to version 1.0.0 by '
- 'flutter_foo from the flutter SDK.',
- ),
- );
- });
-
- test('Gives hint when solve failure concerns a pinned flutter package',
- () async {
- await d.dir('flutter', [
- d.dir('packages', [
- d.dir(
- 'flutter_foo',
- [
+ test(
+ 'Gives hint when solve failure concerns a pinned flutter package',
+ () async {
+ await d.dir('flutter', [
+ d.dir('packages', [
+ d.dir('flutter_foo', [
d.libPubspec('flutter_foo', '0.0.1', deps: {'tool': '1.0.0'}),
- ],
- ),
- ]),
- d.flutterVersion('1.2.3'),
- ]).create();
- await servePackages()
- ..serve('tool', '1.0.0', deps: {'bar': '^2.0.0'})
- ..serve('bar', '1.0.0');
+ ]),
+ ]),
+ d.flutterVersion('1.2.3'),
+ ]).create();
+ await servePackages()
+ ..serve('tool', '1.0.0', deps: {'bar': '^2.0.0'})
+ ..serve('bar', '1.0.0');
- await d.appDir(
- dependencies: {
- 'bar': 'any',
- 'flutter_foo': {'sdk': 'flutter'},
- },
- ).create();
- await pubGet(
- environment: {'FLUTTER_ROOT': p.join(d.sandbox, 'flutter')},
- error: contains(
- 'Note: tool is pinned to version 1.0.0 by '
- 'flutter_foo from the flutter SDK.',
- ),
- );
- });
+ await d
+ .appDir(
+ dependencies: {
+ 'bar': 'any',
+ 'flutter_foo': {'sdk': 'flutter'},
+ },
+ )
+ .create();
+ await pubGet(
+ environment: {'FLUTTER_ROOT': p.join(d.sandbox, 'flutter')},
+ error: contains(
+ 'Note: tool is pinned to version 1.0.0 by '
+ 'flutter_foo from the flutter SDK.',
+ ),
+ );
+ },
+ );
}
diff --git a/test/precompilation_test.dart b/test/precompilation_test.dart
index 7ed02ea..1cf69fa 100644
--- a/test/precompilation_test.dart
+++ b/test/precompilation_test.dart
@@ -18,27 +18,21 @@
}
''');
-FileDescriptor workingMain = file(
- 'main.dart',
- '''
+FileDescriptor workingMain = file('main.dart', '''
import 'foo.dart';
main() async {
foo();
}
-''',
-);
+''');
-FileDescriptor brokenMain = file(
- 'main.dart',
- '''
+FileDescriptor brokenMain = file('main.dart', '''
import 'foo.dart';
yadda yadda
main() asyncc {
foo();
}
-''',
-);
+''');
Future<Duration> timeCompilation(
String executable, {
@@ -65,47 +59,41 @@
}
void main() {
- test('Precompilation is much faster second time and removes old artifacts',
- () async {
- await dir('app', [
- workingMain,
- foo,
- packageConfigFile([]),
- ]).create();
- final first = await timeCompilation(path('app/main.dart'));
- check(
- because: 'Should not leave a stray directory.',
- File(incrementalDillPath()).existsSync(),
- ).isFalse();
- check(File(outputPath()).existsSync()).isTrue();
+ test(
+ 'Precompilation is much faster second time and removes old artifacts',
+ () async {
+ await dir('app', [workingMain, foo, packageConfigFile([])]).create();
+ final first = await timeCompilation(path('app/main.dart'));
+ check(
+ because: 'Should not leave a stray directory.',
+ File(incrementalDillPath()).existsSync(),
+ ).isFalse();
+ check(File(outputPath()).existsSync()).isTrue();
- // Do a second compilation to compare the compile times, it should be much
- // faster because it can reuse the compiled data in the dill file.
- final second = await timeCompilation(path('app/main.dart'));
- check(first).isGreaterThan(second * 2);
+ // Do a second compilation to compare the compile times, it should be much
+ // faster because it can reuse the compiled data in the dill file.
+ final second = await timeCompilation(path('app/main.dart'));
+ check(first).isGreaterThan(second * 2);
- // Now create an error to test that the output is placed at a different
- // location.
- await dir('app', [
- brokenMain,
- foo,
- packageConfigFile([]),
- ]).create();
- final afterErrors =
- await timeCompilation(path('app/main.dart'), fails: true);
- check(File(incrementalDillPath()).existsSync()).isTrue();
- check(File(outputPath()).existsSync()).isFalse();
- check(first).isGreaterThan(afterErrors * 2);
+ // Now create an error to test that the output is placed at a different
+ // location.
+ await dir('app', [brokenMain, foo, packageConfigFile([])]).create();
+ final afterErrors = await timeCompilation(
+ path('app/main.dart'),
+ fails: true,
+ );
+ check(File(incrementalDillPath()).existsSync()).isTrue();
+ check(File(outputPath()).existsSync()).isFalse();
+ check(first).isGreaterThan(afterErrors * 2);
- // Fix the error, and check that we still use the cached output to improve
- // compile times.
- await dir('app', [
- workingMain,
- ]).create();
- final afterFix = await timeCompilation(path('app/main.dart'));
- // The output from the failed compilation should now be gone.
- check(File('${outputPath()}.incremental').existsSync()).isFalse();
- check(File(outputPath()).existsSync()).isTrue();
- check(first).isGreaterThan(afterFix * 2);
- });
+ // Fix the error, and check that we still use the cached output to improve
+ // compile times.
+ await dir('app', [workingMain]).create();
+ final afterFix = await timeCompilation(path('app/main.dart'));
+ // The output from the failed compilation should now be gone.
+ check(File('${outputPath()}.incremental').existsSync()).isFalse();
+ check(File(outputPath()).existsSync()).isTrue();
+ check(first).isGreaterThan(afterFix * 2);
+ },
+ );
}
diff --git a/test/pub_get_and_upgrade_test.dart b/test/pub_get_and_upgrade_test.dart
index 08e9c5c..ae8f1ee 100644
--- a/test/pub_get_and_upgrade_test.dart
+++ b/test/pub_get_and_upgrade_test.dart
@@ -51,9 +51,9 @@
await pubCommand(command);
await d.dir('myapp', [
- d.packageConfigFile(
- [d.packageConfigEntry(name: 'myapp_name', path: '.')],
- ),
+ d.packageConfigFile([
+ d.packageConfigEntry(name: 'myapp_name', path: '.'),
+ ]),
]).validate();
});
diff --git a/test/pubspec_overrides_test.dart b/test/pubspec_overrides_test.dart
index 291b9c8..aa34e44 100644
--- a/test/pubspec_overrides_test.dart
+++ b/test/pubspec_overrides_test.dart
@@ -26,9 +26,7 @@
final overridesPath = p.join('.', 'pubspec_overrides.yaml');
await pubCommand(
command,
- output: contains(
- '! lib 2.0.0 (overridden in $overridesPath)',
- ),
+ output: contains('! lib 2.0.0 (overridden in $overridesPath)'),
);
await d.dir(appPath, [
@@ -78,8 +76,7 @@
),
);
});
- test(
- "An empty pubspec_overrides.yaml doesn't shadow overrides "
+ test("An empty pubspec_overrides.yaml doesn't shadow overrides "
'from pubspec.yaml', () async {
await servePackages()
..serve('lib', '1.0.0')
@@ -87,9 +84,7 @@
await d.dir(appPath, [
d.appPubspec(
- dependencies: {
- 'lib': '1.0.0',
- },
+ dependencies: {'lib': '1.0.0'},
extras: {
'dependency_overrides': {'lib': '2.0.0'},
},
@@ -99,8 +94,6 @@
d.pubspecOverrides({}),
]).create();
- await pubGet(
- output: contains('! lib 2.0.0 (overridden)'),
- );
+ await pubGet(output: contains('! lib 2.0.0 (overridden)'));
});
}
diff --git a/test/pubspec_test.dart b/test/pubspec_test.dart
index f0f5a0f..60db310 100644
--- a/test/pubspec_test.dart
+++ b/test/pubspec_test.dart
@@ -17,8 +17,9 @@
group('parse()', () {
final sources = SystemCache().sources;
- final throwsPubspecException =
- throwsA(const TypeMatcher<SourceSpanApplicationException>());
+ final throwsPubspecException = throwsA(
+ const TypeMatcher<SourceSpanApplicationException>(),
+ );
void expectPubspecException(
String contents,
@@ -60,8 +61,7 @@
);
});
- test(
- "eagerly throws an error if the pubspec name doesn't match the "
+ test("eagerly throws an error if the pubspec name doesn't match the "
'expected name', () {
expect(
() => Pubspec.parse(
@@ -74,8 +74,7 @@
);
});
- test(
- "eagerly throws an error if the pubspec doesn't have a name and an "
+ test("eagerly throws an error if the pubspec doesn't have a name and an "
'expected name is passed', () {
expect(
() => Pubspec.parse(
@@ -239,77 +238,59 @@
});
test('throws if it depends on itself', () {
- expectPubspecException(
- '''
+ expectPubspecException('''
name: myapp
dependencies:
myapp:
fake: ok
-''',
- (pubspec) => pubspec.dependencies,
- );
+''', (pubspec) => pubspec.dependencies);
});
test('throws if it has a dev dependency on itself', () {
- expectPubspecException(
- '''
+ expectPubspecException('''
name: myapp
dev_dependencies:
myapp:
fake: ok
-''',
- (pubspec) => pubspec.devDependencies,
- );
+''', (pubspec) => pubspec.devDependencies);
});
test('throws if it has an override on itself', () {
- expectPubspecException(
- '''
+ expectPubspecException('''
name: myapp
dependency_overrides:
myapp:
fake: ok
-''',
- (pubspec) => pubspec.dependencyOverrides,
- );
+''', (pubspec) => pubspec.dependencyOverrides);
});
test("throws if the description isn't valid", () {
- expectPubspecException(
- '''
+ expectPubspecException('''
name: myapp
dependencies:
foo:
hosted:
name: foo
url: '::'
-''',
- (pubspec) => pubspec.dependencies,
- );
+''', (pubspec) => pubspec.dependencies);
});
test('throws if dependency version is not a string', () {
- expectPubspecException(
- '''
+ expectPubspecException('''
dependencies:
foo:
fake: ok
version: 1.2
-''',
- (pubspec) => pubspec.dependencies,
- );
+''', (pubspec) => pubspec.dependencies);
});
test('throws if version is not a version constraint', () {
- expectPubspecException(
- '''
+ expectPubspecException('''
dependencies:
foo:
fake: ok
version: not constraint
-''',
- (pubspec) => pubspec.dependencies,
- );
+''', (pubspec) => pubspec.dependencies);
});
test("throws if 'name' is not a string", () {
@@ -392,7 +373,8 @@
workspace: ['a', 'b', 'c']
''',
(p) => p.workspace,
- expectedContains: '`workspace` and `resolution` '
+ expectedContains:
+ '`workspace` and `resolution` '
'requires at least language version 3.5',
hintContains: '''
Consider updating the SDK constraint to:
@@ -425,7 +407,8 @@
resolution: workspace
''',
(p) => p.resolution,
- expectedContains: '`workspace` and `resolution` '
+ expectedContains:
+ '`workspace` and `resolution` '
'requires at least language version 3.5',
hintContains: '''
Consider updating the SDK constraint to:
@@ -437,35 +420,26 @@
});
test('throws if workspace is not a list', () {
- expectPubspecException(
- '''
+ expectPubspecException('''
environment:
sdk: ^3.5.0
workspace: 'a string'
-''',
- (pubspec) => pubspec.workspace,
- );
+''', (pubspec) => pubspec.workspace);
});
test('throws if workspace is a list of not-strings', () {
- expectPubspecException(
- '''
+ expectPubspecException('''
environment:
sdk: ^3.5.0
workspace: ['a string', 24]
-''',
- (pubspec) => pubspec.workspace,
- );
+''', (pubspec) => pubspec.workspace);
});
test('throws if resolution is not a reasonable string', () {
- expectPubspecException(
- '''
+ expectPubspecException('''
environment:
sdk: ^3.5.0
-resolution: "sometimes"''',
- (pubspec) => pubspec.resolution,
- );
+resolution: "sometimes"''', (pubspec) => pubspec.resolution);
});
test('allows comment-only files', () {
@@ -519,14 +493,12 @@
expect(foo.name, equals('foo'));
expect(foo.source.name, 'hosted');
expect(
- ResolvedHostedDescription(
- foo.description as HostedDescription,
- sha256: null,
- ).serializeForLockfile(containingDir: null),
- {
- 'url': 'https://example.org/pub/',
- 'name': 'bar',
- });
+ ResolvedHostedDescription(
+ foo.description as HostedDescription,
+ sha256: null,
+ ).serializeForLockfile(containingDir: null),
+ {'url': 'https://example.org/pub/', 'name': 'bar'},
+ );
});
test('with url only', () {
@@ -548,14 +520,12 @@
expect(foo.name, equals('foo'));
expect(foo.source.name, 'hosted');
expect(
- ResolvedHostedDescription(
- foo.description as HostedDescription,
- sha256: null,
- ).serializeForLockfile(containingDir: null),
- {
- 'url': 'https://example.org/pub/',
- 'name': 'foo',
- });
+ ResolvedHostedDescription(
+ foo.description as HostedDescription,
+ sha256: null,
+ ).serializeForLockfile(containingDir: null),
+ {'url': 'https://example.org/pub/', 'name': 'foo'},
+ );
});
test('with url as string', () {
@@ -576,14 +546,12 @@
expect(foo.name, equals('foo'));
expect(foo.source.name, 'hosted');
expect(
- ResolvedHostedDescription(
- foo.description as HostedDescription,
- sha256: null,
- ).serializeForLockfile(containingDir: null),
- {
- 'url': 'https://example.org/pub/',
- 'name': 'foo',
- });
+ ResolvedHostedDescription(
+ foo.description as HostedDescription,
+ sha256: null,
+ ).serializeForLockfile(containingDir: null),
+ {'url': 'https://example.org/pub/', 'name': 'foo'},
+ );
});
test('interprets string description as name for older versions', () {
@@ -604,14 +572,12 @@
expect(foo.name, equals('foo'));
expect(foo.source.name, 'hosted');
expect(
- ResolvedHostedDescription(
- foo.description as HostedDescription,
- sha256: null,
- ).serializeForLockfile(containingDir: null),
- {
- 'url': 'https://pub.dev',
- 'name': 'bar',
- });
+ ResolvedHostedDescription(
+ foo.description as HostedDescription,
+ sha256: null,
+ ).serializeForLockfile(containingDir: null),
+ {'url': 'https://pub.dev', 'name': 'bar'},
+ );
});
test(
@@ -633,8 +599,11 @@
expect(
() => pubspec.dependencies,
throwsA(
- isA<SourceSpanApplicationException>()
- .having((e) => e.span!.text, 'span.text', 'invalid value'),
+ isA<SourceSpanApplicationException>().having(
+ (e) => e.span!.text,
+ 'span.text',
+ 'invalid value',
+ ),
),
);
},
@@ -655,14 +624,12 @@
expect(foo.name, equals('foo'));
expect(foo.source.name, 'hosted');
expect(
- ResolvedHostedDescription(
- foo.description as HostedDescription,
- sha256: null,
- ).serializeForLockfile(containingDir: null),
- {
- 'url': 'https://pub.dev',
- 'name': 'foo',
- });
+ ResolvedHostedDescription(
+ foo.description as HostedDescription,
+ sha256: null,
+ ).serializeForLockfile(containingDir: null),
+ {'url': 'https://pub.dev', 'name': 'foo'},
+ );
});
group('throws without a min SDK constraint', () {
@@ -676,7 +643,8 @@
url: https://example.org/pub/
''',
(pubspec) => pubspec.dependencies,
- expectedContains: "The 'name' key must have a "
+ expectedContains:
+ "The 'name' key must have a "
'string value without a minimum Dart '
'SDK constraint of 2.15.',
);
@@ -693,7 +661,8 @@
hosted: http://pub.example.org
''',
(pubspec) => pubspec.dependencies,
- expectedContains: 'Using `hosted: <url>` is only supported '
+ expectedContains:
+ 'Using `hosted: <url>` is only supported '
'with a minimum SDK constraint of 2.15.',
);
},
@@ -703,53 +672,41 @@
group('git dependencies', () {
test('path must be a string', () {
- expectPubspecException(
- '''
+ expectPubspecException('''
dependencies:
foo:
git:
url: git://github.com/dart-lang/foo
path: 12
-''',
- (pubspec) => pubspec.dependencies,
- );
+''', (pubspec) => pubspec.dependencies);
});
test('path must be relative', () {
- expectPubspecException(
- '''
+ expectPubspecException('''
dependencies:
foo:
git:
url: git://github.com/dart-lang/foo
path: git://github.com/dart-lang/foo/bar
-''',
- (pubspec) => pubspec.dependencies,
- );
+''', (pubspec) => pubspec.dependencies);
- expectPubspecException(
- '''
+ expectPubspecException('''
dependencies:
foo:
git:
url: git://github.com/dart-lang/foo
path: /foo
-''',
- (pubspec) => pubspec.dependencies,
- );
+''', (pubspec) => pubspec.dependencies);
});
test('path must be within the repository', () {
- expectPubspecException(
- '''
+ expectPubspecException('''
dependencies:
foo:
git:
url: git://github.com/dart-lang/foo
path: foo/../../bar
-''',
- (pubspec) => pubspec.dependencies,
- );
+''', (pubspec) => pubspec.dependencies);
});
});
@@ -801,8 +758,7 @@
expect(pubspec.sdkConstraints, isNot(contains('fuchsia')));
});
- test(
- 'default upper constraint for the SDK applies only if compatible '
+ test('default upper constraint for the SDK applies only if compatible '
'with the lower bound', () {
final pubspec = Pubspec.parse(
'''
@@ -1060,13 +1016,10 @@
);
}
- expect(
- () {
- final pubspec = parsePubspecOverrides(contents);
- fn(pubspec);
- },
- throwsA(expectation),
- );
+ expect(() {
+ final pubspec = parsePubspecOverrides(contents);
+ fn(pubspec);
+ }, throwsA(expectation));
}
test('allows empty overrides file', () {
@@ -1116,25 +1069,18 @@
});
test('throws if overrides contain invalid dependency section', () {
- expectPubspecOverridesException(
- '''
+ expectPubspecOverridesException('''
dependency_overrides: false
-''',
- (pubspecOverrides) => pubspecOverrides.dependencyOverrides,
- );
+''', (pubspecOverrides) => pubspecOverrides.dependencyOverrides);
});
test('throws if overrides contain an unknown field', () {
- expectPubspecOverridesException(
- '''
+ expectPubspecOverridesException('''
name: 'foo'
-''',
- (pubspecOverrides) => pubspecOverrides.dependencyOverrides,
- );
+''', (pubspecOverrides) => pubspecOverrides.dependencyOverrides);
});
});
- test(
- 'Throws after language 3.7 '
+ test('Throws after language 3.7 '
'if using unknown keys in dependency description', () {
expectPubspecException(
'''
diff --git a/test/pubspec_utils_test.dart b/test/pubspec_utils_test.dart
index 6622c1c..e38ead6 100644
--- a/test/pubspec_utils_test.dart
+++ b/test/pubspec_utils_test.dart
@@ -59,12 +59,14 @@
});
test(
- 'returns the empty version constraint when an empty version constraint '
- 'is provided', () {
- final constraint = VersionConstraint.empty;
+ 'returns the empty version constraint when an empty version constraint '
+ 'is provided',
+ () {
+ final constraint = VersionConstraint.empty;
- expect(stripUpperBound(constraint), VersionConstraint.empty);
- });
+ expect(stripUpperBound(constraint), VersionConstraint.empty);
+ },
+ );
test('returns the empty version constraint on empty version union', () {
final constraint = VersionUnion.fromRanges([]);
diff --git a/test/rate_limited_scheduler_test.dart b/test/rate_limited_scheduler_test.dart
index 082eaa1..d2a88c8 100644
--- a/test/rate_limited_scheduler_test.dart
+++ b/test/rate_limited_scheduler_test.dart
@@ -8,8 +8,11 @@
import 'package:test/test.dart';
void main() {
- Map<String, Completer> threeCompleters() =>
- {'a': Completer(), 'b': Completer(), 'c': Completer()};
+ Map<String, Completer> threeCompleters() => {
+ 'a': Completer(),
+ 'b': Completer(),
+ 'c': Completer(),
+ };
test('scheduler is rate limited', () async {
final completers = threeCompleters();
@@ -26,9 +29,10 @@
preschedule('a');
preschedule('b');
preschedule('c');
- await Future.wait(
- [isBeingProcessed['a']!.future, isBeingProcessed['b']!.future],
- );
+ await Future.wait([
+ isBeingProcessed['a']!.future,
+ isBeingProcessed['b']!.future,
+ ]);
expect(isBeingProcessed['c']!.isCompleted, isFalse);
completers['a']!.complete();
await isBeingProcessed['c']!.future;
@@ -37,68 +41,72 @@
});
});
- test('scheduler.preschedule cancels unrun prescheduled task after callback',
- () async {
- final completers = threeCompleters();
- final isBeingProcessed = threeCompleters();
+ test(
+ 'scheduler.preschedule cancels unrun prescheduled task after callback',
+ () async {
+ final completers = threeCompleters();
+ final isBeingProcessed = threeCompleters();
- Future<String> f(String i) async {
- isBeingProcessed[i]!.complete();
- await completers[i]!.future;
- return i.toUpperCase();
- }
+ Future<String> f(String i) async {
+ isBeingProcessed[i]!.complete();
+ await completers[i]!.future;
+ return i.toUpperCase();
+ }
- final scheduler = RateLimitedScheduler(f, maxConcurrentOperations: 1);
+ final scheduler = RateLimitedScheduler(f, maxConcurrentOperations: 1);
- await scheduler.withPrescheduling((preschedule1) async {
- await scheduler.withPrescheduling((preschedule2) async {
- preschedule1('a');
- preschedule2('b');
- preschedule1('c');
- await isBeingProcessed['a']!.future;
- // b, c should not start processing due to rate-limiting.
+ await scheduler.withPrescheduling((preschedule1) async {
+ await scheduler.withPrescheduling((preschedule2) async {
+ preschedule1('a');
+ preschedule2('b');
+ preschedule1('c');
+ await isBeingProcessed['a']!.future;
+ // b, c should not start processing due to rate-limiting.
+ expect(isBeingProcessed['b']!.isCompleted, isFalse);
+ expect(isBeingProcessed['c']!.isCompleted, isFalse);
+ });
+ completers['a']!.complete();
+ // b is removed from the queue, now c should start processing.
+ await isBeingProcessed['c']!.future;
+ completers['c']!.complete();
+ expect(await scheduler.schedule('c'), 'C');
+ // b is not on the queue anymore.
expect(isBeingProcessed['b']!.isCompleted, isFalse);
- expect(isBeingProcessed['c']!.isCompleted, isFalse);
+ });
+ },
+ );
+
+ test(
+ 'scheduler.preschedule does not cancel tasks that are scheduled',
+ () async {
+ final completers = threeCompleters();
+ final isBeingProcessed = threeCompleters();
+
+ Future<String> f(String i) async {
+ isBeingProcessed[i]!.complete();
+ await completers[i]!.future;
+ return i.toUpperCase();
+ }
+
+ final scheduler = RateLimitedScheduler(f, maxConcurrentOperations: 1);
+
+ Future? b;
+ await scheduler.withPrescheduling((preschedule) async {
+ preschedule('a');
+ preschedule('b');
+ await isBeingProcessed['a']!.future;
+ // b should not start processing due to rate-limiting.
+ expect(isBeingProcessed['b']!.isCompleted, isFalse);
+ b = scheduler.schedule('b');
});
completers['a']!.complete();
- // b is removed from the queue, now c should start processing.
- await isBeingProcessed['c']!.future;
- completers['c']!.complete();
- expect(await scheduler.schedule('c'), 'C');
- // b is not on the queue anymore.
- expect(isBeingProcessed['b']!.isCompleted, isFalse);
- });
- });
-
- test('scheduler.preschedule does not cancel tasks that are scheduled',
- () async {
- final completers = threeCompleters();
- final isBeingProcessed = threeCompleters();
-
- Future<String> f(String i) async {
- isBeingProcessed[i]!.complete();
- await completers[i]!.future;
- return i.toUpperCase();
- }
-
- final scheduler = RateLimitedScheduler(f, maxConcurrentOperations: 1);
-
- Future? b;
- await scheduler.withPrescheduling((preschedule) async {
- preschedule('a');
- preschedule('b');
- await isBeingProcessed['a']!.future;
- // b should not start processing due to rate-limiting.
- expect(isBeingProcessed['b']!.isCompleted, isFalse);
- b = scheduler.schedule('b');
- });
- completers['a']!.complete();
- expect(await scheduler.schedule('a'), 'A');
- // b was scheduled, so it should get processed now
- await isBeingProcessed['b']!.future;
- completers['b']!.complete();
- expect(await b, 'B');
- });
+ expect(await scheduler.schedule('a'), 'A');
+ // b was scheduled, so it should get processed now
+ await isBeingProcessed['b']!.future;
+ completers['b']!.complete();
+ expect(await b, 'B');
+ },
+ );
test('scheduler caches results', () async {
final completers = threeCompleters();
@@ -186,41 +194,29 @@
final scheduler = RateLimitedScheduler(f, maxConcurrentOperations: 2);
await scheduler.withPrescheduling((preschedule) async {
- runZoned(
- () {
- preschedule('a');
- },
- zoneValues: {'zoneValue': 'A'},
- );
- runZoned(
- () {
- preschedule('b');
- },
- zoneValues: {'zoneValue': 'B'},
- );
- runZoned(
- () {
- preschedule('c');
- },
- zoneValues: {'zoneValue': 'C'},
- );
+ runZoned(() {
+ preschedule('a');
+ }, zoneValues: {'zoneValue': 'A'});
+ runZoned(() {
+ preschedule('b');
+ }, zoneValues: {'zoneValue': 'B'});
+ runZoned(() {
+ preschedule('c');
+ }, zoneValues: {'zoneValue': 'C'});
- await runZoned(
- () async {
- await isBeingProcessed['a']!.future;
- await isBeingProcessed['b']!.future;
- // This will put 'c' in front of the queue, but in a zone with
- // zoneValue bound to S.
- final f = expectLater(scheduler.schedule('c'), completion('S'));
- completers['a']!.complete();
- completers['b']!.complete();
- expect(await scheduler.schedule('a'), 'A');
- expect(await scheduler.schedule('b'), 'B');
- completers['c']!.complete();
- await f;
- },
- zoneValues: {'zoneValue': 'S'},
- );
+ await runZoned(() async {
+ await isBeingProcessed['a']!.future;
+ await isBeingProcessed['b']!.future;
+ // This will put 'c' in front of the queue, but in a zone with
+ // zoneValue bound to S.
+ final f = expectLater(scheduler.schedule('c'), completion('S'));
+ completers['a']!.complete();
+ completers['b']!.complete();
+ expect(await scheduler.schedule('a'), 'A');
+ expect(await scheduler.schedule('b'), 'B');
+ completers['c']!.complete();
+ await f;
+ }, zoneValues: {'zoneValue': 'S'});
});
});
}
diff --git a/test/reformat_ranges_test.dart b/test/reformat_ranges_test.dart
index c967847..6a78ce5 100644
--- a/test/reformat_ranges_test.dart
+++ b/test/reformat_ranges_test.dart
@@ -23,34 +23,18 @@
alwaysIncludeMaxPreRelease: true,
),
),
- equals(
- (
- Version.parse('1.2.4-0'),
- false,
- ),
- ),
+ equals((Version.parse('1.2.4-0'), false)),
);
expect(
reformatMax(
- [
- PackageId(
- 'abc',
- Version.parse('1.2.4-3'),
- description,
- ),
- ],
+ [PackageId('abc', Version.parse('1.2.4-3'), description)],
VersionRange(
min: Version.parse('0.2.4'),
max: Version.parse('1.2.4'),
alwaysIncludeMaxPreRelease: true,
),
),
- equals(
- (
- Version.parse('1.2.4-3'),
- true,
- ),
- ),
+ equals((Version.parse('1.2.4-3'), true)),
);
expect(
reformatMax(
diff --git a/test/remove/remove_test.dart b/test/remove/remove_test.dart
index 7c5fbc6..7066d82 100644
--- a/test/remove/remove_test.dart
+++ b/test/remove/remove_test.dart
@@ -25,15 +25,16 @@
await d.appDir().validate();
});
- test('removing a package from dependencies does not affect dev_dependencies',
- () async {
- await servePackages()
- ..serve('foo', '1.2.3')
- ..serve('foo', '1.2.2')
- ..serve('bar', '2.0.0');
+ test(
+ 'removing a package from dependencies does not affect dev_dependencies',
+ () async {
+ await servePackages()
+ ..serve('foo', '1.2.3')
+ ..serve('foo', '1.2.2')
+ ..serve('bar', '2.0.0');
- await d.dir(appPath, [
- d.file('pubspec.yaml', '''
+ await d.dir(appPath, [
+ d.file('pubspec.yaml', '''
name: myapp
dependencies:
foo: 1.2.3
@@ -44,22 +45,23 @@
environment:
sdk: '$defaultSdkConstraint'
'''),
- ]).create();
+ ]).create();
- await pubRemove(args: ['foo']);
+ await pubRemove(args: ['foo']);
- await d.cacheDir({'bar': '2.0.0'}).validate();
- await d.appPackageConfigFile([
- d.packageConfigEntry(name: 'bar', version: '2.0.0'),
- ]).validate();
+ await d.cacheDir({'bar': '2.0.0'}).validate();
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(name: 'bar', version: '2.0.0'),
+ ]).validate();
- await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- 'dev_dependencies': {'bar': '2.0.0'},
- }),
- ]).validate();
- });
+ await d.dir(appPath, [
+ d.pubspec({
+ 'name': 'myapp',
+ 'dev_dependencies': {'bar': '2.0.0'},
+ }),
+ ]).validate();
+ },
+ );
test('dry-run does not actually remove dependency', () async {
final server = await servePackages();
@@ -125,38 +127,40 @@
]).validate();
});
- test('removes multiple packages from dependencies and dev_dependencies',
- () async {
- await servePackages()
- ..serve('foo', '1.2.3')
- ..serve('bar', '2.3.4')
- ..serve('baz', '3.2.1')
- ..serve('jfj', '0.2.1');
+ test(
+ 'removes multiple packages from dependencies and dev_dependencies',
+ () async {
+ await servePackages()
+ ..serve('foo', '1.2.3')
+ ..serve('bar', '2.3.4')
+ ..serve('baz', '3.2.1')
+ ..serve('jfj', '0.2.1');
- await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- 'dependencies': {'bar': '>=2.3.4', 'jfj': '0.2.1'},
- 'dev_dependencies': {'foo': '^1.2.3', 'baz': '3.2.1'},
- }),
- ]).create();
- await pubGet();
+ await d.dir(appPath, [
+ d.pubspec({
+ 'name': 'myapp',
+ 'dependencies': {'bar': '>=2.3.4', 'jfj': '0.2.1'},
+ 'dev_dependencies': {'foo': '^1.2.3', 'baz': '3.2.1'},
+ }),
+ ]).create();
+ await pubGet();
- await pubRemove(args: ['foo', 'bar', 'baz']);
+ await pubRemove(args: ['foo', 'bar', 'baz']);
- await d.cacheDir({'jfj': '0.2.1'}).validate();
+ await d.cacheDir({'jfj': '0.2.1'}).validate();
- await d.appPackageConfigFile([
- d.packageConfigEntry(name: 'jfj', version: '0.2.1'),
- ]).validate();
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(name: 'jfj', version: '0.2.1'),
+ ]).validate();
- await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- 'dependencies': {'jfj': '0.2.1'},
- }),
- ]).validate();
- });
+ await d.dir(appPath, [
+ d.pubspec({
+ 'name': 'myapp',
+ 'dependencies': {'jfj': '0.2.1'},
+ }),
+ ]).validate();
+ },
+ );
test('removes git dependencies', () async {
final server = await servePackages();
@@ -168,14 +172,16 @@
]);
await repo.create();
- await d.appDir(
- dependencies: {
- 'foo': {
- 'git': {'url': '../foo.git', 'path': 'subdir'},
- },
- 'bar': '1.2.3',
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'git': {'url': '../foo.git', 'path': 'subdir'},
+ },
+ 'bar': '1.2.3',
+ },
+ )
+ .create();
await pubGet();
@@ -190,15 +196,19 @@
test('removes path dependencies', () async {
final server = await servePackages();
server.serve('bar', '1.2.3');
- await d
- .dir('foo', [d.libDir('foo'), d.libPubspec('foo', '0.0.1')]).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '0.0.1'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'path': '../foo'},
- 'bar': '1.2.3',
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'path': '../foo'},
+ 'bar': '1.2.3',
+ },
+ )
+ .create();
await pubGet();
@@ -216,15 +226,20 @@
final custom = await startPackageServer();
custom.serve('foo', '1.2.3');
- await d.appDir(
- dependencies: {
- 'foo': {
- 'version': '1.2.3',
- 'hosted': {'name': 'foo', 'url': 'http://localhost:${custom.port}'},
- },
- 'bar': '2.0.1',
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {
+ 'version': '1.2.3',
+ 'hosted': {
+ 'name': 'foo',
+ 'url': 'http://localhost:${custom.port}',
+ },
+ },
+ 'bar': '2.0.1',
+ },
+ )
+ .create();
await pubGet();
@@ -321,9 +336,7 @@
await d.appPackageConfigFile([]).validate();
await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- }),
+ d.pubspec({'name': 'myapp'}),
]).validate();
});
}
diff --git a/test/run/errors_if_path_in_dependency_test.dart b/test/run/errors_if_path_in_dependency_test.dart
index 6fdfae0..5a62685 100644
--- a/test/run/errors_if_path_in_dependency_test.dart
+++ b/test/run/errors_if_path_in_dependency_test.dart
@@ -9,8 +9,7 @@
import '../test_pub.dart';
void main() {
- test(
- 'Errors if the executable is in a subdirectory in a '
+ test('Errors if the executable is in a subdirectory in a '
'dependency.', () async {
await d.dir('foo', [d.libPubspec('foo', '1.0.0')]).create();
diff --git a/test/run/includes_parent_directories_of_entrypoint_test.dart b/test/run/includes_parent_directories_of_entrypoint_test.dart
index 668714d..31b23da 100644
--- a/test/run/includes_parent_directories_of_entrypoint_test.dart
+++ b/test/run/includes_parent_directories_of_entrypoint_test.dart
@@ -17,8 +17,7 @@
""";
void main() {
- test(
- 'allows assets in parent directories of the entrypoint to be '
+ test('allows assets in parent directories of the entrypoint to be '
'accessed', () async {
await d.dir(appPath, [
d.appPubspec(),
diff --git a/test/run/nonexistent_dependency_test.dart b/test/run/nonexistent_dependency_test.dart
index 1f420fa..3682ae7 100644
--- a/test/run/nonexistent_dependency_test.dart
+++ b/test/run/nonexistent_dependency_test.dart
@@ -16,8 +16,10 @@
final pub = await pubRun(args: ['foo:script']);
expect(
pub.stderr,
- emits('Could not find package "foo". Did you forget to add a '
- 'dependency?'),
+ emits(
+ 'Could not find package "foo". Did you forget to add a '
+ 'dependency?',
+ ),
);
await pub.shouldExit(exit_codes.DATA);
});
diff --git a/test/run/nonexistent_script_in_dependency_test.dart b/test/run/nonexistent_script_in_dependency_test.dart
index 15c0e3e..5281804 100644
--- a/test/run/nonexistent_script_in_dependency_test.dart
+++ b/test/run/nonexistent_script_in_dependency_test.dart
@@ -26,9 +26,7 @@
final pub = await pubRun(args: ['foo:script']);
expect(
pub.stderr,
- emits(
- "Could not find ${p.join("bin", "script.dart")} in package foo.",
- ),
+ emits("Could not find ${p.join("bin", "script.dart")} in package foo."),
);
await pub.shouldExit(exit_codes.NO_INPUT);
});
diff --git a/test/run/package_api_test.dart b/test/run/package_api_test.dart
index 6d95d9d..1afcec6 100644
--- a/test/run/package_api_test.dart
+++ b/test/run/package_api_test.dart
@@ -87,8 +87,10 @@
pub.stdout,
emits(p.toUri(p.join(d.sandbox, 'myapp/lib/resource.txt')).toString()),
);
- final fooResourcePath =
- p.join(globalServer.pathInCache('foo', '1.0.0'), 'lib/resource.txt');
+ final fooResourcePath = p.join(
+ globalServer.pathInCache('foo', '1.0.0'),
+ 'lib/resource.txt',
+ );
expect(pub.stdout, emits(p.toUri(fooResourcePath).toString()));
await pub.shouldExit(0);
});
diff --git a/test/run/passes_along_arguments_test.dart b/test/run/passes_along_arguments_test.dart
index 15f383c..7a38602 100644
--- a/test/run/passes_along_arguments_test.dart
+++ b/test/run/passes_along_arguments_test.dart
@@ -24,8 +24,9 @@
// Use some args that would trip up pub's arg parser to ensure that it
// isn't trying to look at them.
- final pub =
- await pubRun(args: ['bin/args', '--verbose', '-m', '--', 'help']);
+ final pub = await pubRun(
+ args: ['bin/args', '--verbose', '-m', '--', 'help'],
+ );
expect(pub.stdout, emitsThrough('--verbose -m -- help'));
await pub.shouldExit();
diff --git a/test/run/precompile_test.dart b/test/run/precompile_test.dart
index 46a19b8..10d3508 100644
--- a/test/run/precompile_test.dart
+++ b/test/run/precompile_test.dart
@@ -26,10 +26,9 @@
'test',
'1.0.0',
contents: [
- d.dir(
- 'bin',
- [d.file('test.dart', 'main(List<String> args) => print("hello");')],
- ),
+ d.dir('bin', [
+ d.file('test.dart', 'main(List<String> args) => print("hello");'),
+ ]),
],
);
@@ -45,8 +44,7 @@
expect(lines, contains('hello'));
});
- test(
- "`pub run` doesn't write about precompilation "
+ test("`pub run` doesn't write about precompilation "
'when a terminal is not attached', () async {
await setupForPubRunToPrecompile();
@@ -106,43 +104,43 @@
output: contains('Building package executables...'),
);
- final pub = await pubRun(
- args: ['test'],
- );
+ final pub = await pubRun(args: ['test']);
await pub.shouldExit(0);
final lines = await pub.stdout.rest.toList();
expect(lines, isNot(contains('Building package executable...')));
});
// Regression test of https://github.com/dart-lang/pub/issues/2483
- test('`get --precompile` precompiles script with relative PUB_CACHE',
- () async {
- await d.dir(appPath, [
- d.appPubspec(dependencies: {'test': '1.0.0'}),
- ]).create();
+ test(
+ '`get --precompile` precompiles script with relative PUB_CACHE',
+ () async {
+ await d.dir(appPath, [
+ d.appPubspec(dependencies: {'test': '1.0.0'}),
+ ]).create();
- final server = await servePackages();
- server.serve(
- 'test',
- '1.0.0',
- contents: [
- d.dir('bin', [d.file('test.dart', _script)]),
- ],
- );
+ final server = await servePackages();
+ server.serve(
+ 'test',
+ '1.0.0',
+ contents: [
+ d.dir('bin', [d.file('test.dart', _script)]),
+ ],
+ );
- await pubGet(
- args: ['--precompile'],
- environment: {'PUB_CACHE': '.pub_cache'},
- output: contains('Building package executables...'),
- );
+ await pubGet(
+ args: ['--precompile'],
+ environment: {'PUB_CACHE': '.pub_cache'},
+ output: contains('Building package executables...'),
+ );
- final pub = await pubRun(
- args: ['test'],
- environment: {'PUB_CACHE': '.pub_cache'},
- );
- await pub.shouldExit(0);
- final lines = await pub.stdout.rest.toList();
- expect(lines, isNot(contains('Building package executable...')));
- expect(lines, contains('running with PUB_CACHE: ".pub_cache"'));
- });
+ final pub = await pubRun(
+ args: ['test'],
+ environment: {'PUB_CACHE': '.pub_cache'},
+ );
+ await pub.shouldExit(0);
+ final lines = await pub.stdout.rest.toList();
+ expect(lines, isNot(contains('Building package executable...')));
+ expect(lines, contains('running with PUB_CACHE: ".pub_cache"'));
+ },
+ );
}
diff --git a/test/sdk_test.dart b/test/sdk_test.dart
index 531fc60..915db5b 100644
--- a/test/sdk_test.dart
+++ b/test/sdk_test.dart
@@ -26,67 +26,84 @@
]),
]),
d.dir('bin/cache/pkg', [
- d.dir(
- 'baz',
- [d.libDir('baz', 'foo 0.0.1'), d.libPubspec('baz', '0.0.1')],
- ),
+ d.dir('baz', [
+ d.libDir('baz', 'foo 0.0.1'),
+ d.libPubspec('baz', '0.0.1'),
+ ]),
]),
d.flutterVersion('1.2.3'),
]).create();
});
test("gets an SDK dependency's dependencies", () async {
- await d.appDir(
- dependencies: {
- 'foo': {'sdk': 'flutter'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'sdk': 'flutter'},
+ },
+ )
+ .create();
await pubCommand(
command,
environment: {'FLUTTER_ROOT': p.join(d.sandbox, 'flutter')},
);
- await d.appPackageConfigFile(
- [
- d.packageConfigEntry(
- name: 'foo',
- path: p.join(d.sandbox, 'flutter', 'packages', 'foo'),
- ),
- d.packageConfigEntry(name: 'bar', version: '1.0.0'),
- ],
- flutterRoot: p.join(d.sandbox, 'flutter'),
- flutterVersion: '1.2.3',
- ).validate();
+ await d
+ .appPackageConfigFile(
+ [
+ d.packageConfigEntry(
+ name: 'foo',
+ path: p.join(d.sandbox, 'flutter', 'packages', 'foo'),
+ ),
+ d.packageConfigEntry(name: 'bar', version: '1.0.0'),
+ ],
+ flutterRoot: p.join(d.sandbox, 'flutter'),
+ flutterVersion: '1.2.3',
+ )
+ .validate();
});
test('gets an SDK dependency from bin/cache/pkg', () async {
- await d.appDir(
- dependencies: {
- 'baz': {'sdk': 'flutter'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'baz': {'sdk': 'flutter'},
+ },
+ )
+ .create();
await pubCommand(
command,
environment: {'FLUTTER_ROOT': p.join(d.sandbox, 'flutter')},
);
- await d.appPackageConfigFile(
- [
- d.packageConfigEntry(
- name: 'baz',
- path: p.join(d.sandbox, 'flutter', 'bin', 'cache', 'pkg', 'baz'),
- ),
- ],
- flutterRoot: p.join(d.sandbox, 'flutter'),
- flutterVersion: '1.2.3',
- ).validate();
+ await d
+ .appPackageConfigFile(
+ [
+ d.packageConfigEntry(
+ name: 'baz',
+ path: p.join(
+ d.sandbox,
+ 'flutter',
+ 'bin',
+ 'cache',
+ 'pkg',
+ 'baz',
+ ),
+ ),
+ ],
+ flutterRoot: p.join(d.sandbox, 'flutter'),
+ flutterVersion: '1.2.3',
+ )
+ .validate();
});
test('unlocks an SDK dependency when the version changes', () async {
- await d.appDir(
- dependencies: {
- 'foo': {'sdk': 'flutter'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'sdk': 'flutter'},
+ },
+ )
+ .create();
await pubCommand(
command,
environment: {'FLUTTER_ROOT': p.join(d.sandbox, 'flutter')},
@@ -99,10 +116,9 @@
)
.validate();
- await d.dir(
- 'flutter/packages/foo',
- [d.libPubspec('foo', '0.0.2')],
- ).create();
+ await d.dir('flutter/packages/foo', [
+ d.libPubspec('foo', '0.0.2'),
+ ]).create();
await pubCommand(
command,
environment: {'FLUTTER_ROOT': p.join(d.sandbox, 'flutter')},
@@ -117,8 +133,7 @@
});
// Regression test for #1883
- test(
- "doesn't fail if the Flutter SDK's version file doesn't exist when "
+ test("doesn't fail if the Flutter SDK's version file doesn't exist when "
'nothing depends on Flutter', () async {
await d.appDir().create();
deleteEntry(
@@ -128,48 +143,60 @@
command,
environment: {'FLUTTER_ROOT': p.join(d.sandbox, 'flutter')},
);
- await d.appPackageConfigFile(
- [],
- flutterRoot: p.join(d.sandbox, 'flutter'),
- flutterVersion: '1.2.3',
- ).validate();
+ await d
+ .appPackageConfigFile(
+ [],
+ flutterRoot: p.join(d.sandbox, 'flutter'),
+ flutterVersion: '1.2.3',
+ )
+ .validate();
});
group('fails if', () {
test("the version constraint doesn't match", () async {
- await d.appDir(
- dependencies: {
- 'foo': {'sdk': 'flutter', 'version': '^1.0.0'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'sdk': 'flutter', 'version': '^1.0.0'},
+ },
+ )
+ .create();
await pubCommand(
command,
environment: {'FLUTTER_ROOT': p.join(d.sandbox, 'flutter')},
- error: contains('''
-Because myapp depends on foo ^1.0.0 from sdk which doesn't match any versions, version solving failed.'''),
+ error: contains(
+ '''
+Because myapp depends on foo ^1.0.0 from sdk which doesn't match any versions, version solving failed.''',
+ ),
);
});
test('the SDK is unknown', () async {
- await d.appDir(
- dependencies: {
- 'foo': {'sdk': 'unknown'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'sdk': 'unknown'},
+ },
+ )
+ .create();
await pubCommand(
command,
- error: equalsIgnoringWhitespace('''
-Because myapp depends on foo from sdk which doesn't exist (unknown SDK "unknown"), version solving failed.'''),
+ error: equalsIgnoringWhitespace(
+ '''
+Because myapp depends on foo from sdk which doesn't exist (unknown SDK "unknown"), version solving failed.''',
+ ),
exitCode: exit_codes.UNAVAILABLE,
);
});
test('the SDK is unavailable', () async {
- await d.appDir(
- dependencies: {
- 'foo': {'sdk': 'flutter'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'sdk': 'flutter'},
+ },
+ )
+ .create();
await pubCommand(
command,
error: equalsIgnoringWhitespace("""
@@ -183,11 +210,13 @@
});
test("the SDK doesn't contain the package", () async {
- await d.appDir(
- dependencies: {
- 'bar': {'sdk': 'flutter'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'bar': {'sdk': 'flutter'},
+ },
+ )
+ .create();
await pubCommand(
command,
environment: {'FLUTTER_ROOT': p.join(d.sandbox, 'flutter')},
@@ -201,11 +230,13 @@
});
test("the Dart SDK doesn't contain the package", () async {
- await d.appDir(
- dependencies: {
- 'bar': {'sdk': 'dart'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'bar': {'sdk': 'dart'},
+ },
+ )
+ .create();
await pubCommand(
command,
error: equalsIgnoringWhitespace("""
@@ -221,11 +252,13 @@
test('supports the Fuchsia SDK', () async {
renameDir(p.join(d.sandbox, 'flutter'), p.join(d.sandbox, 'fuchsia'));
- await d.appDir(
- dependencies: {
- 'foo': {'sdk': 'fuchsia'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'sdk': 'fuchsia'},
+ },
+ )
+ .create();
await pubCommand(
command,
environment: {'FUCHSIA_DART_SDK_ROOT': p.join(d.sandbox, 'fuchsia')},
@@ -254,21 +287,21 @@
]),
]),
d.sdkPackagesConfig(
- SdkPackageConfig(
- 'dart',
- {'foo': SdkPackage('foo', 'packages/foo')},
- 1,
- ),
+ SdkPackageConfig('dart', {
+ 'foo': SdkPackage('foo', 'packages/foo'),
+ }, 1),
),
]).create();
});
test('gets an SDK dependency from sdk_packages.yaml', () async {
- await d.appDir(
- dependencies: {
- 'foo': {'sdk': 'dart', 'version': '^0.0.1'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'sdk': 'dart', 'version': '^0.0.1'},
+ },
+ )
+ .create();
await pubCommand(
command,
@@ -284,14 +317,15 @@
]).validate();
});
- test(
- 'fails if the version range isn\'t compatible with the SDK '
+ test('fails if the version range isn\'t compatible with the SDK '
'dependency from sdk_packages.yaml', () async {
- await d.appDir(
- dependencies: {
- 'foo': {'sdk': 'dart', 'version': '^1.0.0'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'sdk': 'dart', 'version': '^1.0.0'},
+ },
+ )
+ .create();
await pubCommand(
command,
@@ -321,28 +355,29 @@
]),
]),
d.sdkPackagesConfig(
- SdkPackageConfig(
- 'dart',
- {'foo': SdkPackage('foo', 'packages/foo')},
- 1,
- ),
+ SdkPackageConfig('dart', {
+ 'foo': SdkPackage('foo', 'packages/foo'),
+ }, 1),
),
]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'sdk': 'dart', 'version': '^1.0.0'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'sdk': 'dart', 'version': '^1.0.0'},
+ },
+ )
+ .create();
await pubCommand(
command,
environment: {'DART_ROOT': p.join(d.sandbox, 'dart')},
error: contains(
- 'Unsupported operation: Only SDK packages are allowed as regular '
- 'dependencies for packages vendored '
- 'by the dart SDK, but the `foo` '
- 'package has a hosted dependency on `bar`.'),
+ 'Unsupported operation: Only SDK packages are allowed as regular '
+ 'dependencies for packages vendored '
+ 'by the dart SDK, but the `foo` '
+ 'package has a hosted dependency on `bar`.',
+ ),
);
});
@@ -358,26 +393,27 @@
]),
]),
d.sdkPackagesConfig(
- SdkPackageConfig(
- 'fuschia',
- {'foo': SdkPackage('foo', 'packages/foo')},
- 1,
- ),
+ SdkPackageConfig('fuschia', {
+ 'foo': SdkPackage('foo', 'packages/foo'),
+ }, 1),
),
]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'sdk': 'dart', 'version': '^1.0.0'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'sdk': 'dart', 'version': '^1.0.0'},
+ },
+ )
+ .create();
await pubCommand(
command,
environment: {'DART_ROOT': p.join(d.sandbox, 'dart')},
error: contains(
- 'Expected a configuration for the `dart` sdk but got one for '
- '`fuschia`. (at character 8)'),
+ 'Expected a configuration for the `dart` sdk but got one for '
+ '`fuschia`. (at character 8)',
+ ),
exitCode: 65,
);
});
diff --git a/test/snapshot_test.dart b/test/snapshot_test.dart
index 1d623e3..ee470e5 100644
--- a/test/snapshot_test.dart
+++ b/test/snapshot_test.dart
@@ -20,10 +20,9 @@
d.file('hello.dart', "void main() => print('hello!');"),
d.file('goodbye.dart', "void main() => print('goodbye!');"),
d.file('shell.sh', 'echo shell'),
- d.dir(
- 'subdir',
- [d.file('sub.dart', "void main() => print('sub!');")],
- ),
+ d.dir('subdir', [
+ d.file('sub.dart', "void main() => print('sub!');"),
+ ]),
]),
],
);
@@ -32,9 +31,10 @@
await pubGet(
args: ['--precompile'],
- output: allOf(
- [contains('Built foo:hello.'), contains('Built foo:goodbye.')],
- ),
+ output: allOf([
+ contains('Built foo:hello.'),
+ contains('Built foo:goodbye.'),
+ ]),
);
await d.dir(p.join(appPath, '.dart_tool', 'pub', 'bin'), [
@@ -65,10 +65,9 @@
d.file('hello.dart', "void main() => print('hello!');"),
d.file('goodbye.dart', "void main() => print('goodbye!');"),
d.file('shell.sh', 'echo shell'),
- d.dir(
- 'subdir',
- [d.file('sub.dart', "void main() => print('sub!');")],
- ),
+ d.dir('subdir', [
+ d.file('sub.dart', "void main() => print('sub!');"),
+ ]),
]),
],
)
@@ -78,9 +77,10 @@
await pubGet(
args: ['--precompile'],
- output: allOf(
- [contains('Built foo:hello.'), contains('Built foo:goodbye.')],
- ),
+ output: allOf([
+ contains('Built foo:hello.'),
+ contains('Built foo:goodbye.'),
+ ]),
);
await d.dir(p.join(appPath, '.dart_tool', 'pub', 'bin'), [
@@ -108,10 +108,9 @@
'foo',
'1.2.3',
contents: [
- d.dir(
- 'bin',
- [d.file('hello.dart', "void main() => print('hello!');")],
- ),
+ d.dir('bin', [
+ d.file('hello.dart', "void main() => print('hello!');"),
+ ]),
],
);
@@ -130,10 +129,9 @@
'foo',
'1.2.4',
contents: [
- d.dir(
- 'bin',
- [d.file('hello.dart', "void main() => print('hello 2!');")],
- ),
+ d.dir('bin', [
+ d.file('hello.dart', "void main() => print('hello 2!');"),
+ ]),
],
);
@@ -216,17 +214,18 @@
await d.git('foo.git', [
d.pubspec({'name': 'foo', 'version': '0.0.1'}),
- d.dir(
- 'bin',
- [d.file('hello.dart', "void main() => print('Hello!');")],
- ),
+ d.dir('bin', [
+ d.file('hello.dart', "void main() => print('Hello!');"),
+ ]),
]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
await pubGet(
args: ['--precompile'],
@@ -238,10 +237,9 @@
]).validate();
await d.git('foo.git', [
- d.dir(
- 'bin',
- [d.file('hello.dart', "void main() => print('Goodbye!');")],
- ),
+ d.dir('bin', [
+ d.file('hello.dart', "void main() => print('Goodbye!');"),
+ ]),
]).commit();
await pubUpgrade(
@@ -264,10 +262,9 @@
'foo',
'5.6.7',
contents: [
- d.dir(
- 'bin',
- [d.file('hello.dart', "void main() => print('hello!');")],
- ),
+ d.dir('bin', [
+ d.file('hello.dart', "void main() => print('hello!');"),
+ ]),
],
);
@@ -276,10 +273,9 @@
await pubGet(args: ['--no-precompile']);
await d.dir(p.join(appPath, '.dart_tool', 'pub', 'bin'), [
- d.dir(
- 'foo',
- [d.outOfDateSnapshot('hello.dart-$versionSuffix.snapshot')],
- ),
+ d.dir('foo', [
+ d.outOfDateSnapshot('hello.dart-$versionSuffix.snapshot'),
+ ]),
]).create();
final process = await pubRun(args: ['foo:hello']);
diff --git a/test/solve_suggestions_test.dart b/test/solve_suggestions_test.dart
index fccf1a7..f97864c 100644
--- a/test/solve_suggestions_test.dart
+++ b/test/solve_suggestions_test.dart
@@ -81,18 +81,16 @@
),
]).create();
await pubGet(
- error: allOf(
- [
- contains(
- '* Consider downgrading your constraint on foo: '
- 'dart pub add foo:^0.9.0',
- ),
- contains(
- '* Try upgrading your constraint on bar: '
- 'dart pub add dev:bar:^2.0.0',
- ),
- ],
- ),
+ error: allOf([
+ contains(
+ '* Consider downgrading your constraint on foo: '
+ 'dart pub add foo:^0.9.0',
+ ),
+ contains(
+ '* Try upgrading your constraint on bar: '
+ 'dart pub add dev:bar:^2.0.0',
+ ),
+ ]),
);
});
@@ -101,20 +99,14 @@
server.serve('foo', '1.0.0');
await d.dir(appPath, [
- d.libPubspec(
- 'myApp',
- '1.0.0',
- deps: {'foo': '>1.0.0 <=0.0.0'},
- ),
+ d.libPubspec('myApp', '1.0.0', deps: {'foo': '>1.0.0 <=0.0.0'}),
]).create();
await pubGet(
- error: allOf(
- [
- contains(
- '* Try updating your constraint on foo: dart pub add foo:^1.0.0',
- ),
- ],
- ),
+ error: allOf([
+ contains(
+ '* Try updating your constraint on foo: dart pub add foo:^1.0.0',
+ ),
+ ]),
);
});
@@ -141,43 +133,45 @@
);
});
- test('suggests a major upgrade if more than 5 needs to be upgraded',
- () async {
- final server = await servePackages();
- server.serve('foo', '1.0.0', deps: {'bar': '2.0.0'});
- server.serve('bar', '1.0.0', deps: {'foo': '2.0.0'});
- server.serve('foo', '2.0.0', deps: {'bar': '2.0.0'});
- server.serve('bar', '2.0.0', deps: {'foo': '2.0.0'});
- server.serve('foo1', '1.0.0', deps: {'bar1': '2.0.0'});
- server.serve('bar1', '1.0.0', deps: {'foo1': '2.0.0'});
- server.serve('foo1', '2.0.0', deps: {'bar1': '2.0.0'});
- server.serve('bar1', '2.0.0', deps: {'foo1': '2.0.0'});
- server.serve('foo2', '1.0.0', deps: {'bar2': '2.0.0'});
- server.serve('bar2', '1.0.0', deps: {'foo2': '2.0.0'});
- server.serve('foo2', '2.0.0', deps: {'bar2': '2.0.0'});
- server.serve('bar2', '2.0.0', deps: {'foo2': '2.0.0'});
+ test(
+    'suggests a major upgrade if more than 5 need to be upgraded',
+ () async {
+ final server = await servePackages();
+ server.serve('foo', '1.0.0', deps: {'bar': '2.0.0'});
+ server.serve('bar', '1.0.0', deps: {'foo': '2.0.0'});
+ server.serve('foo', '2.0.0', deps: {'bar': '2.0.0'});
+ server.serve('bar', '2.0.0', deps: {'foo': '2.0.0'});
+ server.serve('foo1', '1.0.0', deps: {'bar1': '2.0.0'});
+ server.serve('bar1', '1.0.0', deps: {'foo1': '2.0.0'});
+ server.serve('foo1', '2.0.0', deps: {'bar1': '2.0.0'});
+ server.serve('bar1', '2.0.0', deps: {'foo1': '2.0.0'});
+ server.serve('foo2', '1.0.0', deps: {'bar2': '2.0.0'});
+ server.serve('bar2', '1.0.0', deps: {'foo2': '2.0.0'});
+ server.serve('foo2', '2.0.0', deps: {'bar2': '2.0.0'});
+ server.serve('bar2', '2.0.0', deps: {'foo2': '2.0.0'});
- await d.dir(appPath, [
- d.libPubspec(
- 'myApp',
- '1.0.0',
- deps: {
- 'foo': '1.0.0',
- 'bar': '1.0.0',
- 'foo1': '1.0.0',
- 'bar1': '1.0.0',
- 'foo2': '1.0.0',
- 'bar2': '1.0.0',
- },
- ),
- ]).create();
- await pubGet(
- error: contains(
- '* Try an upgrade of your constraints: '
- 'dart pub upgrade --major-versions',
- ),
- );
- });
+ await d.dir(appPath, [
+ d.libPubspec(
+ 'myApp',
+ '1.0.0',
+ deps: {
+ 'foo': '1.0.0',
+ 'bar': '1.0.0',
+ 'foo1': '1.0.0',
+ 'bar1': '1.0.0',
+ 'foo2': '1.0.0',
+ 'bar2': '1.0.0',
+ },
+ ),
+ ]).create();
+ await pubGet(
+ error: contains(
+ '* Try an upgrade of your constraints: '
+ 'dart pub upgrade --major-versions',
+ ),
+ );
+ },
+ );
test('suggests upgrades to non-default servers', () async {
final server = await servePackages();
@@ -209,9 +203,7 @@
'bar:\'{"version":"^2.0.0","hosted":"${server2.url}"}\'',
),
);
- await pubAdd(
- args: ['bar:{"version":"^2.0.0","hosted":"${server2.url}"}'],
- );
+ await pubAdd(args: ['bar:{"version":"^2.0.0","hosted":"${server2.url}"}']);
await d.dir(appPath, [
d.libPubspec(
'myApp',
diff --git a/test/test_pub.dart b/test/test_pub.dart
index ac4bf6c..83eb7b7 100644
--- a/test/test_pub.dart
+++ b/test/test_pub.dart
@@ -44,13 +44,15 @@
/// A [Matcher] that matches JavaScript generated by dart2js with minification
/// enabled.
-Matcher isMinifiedDart2JSOutput =
- isNot(contains('// The code supports the following hooks'));
+Matcher isMinifiedDart2JSOutput = isNot(
+ contains('// The code supports the following hooks'),
+);
/// A [Matcher] that matches JavaScript generated by dart2js with minification
/// disabled.
-Matcher isUnminifiedDart2JSOutput =
- contains('// The code supports the following hooks');
+Matcher isUnminifiedDart2JSOutput = contains(
+ '// The code supports the following hooks',
+);
/// Converts [value] into a YAML string.
String yaml(Object? value) => jsonEncode(value);
@@ -72,19 +74,25 @@
/// The path of the ".dart_tool/package_config.json" file in the mock app used
/// for tests, relative to the sandbox directory.
-String packageConfigFilePath =
- p.join(appPath, '.dart_tool', 'package_config.json');
+String packageConfigFilePath = p.join(
+ appPath,
+ '.dart_tool',
+ 'package_config.json',
+);
/// The path of the ".dart_tool/package_graph.json" file in the mock app used
/// for tests, relative to the sandbox directory.
-String packageGraphFilePath =
- p.join(appPath, '.dart_tool', 'package_graph.json');
+String packageGraphFilePath = p.join(
+ appPath,
+ '.dart_tool',
+ 'package_graph.json',
+);
/// The entry from the `.dart_tool/package_config.json` file for [packageName].
Map<String, dynamic> packageSpec(String packageName) => dig(
- json.decode(File(d.path(packageConfigFilePath)).readAsStringSync()),
- ['packages', ('name', packageName)],
- );
+ json.decode(File(d.path(packageConfigFilePath)).readAsStringSync()),
+ ['packages', ('name', packageName)],
+);
/// The suffix appended to a built snapshot.
const versionSuffix = testVersion;
@@ -193,17 +201,16 @@
int? exitCode,
Map<String, String>? environment,
String? workingDirectory,
-}) async =>
- await pubCommand(
- RunCommand.add,
- args: args,
- output: output,
- error: error,
- warning: warning,
- exitCode: exitCode,
- environment: environment,
- workingDirectory: workingDirectory,
- );
+}) async => await pubCommand(
+ RunCommand.add,
+ args: args,
+ output: output,
+ error: error,
+ warning: warning,
+ exitCode: exitCode,
+ environment: environment,
+ workingDirectory: workingDirectory,
+);
Future<void> pubGet({
Iterable<String>? args,
@@ -215,19 +222,18 @@
Map<String, String?>? environment,
String? workingDirectory,
bool includeParentHomeAndPath = true,
-}) async =>
- await pubCommand(
- RunCommand.get,
- args: args,
- output: output,
- error: error,
- silent: silent,
- warning: warning,
- exitCode: exitCode,
- environment: environment,
- workingDirectory: workingDirectory,
- includeParentHomeAndPath: includeParentHomeAndPath,
- );
+}) async => await pubCommand(
+ RunCommand.get,
+ args: args,
+ output: output,
+ error: error,
+ silent: silent,
+ warning: warning,
+ exitCode: exitCode,
+ environment: environment,
+ workingDirectory: workingDirectory,
+ includeParentHomeAndPath: includeParentHomeAndPath,
+);
Future<void> pubUpgrade({
Iterable<String>? args,
@@ -238,18 +244,17 @@
int? exitCode,
Map<String, String>? environment,
String? workingDirectory,
-}) async =>
- await pubCommand(
- RunCommand.upgrade,
- args: args,
- output: output,
- error: error,
- warning: warning,
- silent: silent,
- exitCode: exitCode,
- environment: environment,
- workingDirectory: workingDirectory,
- );
+}) async => await pubCommand(
+ RunCommand.upgrade,
+ args: args,
+ output: output,
+ error: error,
+ warning: warning,
+ silent: silent,
+ exitCode: exitCode,
+ environment: environment,
+ workingDirectory: workingDirectory,
+);
Future<void> pubDowngrade({
Iterable<String>? args,
@@ -259,17 +264,16 @@
int? exitCode,
Map<String, String>? environment,
String? workingDirectory,
-}) async =>
- await pubCommand(
- RunCommand.downgrade,
- args: args,
- output: output,
- error: error,
- warning: warning,
- exitCode: exitCode,
- environment: environment,
- workingDirectory: workingDirectory,
- );
+}) async => await pubCommand(
+ RunCommand.downgrade,
+ args: args,
+ output: output,
+ error: error,
+ warning: warning,
+ exitCode: exitCode,
+ environment: environment,
+ workingDirectory: workingDirectory,
+);
Future<void> pubRemove({
Iterable<String>? args,
@@ -279,17 +283,16 @@
int? exitCode,
Map<String, String>? environment,
String? workingDirectory,
-}) async =>
- await pubCommand(
- RunCommand.remove,
- args: args,
- output: output,
- error: error,
- warning: warning,
- exitCode: exitCode,
- environment: environment,
- workingDirectory: workingDirectory,
- );
+}) async => await pubCommand(
+ RunCommand.remove,
+ args: args,
+ output: output,
+ error: error,
+ warning: warning,
+ exitCode: exitCode,
+ environment: environment,
+ workingDirectory: workingDirectory,
+);
/// Schedules starting the "pub [global] run" process and validates the
/// expected startup output.
@@ -436,11 +439,7 @@
);
await expectLater(
pub.stdout,
- emitsThrough(
- matches(
- r'^Do you want to publish [^ ]+ [^ ]+ (y/N)?',
- ),
- ),
+ emitsThrough(matches(r'^Do you want to publish [^ ]+ [^ ]+ (y/N)?')),
);
pub.stdin.writeln('y');
}
@@ -459,29 +458,30 @@
/// Gets the environment variables used to run pub in a test context.
Map<String, String> getPubTestEnvironment([String? tokenEndpoint]) => {
- 'CI': 'false', // unless explicitly given tests don't run pub in CI mode
- '_PUB_TESTING': 'true',
- '_PUB_TEST_CONFIG_DIR': _pathInSandbox(configPath),
- 'PUB_CACHE': _pathInSandbox(cachePath),
- 'PUB_ENVIRONMENT': 'test-environment',
+ 'CI': 'false', // unless explicitly given tests don't run pub in CI mode
+ '_PUB_TESTING': 'true',
+ '_PUB_TEST_CONFIG_DIR': _pathInSandbox(configPath),
+ 'PUB_CACHE': _pathInSandbox(cachePath),
+ 'PUB_ENVIRONMENT': 'test-environment',
- // Ensure a known SDK version is set for the tests that rely on that.
- '_PUB_TEST_SDK_VERSION': testVersion,
- if (tokenEndpoint != null) '_PUB_TEST_TOKEN_ENDPOINT': tokenEndpoint,
- if (_globalServer?.port != null)
- 'PUB_HOSTED_URL': 'http://localhost:${_globalServer?.port}',
- };
+ // Ensure a known SDK version is set for the tests that rely on that.
+ '_PUB_TEST_SDK_VERSION': testVersion,
+ if (tokenEndpoint != null) '_PUB_TEST_TOKEN_ENDPOINT': tokenEndpoint,
+ if (_globalServer?.port != null)
+ 'PUB_HOSTED_URL': 'http://localhost:${_globalServer?.port}',
+};
/// The path to the root of pub's sources in the pub repo.
-final String _pubRoot = (() {
- if (!fileExists(p.join('bin', 'pub.dart'))) {
- throw StateError(
- "Current working directory (${p.current} is not pub's root. "
- "Run tests from pub's root.",
- );
- }
- return p.current;
-})();
+final String _pubRoot =
+ (() {
+ if (!fileExists(p.join('bin', 'pub.dart'))) {
+ throw StateError(
+ "Current working directory (${p.current} is not pub's root. "
+ "Run tests from pub's root.",
+ );
+ }
+ return p.current;
+ })();
/// Starts a Pub process and returns a [PubProcess] that supports interaction
/// with that process.
@@ -590,9 +590,10 @@
);
if (description == null) {
- final humanExecutable = p.isWithin(p.current, executable)
- ? p.relative(executable)
- : executable;
+ final humanExecutable =
+ p.isWithin(p.current, executable)
+ ? p.relative(executable)
+ : executable;
description = '$humanExecutable ${arguments.join(' ')}';
}
@@ -695,8 +696,11 @@
}) async {
final cache = SystemCache(rootDir: _pathInSandbox(cachePath));
- final lockFile =
- _createLockFile(cache, sandbox: dependenciesInSandBox, hosted: hosted);
+ final lockFile = _createLockFile(
+ cache,
+ sandbox: dependenciesInSandBox,
+ hosted: hosted,
+ );
await d.dir(package, [
d.file(
@@ -728,12 +732,10 @@
final packages = <PackageId>[
...dependencies.entries.map(
- (entry) => cache.path.parseId(
- entry.key,
- Version(0, 0, 0),
- {'path': entry.value, 'relative': true},
- containingDir: p.join(d.sandbox, appPath),
- ),
+ (entry) => cache.path.parseId(entry.key, Version(0, 0, 0), {
+ 'path': entry.value,
+ 'relative': true,
+ }, containingDir: p.join(d.sandbox, appPath)),
),
if (hosted != null)
...hosted.entries.map(
@@ -741,10 +743,7 @@
entry.key,
Version.parse(entry.value),
ResolvedHostedDescription(
- HostedDescription(
- entry.key,
- 'https://pub.dev',
- ),
+ HostedDescription(entry.key, 'https://pub.dev'),
sha256: null,
),
),
@@ -983,11 +982,11 @@
// .join('\n'));
// }
final pipe = stdin == null ? '' : ' echo ${escapeShellArgument(stdin)} |';
- final joinedArgs =
- args.map(filterUnstableText).map(escapeShellArgument).join(' ');
- buffer.writeln(
- '\$$pipe pub $joinedArgs',
- );
+ final joinedArgs = args
+ .map(filterUnstableText)
+ .map(escapeShellArgument)
+ .join(' ');
+ buffer.writeln('\$$pipe pub $joinedArgs');
for (final line in await process.stdout.rest.toList()) {
buffer.writeln(filterUnstableText(line));
}
@@ -1147,8 +1146,9 @@
case final int key:
json = (json as List)[key];
case (final String key, final String value):
- json = (json as List)
- .firstWhere((element) => (element as Map)[key] == value);
+ json = (json as List).firstWhere(
+ (element) => (element as Map)[key] == value,
+ );
case final key:
throw ArgumentError('Bad key $key in', 'path');
}
diff --git a/test/token/add_token_test.dart b/test/token/add_token_test.dart
index 7e7ab44..55793c5 100644
--- a/test/token/add_token_test.dart
+++ b/test/token/add_token_test.dart
@@ -9,32 +9,9 @@
import '../test_pub.dart';
void main() {
- test('with correct server url creates pub-tokens.json that contains token',
- () async {
- await d.tokensFile({
- 'version': 1,
- 'hosted': [
- {'url': 'https://example.com', 'token': 'abc'},
- ],
- }).create();
-
- await runPub(
- args: ['token', 'add', 'https://server.demo/'],
- input: ['auth-token'],
- );
-
- await d.tokensFile({
- 'version': 1,
- 'hosted': [
- {'url': 'https://example.com', 'token': 'abc'},
- {'url': 'https://server.demo', 'token': 'auth-token'},
- ],
- }).validate();
- });
-
- group('with environment variable creates tokens.json that contains env var',
- () {
- test('without environment variable provided', () async {
+ test(
+ 'with correct server url creates pub-tokens.json that contains token',
+ () async {
await d.tokensFile({
'version': 1,
'hosted': [
@@ -43,40 +20,67 @@
}).create();
await runPub(
- args: ['token', 'add', 'https://example.com/', '--env-var', 'TOKEN'],
- error: 'Environment variable "TOKEN" is not defined.',
+ args: ['token', 'add', 'https://server.demo/'],
+ input: ['auth-token'],
);
await d.tokensFile({
'version': 1,
'hosted': [
- {'url': 'https://example.com', 'env': 'TOKEN'},
- ],
- }).validate();
- });
-
- test('with environment variable provided', () async {
- await d.tokensFile({
- 'version': 1,
- 'hosted': [
{'url': 'https://example.com', 'token': 'abc'},
- ],
- }).create();
-
- await runPub(
- args: ['token', 'add', 'https://example.com/', '--env-var', 'TOKEN'],
- environment: {'TOKEN': 'secret'},
- error: isNot(contains('is not defined.')),
- );
-
- await d.tokensFile({
- 'version': 1,
- 'hosted': [
- {'url': 'https://example.com', 'env': 'TOKEN'},
+ {'url': 'https://server.demo', 'token': 'auth-token'},
],
}).validate();
- });
- });
+ },
+ );
+
+ group(
+ 'with environment variable creates tokens.json that contains env var',
+ () {
+ test('without environment variable provided', () async {
+ await d.tokensFile({
+ 'version': 1,
+ 'hosted': [
+ {'url': 'https://example.com', 'token': 'abc'},
+ ],
+ }).create();
+
+ await runPub(
+ args: ['token', 'add', 'https://example.com/', '--env-var', 'TOKEN'],
+ error: 'Environment variable "TOKEN" is not defined.',
+ );
+
+ await d.tokensFile({
+ 'version': 1,
+ 'hosted': [
+ {'url': 'https://example.com', 'env': 'TOKEN'},
+ ],
+ }).validate();
+ });
+
+ test('with environment variable provided', () async {
+ await d.tokensFile({
+ 'version': 1,
+ 'hosted': [
+ {'url': 'https://example.com', 'token': 'abc'},
+ ],
+ }).create();
+
+ await runPub(
+ args: ['token', 'add', 'https://example.com/', '--env-var', 'TOKEN'],
+ environment: {'TOKEN': 'secret'},
+ error: isNot(contains('is not defined.')),
+ );
+
+ await d.tokensFile({
+ 'version': 1,
+ 'hosted': [
+ {'url': 'https://example.com', 'env': 'TOKEN'},
+ ],
+ }).validate();
+ });
+ },
+ );
test('persists unknown fields on unmodified entries', () async {
await d.tokensFile({
@@ -86,12 +90,9 @@
'url': 'https://example.com',
'unknownField': '123',
'nestedField': [
- {
- 'username': 'user',
- 'password': 'pass',
- },
+ {'username': 'user', 'password': 'pass'},
],
- }
+ },
],
}).create();
@@ -107,10 +108,7 @@
'url': 'https://example.com',
'unknownField': '123',
'nestedField': [
- {
- 'username': 'user',
- 'password': 'pass',
- },
+ {'username': 'user', 'password': 'pass'},
],
},
{'url': 'https://server.demo', 'token': 'auth-token'},
@@ -153,8 +151,7 @@
await configDir([d.nothing('pub-tokens.json')]).validate();
});
- test(
- 'with non-secure localhost url creates pub-tokens.json '
+ test('with non-secure localhost url creates pub-tokens.json '
'that contains token', () async {
await d.dir(configPath).create();
diff --git a/test/token/error_message_test.dart b/test/token/error_message_test.dart
index 3a98357..0be1a71 100644
--- a/test/token/error_message_test.dart
+++ b/test/token/error_message_test.dart
@@ -56,10 +56,7 @@
respondWithWwwAuthenticate('bearer realm="pub", message="$message"');
await expectPubErrorMessage(
- allOf(
- isNot(contains(message)),
- contains(message.substring(0, 1024)),
- ),
+ allOf(isNot(contains(message)), contains(message.substring(0, 1024))),
);
});
@@ -68,11 +65,13 @@
await expectPubErrorMessage(isNot(contains('custom message')));
});
- test('does not prints message if challenge is not equals to bearer',
- () async {
- respondWithWwwAuthenticate('basic realm="pub", message="custom message"');
- await expectPubErrorMessage(isNot(contains('custom message')));
- });
+ test(
+    'does not print message if challenge is not equal to bearer',
+ () async {
+ respondWithWwwAuthenticate('basic realm="pub", message="custom message"');
+ await expectPubErrorMessage(isNot(contains('custom message')));
+ },
+ );
test('prints message for bearer challenge for pub realm only', () async {
respondWithWwwAuthenticate(
diff --git a/test/token/remove_token_test.dart b/test/token/remove_token_test.dart
index e06459e..6317ed5 100644
--- a/test/token/remove_token_test.dart
+++ b/test/token/remove_token_test.dart
@@ -19,9 +19,10 @@
await runPub(args: ['token', 'remove', 'https://server.demo']);
- await d.tokensFile(
- {'version': 1, 'hosted': <Map<String, String>>[]},
- ).validate();
+ await d.tokensFile({
+ 'version': 1,
+ 'hosted': <Map<String, String>>[],
+ }).validate();
});
test('without any matching schemes, does nothing', () async {
diff --git a/test/token/when_receives_401_removes_token_test.dart b/test/token/when_receives_401_removes_token_test.dart
index 90ec986..bb12fb3 100644
--- a/test/token/when_receives_401_removes_token_test.dart
+++ b/test/token/when_receives_401_removes_token_test.dart
@@ -27,8 +27,9 @@
await pub.shouldExit(65);
- await d.tokensFile(
- {'version': 1, 'hosted': <Map<String, Object?>>[]},
- ).validate();
+ await d.tokensFile({
+ 'version': 1,
+ 'hosted': <Map<String, Object?>>[],
+ }).validate();
});
}
diff --git a/test/unknown_properties_in_description_test.dart b/test/unknown_properties_in_description_test.dart
index 137e89e..981f94a 100644
--- a/test/unknown_properties_in_description_test.dart
+++ b/test/unknown_properties_in_description_test.dart
@@ -22,83 +22,97 @@
},
);
server.serve('bar', '1.0.0');
- await d.appDir(
- pubspec: {
- 'environment': {'sdk': '^3.6.0'},
- },
- dependencies: {
- 'foo': {
- 'hosted': {'url': server.url, 'unknown': 11},
- 'version': '^1.0.0',
- },
- },
- ).create();
+ await d
+ .appDir(
+ pubspec: {
+ 'environment': {'sdk': '^3.6.0'},
+ },
+ dependencies: {
+ 'foo': {
+ 'hosted': {'url': server.url, 'unknown': 11},
+ 'version': '^1.0.0',
+ },
+ },
+ )
+ .create();
await pubGet(environment: {'_PUB_TEST_SDK_VERSION': '3.7.0'});
});
- test('Detects unknown attributes in descriptions in root project after 3.7',
- () async {
- final server = await servePackages();
- server.serve(
- 'foo',
- '1.0.0',
- sdk: '^3.6.0',
- deps: {
- 'bar': {
- 'hosted': {'url': server.url, 'unknown': 11},
+ test(
+ 'Detects unknown attributes in descriptions in root project after 3.7',
+ () async {
+ final server = await servePackages();
+ server.serve(
+ 'foo',
+ '1.0.0',
+ sdk: '^3.6.0',
+ deps: {
+ 'bar': {
+ 'hosted': {'url': server.url, 'unknown': 11},
+ },
},
- },
- );
- server.serve('bar', '1.0.0');
- await d.appDir(
- pubspec: {
- 'environment': {'sdk': '^3.7.0'},
- },
- dependencies: {
- 'foo': {
- 'hosted': {'url': server.url, 'unknown': 11},
- 'version': '^1.0.0',
- },
- },
- ).create();
- await pubGet(
- environment: {'_PUB_TEST_SDK_VERSION': '3.7.0'},
- error: contains('Invalid description in the "myapp" pubspec '
- 'on the "foo" dependency: Unknown key "unknown" in description.'),
- exitCode: DATA,
- );
- });
+ );
+ server.serve('bar', '1.0.0');
+ await d
+ .appDir(
+ pubspec: {
+ 'environment': {'sdk': '^3.7.0'},
+ },
+ dependencies: {
+ 'foo': {
+ 'hosted': {'url': server.url, 'unknown': 11},
+ 'version': '^1.0.0',
+ },
+ },
+ )
+ .create();
+ await pubGet(
+ environment: {'_PUB_TEST_SDK_VERSION': '3.7.0'},
+ error: contains(
+ 'Invalid description in the "myapp" pubspec '
+ 'on the "foo" dependency: Unknown key "unknown" in description.',
+ ),
+ exitCode: DATA,
+ );
+ },
+ );
- test('Detects unknown attributes in descriptions in dependency after 3.7',
- () async {
- final server = await servePackages();
- server.serve(
- 'foo',
- '1.0.0',
- sdk: '^3.7.0',
- deps: {
- 'bar': {
- 'hosted': {'url': server.url, 'unknown': 11},
+ test(
+ 'Detects unknown attributes in descriptions in dependency after 3.7',
+ () async {
+ final server = await servePackages();
+ server.serve(
+ 'foo',
+ '1.0.0',
+ sdk: '^3.7.0',
+ deps: {
+ 'bar': {
+ 'hosted': {'url': server.url, 'unknown': 11},
+ },
},
- },
- );
- server.serve('bar', '1.0.0');
- await d.appDir(
- pubspec: {
- 'environment': {'sdk': '^3.6.0'},
- },
- dependencies: {
- 'foo': {
- 'hosted': {'url': server.url, 'unknown': 11},
- 'version': '^1.0.0',
- },
- },
- ).create();
- await pubGet(
- environment: {'_PUB_TEST_SDK_VERSION': '3.7.0'},
- error: contains('Invalid description in the "foo" pubspec '
- 'on the "bar" dependency: Unknown key "unknown" in description.'),
- exitCode: DATA,
- );
- });
+ );
+ server.serve('bar', '1.0.0');
+ await d
+ .appDir(
+ pubspec: {
+ 'environment': {'sdk': '^3.6.0'},
+ },
+ dependencies: {
+ 'foo': {
+ 'hosted': {'url': server.url, 'unknown': 11},
+ 'version': '^1.0.0',
+ },
+ },
+ )
+ .create();
+ await pubGet(
+ environment: {'_PUB_TEST_SDK_VERSION': '3.7.0'},
+ error: contains(
+ 'Invalid description in the "foo" pubspec '
+ 'on the "bar" dependency: Unknown key "unknown" in description.',
+ ),
+ exitCode: DATA,
+ );
+ },
+ );
}
diff --git a/test/unknown_source_test.dart b/test/unknown_source_test.dart
index a3e4485..62b9df5 100644
--- a/test/unknown_source_test.dart
+++ b/test/unknown_source_test.dart
@@ -12,11 +12,13 @@
void main() {
forBothPubGetAndUpgrade((command) {
test('fails gracefully on a dependency from an unknown source', () async {
- await d.appDir(
- dependencies: {
- 'foo': {'bad': 'foo'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'bad': 'foo'},
+ },
+ )
+ .create();
await pubCommand(
command,
@@ -27,8 +29,7 @@
);
});
- test(
- 'fails gracefully on transitive dependency from an unknown '
+ test('fails gracefully on transitive dependency from an unknown '
'source', () async {
await d.dir('foo', [
d.libDir('foo', 'foo 0.0.1'),
@@ -41,11 +42,13 @@
),
]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'path': '../foo'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'path': '../foo'},
+ },
+ )
+ .create();
await pubCommand(
command,
@@ -58,8 +61,10 @@
});
test('ignores unknown source in lockfile', () async {
- await d
- .dir('foo', [d.libDir('foo'), d.libPubspec('foo', '0.0.1')]).create();
+ await d.dir('foo', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '0.0.1'),
+ ]).create();
// Depend on "foo" from a valid source.
await d.dir(appPath, [
diff --git a/test/unpack_test.dart b/test/unpack_test.dart
index 56fa6a0..c5f5528 100644
--- a/test/unpack_test.dart
+++ b/test/unpack_test.dart
@@ -29,17 +29,17 @@
await runPub(
args: ['unpack', 'foo:1.0'],
- error: 'Error on line 1, column 1 of descriptor: '
+ error:
+ 'Error on line 1, column 1 of descriptor: '
'A dependency specification must be a string or a mapping.',
exitCode: DATA,
);
+ await runPub(args: ['unpack', 'foo']);
await runPub(
args: ['unpack', 'foo'],
- );
- await runPub(
- args: ['unpack', 'foo'],
- error: 'Target directory `.${s}foo-1.2.3` already exists. '
+ error:
+ 'Target directory `.${s}foo-1.2.3` already exists. '
'Use --force to overwrite.',
exitCode: 1,
);
@@ -76,20 +76,20 @@
Resolving dependencies in `.${s}foo-1.2.3`...
'''),
contains('To explore type: cd .${s}foo-1.2.3'),
- contains(
- 'To explore the example type: cd .${s}foo-1.2.3${s}example',
- ),
+ contains('To explore the example type: cd .${s}foo-1.2.3${s}example'),
),
);
expect(
- File(p.join(d.sandbox, appPath, 'foo-1.2.3', 'pubspec.yaml'))
- .existsSync(),
+ File(
+ p.join(d.sandbox, appPath, 'foo-1.2.3', 'pubspec.yaml'),
+ ).existsSync(),
isTrue,
);
expect(
- File(p.join(d.sandbox, appPath, 'foo-1.2.3', 'example', 'pubspec.yaml'))
- .existsSync(),
+ File(
+ p.join(d.sandbox, appPath, 'foo-1.2.3', 'example', 'pubspec.yaml'),
+ ).existsSync(),
isTrue,
);
@@ -155,9 +155,7 @@
args: ['unpack', 'foo:1.0.0'],
output: allOf(
contains('Downloading foo 1.0.0 to `.${s}foo-1.0.0`...'),
- contains(
- '+ bar',
- ),
+ contains('+ bar'),
),
environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
);
diff --git a/test/upgrade/example_warns_about_major_versions_test.dart b/test/upgrade/example_warns_about_major_versions_test.dart
index 9a487b6..e774fc1 100644
--- a/test/upgrade/example_warns_about_major_versions_test.dart
+++ b/test/upgrade/example_warns_about_major_versions_test.dart
@@ -8,94 +8,96 @@
void main() {
testWithGolden(
- 'pub upgrade --major-versions does not update major versions in example/',
- (ctx) async {
- await servePackages()
- ..serve('foo', '1.0.0')
- ..serve('foo', '2.0.0')
- ..serve('bar', '1.0.0')
- ..serve('bar', '2.0.0');
- await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- 'dev_dependencies': {'bar': '^1.0.0'},
- }),
- d.dir('example', [
+ 'pub upgrade --major-versions does not update major versions in example/',
+ (ctx) async {
+ await servePackages()
+ ..serve('foo', '1.0.0')
+ ..serve('foo', '2.0.0')
+ ..serve('bar', '1.0.0')
+ ..serve('bar', '2.0.0');
+ await d.dir(appPath, [
d.pubspec({
- 'name': 'app_example',
- 'dependencies': {
- 'bar': 'any',
- 'foo': '^1.0.0',
- 'myapp': {'path': '..'},
- },
+ 'name': 'myapp',
+ 'dev_dependencies': {'bar': '^1.0.0'},
}),
- ]),
- ]).create();
+ d.dir('example', [
+ d.pubspec({
+ 'name': 'app_example',
+ 'dependencies': {
+ 'bar': 'any',
+ 'foo': '^1.0.0',
+ 'myapp': {'path': '..'},
+ },
+ }),
+ ]),
+ ]).create();
- await ctx.run(['upgrade', '--major-versions', '--example']);
- await ctx.run(['upgrade', '--major-versions', '--directory', 'example']);
- });
+ await ctx.run(['upgrade', '--major-versions', '--example']);
+ await ctx.run(['upgrade', '--major-versions', '--directory', 'example']);
+ },
+ );
testWithGolden(
- 'pub upgrade --null-safety does not update null-safety of dependencies in example/',
- (ctx) async {
- await servePackages()
- ..serve(
- 'foo',
- '1.0.0',
- pubspec: {
- 'environment': {'sdk': '>=2.7.0 <3.0.0'},
- },
- )
- ..serve(
- 'foo',
- '2.0.0',
- pubspec: {
- 'environment': {'sdk': '>=2.12.0 <3.0.0'},
- },
- )
- ..serve(
- 'bar',
- '1.0.0',
- pubspec: {
- 'environment': {'sdk': '>=2.7.0 <3.0.0'},
- },
- )
- ..serve(
- 'bar',
- '2.0.0',
- pubspec: {
- 'environment': {'sdk': '>=2.12.0 <3.0.0'},
- },
- );
- await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- 'dev_dependencies': {'bar': '^1.0.0'},
- 'environment': {'sdk': '>=2.12.0 <3.0.0'},
- }),
- d.dir('example', [
- d.pubspec({
- 'name': 'app_example',
- 'environment': {'sdk': '>=2.12.0 <3.0.0'},
- 'dependencies': {
- 'foo': '^1.0.0',
- // This will make the implicit upgrade of the example folder fail:
- 'bar': '^1.0.0',
- 'myapp': {'path': '..'},
+ 'pub upgrade --null-safety does not update null-safety of dependencies in example/',
+ (ctx) async {
+ await servePackages()
+ ..serve(
+ 'foo',
+ '1.0.0',
+ pubspec: {
+ 'environment': {'sdk': '>=2.7.0 <3.0.0'},
},
+ )
+ ..serve(
+ 'foo',
+ '2.0.0',
+ pubspec: {
+ 'environment': {'sdk': '>=2.12.0 <3.0.0'},
+ },
+ )
+ ..serve(
+ 'bar',
+ '1.0.0',
+ pubspec: {
+ 'environment': {'sdk': '>=2.7.0 <3.0.0'},
+ },
+ )
+ ..serve(
+ 'bar',
+ '2.0.0',
+ pubspec: {
+ 'environment': {'sdk': '>=2.12.0 <3.0.0'},
+ },
+ );
+ await d.dir(appPath, [
+ d.pubspec({
+ 'name': 'myapp',
+ 'dev_dependencies': {'bar': '^1.0.0'},
+ 'environment': {'sdk': '>=2.12.0 <3.0.0'},
}),
- ]),
- ]).create();
+ d.dir('example', [
+ d.pubspec({
+ 'name': 'app_example',
+ 'environment': {'sdk': '>=2.12.0 <3.0.0'},
+ 'dependencies': {
+ 'foo': '^1.0.0',
+ // This will make the implicit upgrade of the example folder fail:
+ 'bar': '^1.0.0',
+ 'myapp': {'path': '..'},
+ },
+ }),
+ ]),
+ ]).create();
- await ctx.run(
- ['upgrade', '--null-safety', '--example'],
- environment: {'_PUB_TEST_SDK_VERSION': '2.13.0'},
- );
+ await ctx.run(
+ ['upgrade', '--null-safety', '--example'],
+ environment: {'_PUB_TEST_SDK_VERSION': '2.13.0'},
+ );
- await ctx.run(
- ['upgrade', '--null-safety', '--directory', 'example'],
- environment: {'_PUB_TEST_SDK_VERSION': '2.13.0'},
- );
- });
+ await ctx.run(
+ ['upgrade', '--null-safety', '--directory', 'example'],
+ environment: {'_PUB_TEST_SDK_VERSION': '2.13.0'},
+ );
+ },
+ );
}
diff --git a/test/upgrade/git/do_not_upgrade_if_unneeded_test.dart b/test/upgrade/git/do_not_upgrade_if_unneeded_test.dart
index 37df96f..d53b21e 100644
--- a/test/upgrade/git/do_not_upgrade_if_unneeded_test.dart
+++ b/test/upgrade/git/do_not_upgrade_if_unneeded_test.dart
@@ -9,8 +9,7 @@
import '../../test_pub.dart';
void main() {
- test(
- "doesn't upgrade one locked Git package's dependencies if it's "
+ test("doesn't upgrade one locked Git package's dependencies if it's "
'not necessary', () async {
ensureGit();
@@ -21,24 +20,27 @@
'1.0.0',
deps: {
'foo_dep': {
- 'git': p
- .toUri(p.absolute(d.sandbox, appPath, '../foo_dep.git'))
- .toString(),
+ 'git':
+ p
+ .toUri(p.absolute(d.sandbox, appPath, '../foo_dep.git'))
+ .toString(),
},
},
),
]).create();
- await d.git(
- 'foo_dep.git',
- [d.libDir('foo_dep'), d.libPubspec('foo_dep', '1.0.0')],
- ).create();
+ await d.git('foo_dep.git', [
+ d.libDir('foo_dep'),
+ d.libPubspec('foo_dep', '1.0.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
await pubGet();
@@ -62,9 +64,10 @@
'1.0.0',
deps: {
'foo_dep': {
- 'git': p
- .toUri(p.absolute(d.sandbox, appPath, '../foo_dep.git'))
- .toString(),
+ 'git':
+ p
+ .toUri(p.absolute(d.sandbox, appPath, '../foo_dep.git'))
+ .toString(),
},
},
),
@@ -78,9 +81,7 @@
await pubUpgrade(args: ['foo']);
await d.dir(cachePath, [
- d.dir('git', [
- d.gitPackageRevisionCacheDir('foo', modifier: 2),
- ]),
+ d.dir('git', [d.gitPackageRevisionCacheDir('foo', modifier: 2)]),
]).validate();
expect(packageSpec('foo_dep'), originalFooDepSpec);
diff --git a/test/upgrade/git/upgrade_locked_test.dart b/test/upgrade/git/upgrade_locked_test.dart
index f528a2e..dcc78a3 100644
--- a/test/upgrade/git/upgrade_locked_test.dart
+++ b/test/upgrade/git/upgrade_locked_test.dart
@@ -11,31 +11,33 @@
test('upgrades locked Git packages', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
- await d.git(
- 'bar.git',
- [d.libDir('bar'), d.libPubspec('bar', '1.0.0')],
- ).create();
+ await d.git('bar.git', [
+ d.libDir('bar'),
+ d.libPubspec('bar', '1.0.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- 'bar': {'git': '../bar.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ 'bar': {'git': '../bar.git'},
+ },
+ )
+ .create();
await pubGet();
await d.dir(cachePath, [
d.dir('git', [
- d.dir(
- 'cache',
- [d.gitPackageRepoCacheDir('foo'), d.gitPackageRepoCacheDir('bar')],
- ),
+ d.dir('cache', [
+ d.gitPackageRepoCacheDir('foo'),
+ d.gitPackageRepoCacheDir('bar'),
+ ]),
d.gitPackageRevisionCacheDir('foo'),
d.gitPackageRevisionCacheDir('bar'),
]),
@@ -44,15 +46,15 @@
final originalFooSpec = packageSpec('foo');
final originalBarSpec = packageSpec('bar');
- await d.git(
- 'foo.git',
- [d.libDir('foo', 'foo 2'), d.libPubspec('foo', '1.0.0')],
- ).commit();
+ await d.git('foo.git', [
+ d.libDir('foo', 'foo 2'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).commit();
- await d.git(
- 'bar.git',
- [d.libDir('bar', 'bar 2'), d.libPubspec('bar', '1.0.0')],
- ).commit();
+ await d.git('bar.git', [
+ d.libDir('bar', 'bar 2'),
+ d.libPubspec('bar', '1.0.0'),
+ ]).commit();
await pubUpgrade();
diff --git a/test/upgrade/git/upgrade_one_locked_test.dart b/test/upgrade/git/upgrade_one_locked_test.dart
index 089328c..bd71fe9 100644
--- a/test/upgrade/git/upgrade_one_locked_test.dart
+++ b/test/upgrade/git/upgrade_one_locked_test.dart
@@ -11,31 +11,33 @@
test('upgrades one locked Git package but no others', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
- await d.git(
- 'bar.git',
- [d.libDir('bar'), d.libPubspec('bar', '1.0.0')],
- ).create();
+ await d.git('bar.git', [
+ d.libDir('bar'),
+ d.libPubspec('bar', '1.0.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- 'bar': {'git': '../bar.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ 'bar': {'git': '../bar.git'},
+ },
+ )
+ .create();
await pubGet();
await d.dir(cachePath, [
d.dir('git', [
- d.dir(
- 'cache',
- [d.gitPackageRepoCacheDir('foo'), d.gitPackageRepoCacheDir('bar')],
- ),
+ d.dir('cache', [
+ d.gitPackageRepoCacheDir('foo'),
+ d.gitPackageRepoCacheDir('bar'),
+ ]),
d.gitPackageRevisionCacheDir('foo'),
d.gitPackageRevisionCacheDir('bar'),
]),
@@ -43,22 +45,20 @@
final originalBarSpec = packageSpec('bar');
- await d.git(
- 'foo.git',
- [d.libDir('foo', 'foo 2'), d.libPubspec('foo', '1.0.0')],
- ).commit();
+ await d.git('foo.git', [
+ d.libDir('foo', 'foo 2'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).commit();
- await d.git(
- 'bar.git',
- [d.libDir('bar', 'bar 2'), d.libPubspec('bar', '1.0.0')],
- ).commit();
+ await d.git('bar.git', [
+ d.libDir('bar', 'bar 2'),
+ d.libPubspec('bar', '1.0.0'),
+ ]).commit();
await pubUpgrade(args: ['foo']);
await d.dir(cachePath, [
- d.dir('git', [
- d.gitPackageRevisionCacheDir('foo', modifier: 2),
- ]),
+ d.dir('git', [d.gitPackageRevisionCacheDir('foo', modifier: 2)]),
]).validate();
expect(packageSpec('bar'), originalBarSpec);
diff --git a/test/upgrade/git/upgrade_to_incompatible_pubspec_test.dart b/test/upgrade/git/upgrade_to_incompatible_pubspec_test.dart
index 10a6605..1e78ea9 100644
--- a/test/upgrade/git/upgrade_to_incompatible_pubspec_test.dart
+++ b/test/upgrade/git/upgrade_to_incompatible_pubspec_test.dart
@@ -12,34 +12,34 @@
test('upgrades Git packages to an incompatible pubspec', () async {
ensureGit();
- await d.git(
- 'foo.git',
- [d.libDir('foo'), d.libPubspec('foo', '1.0.0')],
- ).create();
+ await d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]).create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
await pubGet();
await d.dir(cachePath, [
d.dir('git', [
- d.dir('cache', [
- d.gitPackageRepoCacheDir('foo'),
- ]),
+ d.dir('cache', [d.gitPackageRepoCacheDir('foo')]),
d.gitPackageRevisionCacheDir('foo'),
]),
]).validate();
final originalFooSpec = packageSpec('foo');
- await d.git(
- 'foo.git',
- [d.libDir('zoo'), d.libPubspec('zoo', '1.0.0')],
- ).commit();
+ await d.git('foo.git', [
+ d.libDir('zoo'),
+ d.libPubspec('zoo', '1.0.0'),
+ ]).commit();
await pubUpgrade(
error: contains('"name" field doesn\'t match expected name "foo".'),
diff --git a/test/upgrade/git/upgrade_to_nonexistent_pubspec_test.dart b/test/upgrade/git/upgrade_to_nonexistent_pubspec_test.dart
index 82f7ebf..815742a 100644
--- a/test/upgrade/git/upgrade_to_nonexistent_pubspec_test.dart
+++ b/test/upgrade/git/upgrade_to_nonexistent_pubspec_test.dart
@@ -11,15 +11,19 @@
test('upgrades Git packages to a nonexistent pubspec', () async {
ensureGit();
- final repo =
- d.git('foo.git', [d.libDir('foo'), d.libPubspec('foo', '1.0.0')]);
+ final repo = d.git('foo.git', [
+ d.libDir('foo'),
+ d.libPubspec('foo', '1.0.0'),
+ ]);
await repo.create();
- await d.appDir(
- dependencies: {
- 'foo': {'git': '../foo.git'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'git': '../foo.git'},
+ },
+ )
+ .create();
await pubGet();
@@ -29,8 +33,10 @@
await repo.runGit(['commit', '-m', 'delete']);
await pubUpgrade(
- error: RegExp(r'Could not find a file named "pubspec.yaml" '
- r'in [^\n]*\.'),
+ error: RegExp(
+ r'Could not find a file named "pubspec.yaml" '
+ r'in [^\n]*\.',
+ ),
);
expect(packageSpec('foo'), originalFooSpec);
diff --git a/test/upgrade/hosted/unlock_if_necessary_test.dart b/test/upgrade/hosted/unlock_if_necessary_test.dart
index 37ed5ec..37098ce 100644
--- a/test/upgrade/hosted/unlock_if_necessary_test.dart
+++ b/test/upgrade/hosted/unlock_if_necessary_test.dart
@@ -8,8 +8,7 @@
import '../../test_pub.dart';
void main() {
- test(
- "upgrades one locked pub server package's dependencies if it's "
+ test("upgrades one locked pub server package's dependencies if it's "
'necessary', () async {
final server = await servePackages();
diff --git a/test/upgrade/hosted/warn_about_discontinued_test.dart b/test/upgrade/hosted/warn_about_discontinued_test.dart
index 46a06d0..e3aa5ed 100644
--- a/test/upgrade/hosted/warn_about_discontinued_test.dart
+++ b/test/upgrade/hosted/warn_about_discontinued_test.dart
@@ -9,9 +9,10 @@
void main() {
test('Warns about discontinued dependencies', () async {
- final server = await servePackages()
- ..serve('foo', '1.2.3', deps: {'transitive': 'any'})
- ..serve('transitive', '1.0.0');
+ final server =
+ await servePackages()
+ ..serve('foo', '1.2.3', deps: {'transitive': 'any'})
+ ..serve('transitive', '1.0.0');
await d.appDir(dependencies: {'foo': '1.2.3'}).create();
await pubGet();
@@ -42,9 +43,10 @@
});
test('Warns about discontinued dev_dependencies', () async {
- final server = await servePackages()
- ..serve('foo', '1.2.3', deps: {'transitive': 'any'})
- ..serve('transitive', '1.0.0');
+ final server =
+ await servePackages()
+ ..serve('foo', '1.2.3', deps: {'transitive': 'any'})
+ ..serve('transitive', '1.0.0');
await d.dir(appPath, [
d.file('pubspec.yaml', '''
diff --git a/test/upgrade/report/describes_change_test.dart b/test/upgrade/report/describes_change_test.dart
index 97c6e5b..1798787 100644
--- a/test/upgrade/report/describes_change_test.dart
+++ b/test/upgrade/report/describes_change_test.dart
@@ -28,9 +28,7 @@
);
// Try without --dry-run
- await pubUpgrade(
- output: contains('1 package is discontinued.'),
- );
+ await pubUpgrade(output: contains('1 package is discontinued.'));
});
test('shows how package changed from previous lockfile', () async {
@@ -62,17 +60,19 @@
]).create();
// Create the first lockfile.
- await d.appDir(
- dependencies: {
- 'unchanged': 'any',
- 'contents_changed': '1.0.0',
- 'version_upgraded': '1.0.0',
- 'version_downgraded': '2.0.0',
- 'source_changed': 'any',
- 'package_removed': 'any',
- 'description_changed': {'path': '../description_changed_1'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'unchanged': 'any',
+ 'contents_changed': '1.0.0',
+ 'version_upgraded': '1.0.0',
+ 'version_downgraded': '2.0.0',
+ 'source_changed': 'any',
+ 'package_removed': 'any',
+ 'description_changed': {'path': '../description_changed_1'},
+ },
+ )
+ .create();
await pubGet();
server.serve(
@@ -82,17 +82,19 @@
);
// Change the pubspec.
- await d.appDir(
- dependencies: {
- 'unchanged': 'any',
- 'version_upgraded': 'any',
- 'version_downgraded': '1.0.0',
- 'source_changed': {'path': '../source_changed'},
- 'package_added': 'any',
- 'description_changed': {'path': '../description_changed_2'},
- 'contents_changed': '1.0.0',
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'unchanged': 'any',
+ 'version_upgraded': 'any',
+ 'version_downgraded': '1.0.0',
+ 'source_changed': {'path': '../source_changed'},
+ 'package_added': 'any',
+ 'description_changed': {'path': '../description_changed_2'},
+ 'contents_changed': '1.0.0',
+ },
+ )
+ .create();
// Upgrade everything.
await pubUpgrade(
diff --git a/test/upgrade/report/does_not_show_newer_versions_for_locked_packages_test.dart b/test/upgrade/report/does_not_show_newer_versions_for_locked_packages_test.dart
index 16b7419..8bcae62 100644
--- a/test/upgrade/report/does_not_show_newer_versions_for_locked_packages_test.dart
+++ b/test/upgrade/report/does_not_show_newer_versions_for_locked_packages_test.dart
@@ -8,8 +8,7 @@
import '../../test_pub.dart';
void main() {
- test(
- 'Shows newer versions available for packages that are locked '
+ test('Shows newer versions available for packages that are locked '
'and not being upgraded', () async {
await servePackages()
..serve('not_upgraded', '1.0.0')
@@ -20,29 +19,26 @@
..serve('upgraded', '3.0.0-dev');
// Constraint everything to the first version.
- await d.appDir(
- dependencies: {'not_upgraded': '1.0.0', 'upgraded': '1.0.0'},
- ).create();
+ await d
+ .appDir(dependencies: {'not_upgraded': '1.0.0', 'upgraded': '1.0.0'})
+ .create();
await pubGet();
// Loosen the constraints.
- await d.appDir(
- dependencies: {'not_upgraded': 'any', 'upgraded': 'any'},
- ).create();
+ await d
+ .appDir(dependencies: {'not_upgraded': 'any', 'upgraded': 'any'})
+ .create();
// Only upgrade "upgraded".
await pubUpgrade(
args: ['upgraded'],
- output: RegExp(
- r'''
+ output: RegExp(r'''
Resolving dependencies\.\.\..*
Downloading packages\.\.\..*
not_upgraded 1\.0\.0 \(2\.0\.0 available\)
. upgraded 2\.0\.0 \(was 1\.0\.0\)
-''',
- multiLine: true,
- ),
+''', multiLine: true),
environment: {'PUB_ALLOW_PRERELEASE_SDK': 'false'},
);
});
diff --git a/test/upgrade/report/highlights_overrides_test.dart b/test/upgrade/report/highlights_overrides_test.dart
index 617f9e3..404da58 100644
--- a/test/upgrade/report/highlights_overrides_test.dart
+++ b/test/upgrade/report/highlights_overrides_test.dart
@@ -21,14 +21,11 @@
// Upgrade everything.
await pubUpgrade(
- output: RegExp(
- r'''
+ output: RegExp(r'''
Resolving dependencies\.\.\..*
Downloading packages\.\.\..*
! overridden 1\.0\.0 \(overridden\)
-''',
- multiLine: true,
- ),
+''', multiLine: true),
);
});
}
diff --git a/test/upgrade/report/leading_character_shows_change_test.dart b/test/upgrade/report/leading_character_shows_change_test.dart
index 73d6d5c..52b6f9e 100644
--- a/test/upgrade/report/leading_character_shows_change_test.dart
+++ b/test/upgrade/report/leading_character_shows_change_test.dart
@@ -71,8 +71,7 @@
// Upgrade everything.
await pubUpgrade(
- output: RegExp(
- r'''
+ output: RegExp(r'''
Resolving dependencies\.\.\..*
Downloading packages\.\.\..*
\+ added .*
@@ -83,9 +82,7 @@
> upgraded .*
These packages are no longer being depended on:
- removed .*
-''',
- multiLine: true,
- ),
+''', multiLine: true),
environment: {'PUB_ALLOW_PRERELEASE_SDK': 'false'},
);
});
diff --git a/test/upgrade/report/shows_newer_available_versions_test.dart b/test/upgrade/report/shows_newer_available_versions_test.dart
index e3befce..cb17c15 100644
--- a/test/upgrade/report/shows_newer_available_versions_test.dart
+++ b/test/upgrade/report/shows_newer_available_versions_test.dart
@@ -31,23 +31,24 @@
..serve('one_newer_stable', '1.0.1');
// Constraint everything to the first version.
- await d.appDir(
- dependencies: {
- 'multiple_newer': '1.0.0',
- 'multiple_newer_stable': '1.0.0',
- 'multiple_newer_unstable': '1.0.0',
- 'multiple_newer_unstable2': '1.0.1-unstable.1',
- 'no_newer': '1.0.0',
- 'one_newer_unstable': '1.0.0',
- 'one_newer_unstable2': '1.0.1-unstable.1',
- 'one_newer_stable': '1.0.0',
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'multiple_newer': '1.0.0',
+ 'multiple_newer_stable': '1.0.0',
+ 'multiple_newer_unstable': '1.0.0',
+ 'multiple_newer_unstable2': '1.0.1-unstable.1',
+ 'no_newer': '1.0.0',
+ 'one_newer_unstable': '1.0.0',
+ 'one_newer_unstable2': '1.0.1-unstable.1',
+ 'one_newer_stable': '1.0.0',
+ },
+ )
+ .create();
// Upgrade everything.
await pubUpgrade(
- output: RegExp(
- r'''
+ output: RegExp(r'''
Resolving dependencies\.\.\..*
Downloading packages\.\.\..*
. multiple_newer 1\.0\.0 \(1\.0\.1 available\)
@@ -58,9 +59,7 @@
. one_newer_stable 1\.0\.0 \(1\.0\.1 available\)
. one_newer_unstable 1\.0\.0
. one_newer_unstable2 1\.0\.1-unstable\.1 \(1\.0\.1-unstable\.2 available\)
-''',
- multiLine: true,
- ),
+''', multiLine: true),
environment: {'PUB_ALLOW_PRERELEASE_SDK': 'false'},
);
});
diff --git a/test/upgrade/report/shows_number_of_changed_dependencies_test.dart b/test/upgrade/report/shows_number_of_changed_dependencies_test.dart
index 784fc83..7f2ec0f 100644
--- a/test/upgrade/report/shows_number_of_changed_dependencies_test.dart
+++ b/test/upgrade/report/shows_number_of_changed_dependencies_test.dart
@@ -8,8 +8,7 @@
import '../../test_pub.dart';
void main() {
- test(
- 'does not show how many newer versions are available for '
+ test('does not show how many newer versions are available for '
'packages that are locked and not being upgraded', () async {
await servePackages()
..serve('a', '1.0.0')
diff --git a/test/upgrade/report/shows_pub_outdated_test.dart b/test/upgrade/report/shows_pub_outdated_test.dart
index cddcda2..6abf571 100644
--- a/test/upgrade/report/shows_pub_outdated_test.dart
+++ b/test/upgrade/report/shows_pub_outdated_test.dart
@@ -28,102 +28,95 @@
..serve('one_newer_stable', '1.0.1');
// Constraint everything to the first version.
- await d.appDir(
- dependencies: {
- 'multiple_newer': '1.0.0',
- 'multiple_newer_stable': '1.0.0',
- 'multiple_newer_unstable': '1.0.0',
- 'no_newer': '1.0.0',
- 'one_newer_unstable': '1.0.0',
- 'one_newer_stable': '1.0.0',
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'multiple_newer': '1.0.0',
+ 'multiple_newer_stable': '1.0.0',
+ 'multiple_newer_unstable': '1.0.0',
+ 'no_newer': '1.0.0',
+ 'one_newer_unstable': '1.0.0',
+ 'one_newer_stable': '1.0.0',
+ },
+ )
+ .create();
// Upgrade everything.
await pubUpgrade(
- output: RegExp(
- r'''
+ output: RegExp(r'''
3 packages have newer versions incompatible with dependency constraints.
-Try `dart pub outdated` for more information.$''',
- multiLine: true,
- ),
+Try `dart pub outdated` for more information.$''', multiLine: true),
);
// Running inside Flutter this will recommend the Flutter variant.
await pubUpgrade(
environment: {'PUB_ENVIRONMENT': 'flutter_cli:get'},
- output: RegExp(
- r'''
+ output: RegExp(r'''
3 packages have newer versions incompatible with dependency constraints.
-Try `flutter pub outdated` for more information.$''',
- multiLine: true,
- ),
+Try `flutter pub outdated` for more information.$''', multiLine: true),
);
// Upgrade `multiple_newer` to `1.0.1`.
- await d.appDir(
- dependencies: {
- 'multiple_newer': '1.0.1',
- 'multiple_newer_stable': '1.0.0',
- 'multiple_newer_unstable': '1.0.0',
- 'no_newer': '1.0.0',
- 'one_newer_unstable': '1.0.0',
- 'one_newer_stable': '1.0.0',
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'multiple_newer': '1.0.1',
+ 'multiple_newer_stable': '1.0.0',
+ 'multiple_newer_unstable': '1.0.0',
+ 'no_newer': '1.0.0',
+ 'one_newer_unstable': '1.0.0',
+ 'one_newer_stable': '1.0.0',
+ },
+ )
+ .create();
// Upgrade everything.
await pubUpgrade(
- output: RegExp(
- r'''
+ output: RegExp(r'''
2 packages have newer versions incompatible with dependency constraints.
-Try `dart pub outdated` for more information.$''',
- multiLine: true,
- ),
+Try `dart pub outdated` for more information.$''', multiLine: true),
);
// Upgrade `multiple_newer` to `1.0.2-unstable.1`.
- await d.appDir(
- dependencies: {
- 'multiple_newer': '1.0.2-unstable.1',
- 'multiple_newer_stable': '1.0.0',
- 'multiple_newer_unstable': '1.0.0',
- 'no_newer': '1.0.0',
- 'one_newer_unstable': '1.0.0',
- 'one_newer_stable': '1.0.0',
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'multiple_newer': '1.0.2-unstable.1',
+ 'multiple_newer_stable': '1.0.0',
+ 'multiple_newer_unstable': '1.0.0',
+ 'no_newer': '1.0.0',
+ 'one_newer_unstable': '1.0.0',
+ 'one_newer_stable': '1.0.0',
+ },
+ )
+ .create();
// Upgrade everything.
await pubUpgrade(
- output: RegExp(
- r'''
+ output: RegExp(r'''
3 packages have newer versions incompatible with dependency constraints.
-Try `dart pub outdated` for more information.$''',
- multiLine: true,
- ),
+Try `dart pub outdated` for more information.$''', multiLine: true),
);
// Upgrade all except `one_newer_stable`.
- await d.appDir(
- dependencies: {
- 'multiple_newer': '1.0.2-unstable.2',
- 'multiple_newer_stable': '1.0.2',
- 'multiple_newer_unstable': '1.0.1-unstable.2',
- 'no_newer': '1.0.0',
- 'one_newer_unstable': '1.0.1-unstable.1',
- 'one_newer_stable': '1.0.0',
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'multiple_newer': '1.0.2-unstable.2',
+ 'multiple_newer_stable': '1.0.2',
+ 'multiple_newer_unstable': '1.0.1-unstable.2',
+ 'no_newer': '1.0.0',
+ 'one_newer_unstable': '1.0.1-unstable.1',
+ 'one_newer_stable': '1.0.0',
+ },
+ )
+ .create();
// Upgrade everything.
await pubUpgrade(
- output: RegExp(
- r'''
+ output: RegExp(r'''
1 package has newer versions incompatible with dependency constraints.
-Try `dart pub outdated` for more information.$''',
- multiLine: true,
- ),
+Try `dart pub outdated` for more information.$''', multiLine: true),
);
});
}
diff --git a/test/upgrade/upgrade_major_versions_test.dart b/test/upgrade/upgrade_major_versions_test.dart
index c876c43..26cf68d 100644
--- a/test/upgrade/upgrade_major_versions_test.dart
+++ b/test/upgrade/upgrade_major_versions_test.dart
@@ -18,13 +18,11 @@
..serve('baz', '1.0.0')
..serve('baz', '1.0.1');
- await d.appDir(
- dependencies: {
- 'foo': '^1.0.0',
- 'bar': '^0.1.0',
- 'baz': '^1.0.0',
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {'foo': '^1.0.0', 'bar': '^0.1.0', 'baz': '^1.0.0'},
+ )
+ .create();
await pubGet();
@@ -38,13 +36,11 @@
]),
);
- await d.appDir(
- dependencies: {
- 'foo': '^2.0.0',
- 'bar': '^0.2.0',
- 'baz': '^1.0.0',
- },
- ).validate();
+ await d
+ .appDir(
+ dependencies: {'foo': '^2.0.0', 'bar': '^0.2.0', 'baz': '^1.0.0'},
+ )
+ .validate();
await d.appPackageConfigFile([
d.packageConfigEntry(name: 'foo', version: '2.0.0'),
d.packageConfigEntry(name: 'bar', version: '0.2.0'),
@@ -103,17 +99,13 @@
});
test('upgrades only the selected package', () async {
- final server = await servePackages()
- ..serve('foo', '1.0.0')
- ..serve('foo', '2.0.0')
- ..serve('bar', '0.1.0');
+ final server =
+ await servePackages()
+ ..serve('foo', '1.0.0')
+ ..serve('foo', '2.0.0')
+ ..serve('bar', '0.1.0');
- await d.appDir(
- dependencies: {
- 'foo': '^1.0.0',
- 'bar': '^0.1.0',
- },
- ).create();
+ await d.appDir(dependencies: {'foo': '^1.0.0', 'bar': '^0.1.0'}).create();
await pubGet();
@@ -128,12 +120,14 @@
]),
);
- await d.appDir(
- dependencies: {
- 'foo': '^2.0.0', // bumped
- 'bar': '^0.1.0',
- },
- ).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': '^2.0.0', // bumped
+ 'bar': '^0.1.0',
+ },
+ )
+ .validate();
await d.appPackageConfigFile([
d.packageConfigEntry(name: 'foo', version: '2.0.0'),
@@ -185,14 +179,8 @@
d.pubspec({
'name': 'myapp',
'version': '1.0.1',
- 'dependencies': {
- 'foo': 'any',
- 'bar': 'any',
- },
- 'dependency_overrides': {
- 'foo': '1.0.0',
- 'bar': '1.0.0',
- },
+ 'dependencies': {'foo': 'any', 'bar': 'any'},
+ 'dependency_overrides': {'foo': '1.0.0', 'bar': '1.0.0'},
}),
]).create();
@@ -212,14 +200,8 @@
d.pubspec({
'name': 'myapp',
'version': '1.0.1',
- 'dependencies': {
- 'foo': 'any',
- 'bar': 'any',
- },
- 'dependency_overrides': {
- 'foo': '1.0.0',
- 'bar': '1.0.0',
- },
+ 'dependencies': {'foo': 'any', 'bar': 'any'},
+ 'dependency_overrides': {'foo': '1.0.0', 'bar': '1.0.0'},
}),
]).validate();
@@ -247,12 +229,7 @@
..serve('bar', '3.0.0')
..serve('bar', '4.0.0');
- await d.appDir(
- dependencies: {
- 'foo': '^1.0.0',
- 'bar': '^2.0.0',
- },
- ).create();
+ await d.appDir(dependencies: {'foo': '^1.0.0', 'bar': '^2.0.0'}).create();
await pubGet();
@@ -265,12 +242,9 @@
]),
);
- await d.appDir(
- dependencies: {
- 'foo': '^1.0.0',
- 'bar': '^4.0.0',
- },
- ).validate();
+ await d
+ .appDir(dependencies: {'foo': '^1.0.0', 'bar': '^4.0.0'})
+ .validate();
await d.appPackageConfigFile([
d.packageConfigEntry(name: 'foo', version: '1.0.0'),
@@ -283,12 +257,7 @@
..serve('foo', '1.0.0')
..serve('foo', '2.0.0');
await d.dir('bar', [d.libPubspec('bar', '1.0.0')]).create();
- await d.appDir(
- dependencies: {
- 'foo': '^1.0.0',
- 'bar': '^1.0.0',
- },
- ).create();
+ await d.appDir(dependencies: {'foo': '^1.0.0', 'bar': '^1.0.0'}).create();
await d.dir(appPath, [
d.pubspecOverrides({
'dependency_overrides': {
@@ -313,11 +282,13 @@
final alternativeServer = await startPackageServer();
alternativeServer.serve('foo', '1.0.0');
alternativeServer.serve('foo', '2.0.0');
- await d.appDir(
- dependencies: {
- 'foo': {'hosted': alternativeServer.url, 'version': '^1.0.0'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'hosted': alternativeServer.url, 'version': '^1.0.0'},
+ },
+ )
+ .create();
await pubGet();
@@ -328,11 +299,13 @@
contains('foo: ^1.0.0 -> ^2.0.0'),
]),
);
- await d.appDir(
- dependencies: {
- 'foo': {'hosted': alternativeServer.url, 'version': '^2.0.0'},
- },
- ).validate();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'hosted': alternativeServer.url, 'version': '^2.0.0'},
+ },
+ )
+ .validate();
});
});
}
diff --git a/test/upgrade/upgrade_tighten_test.dart b/test/upgrade/upgrade_tighten_test.dart
index f720bc1..9776be2 100644
--- a/test/upgrade/upgrade_tighten_test.dart
+++ b/test/upgrade/upgrade_tighten_test.dart
@@ -9,90 +9,90 @@
void main() {
group('pub upgrade --tighten', () {
- test('updates dependency constraints lower bounds and shows summary report',
- () async {
- final server = await servePackages();
-
- server.serve('foo', '1.0.0');
- server.serve('bar', '0.2.0');
- server.serve('baz', '0.2.0');
- server.serve('boo', '1.0.0');
-
- await d.dir('boom', [d.libPubspec('boom', '1.0.0')]).create();
- await d.dir('boom2', [d.libPubspec('boom2', '1.5.0')]).create();
-
- await d.appDir(
- dependencies: {
- 'foo': '^1.0.0',
- 'bar': '>=0.1.2 <3.0.0',
- 'baz': '0.2.0',
- 'boo': 'any',
- 'boom': {'path': '../boom'},
- 'boom2': {'path': '../boom2', 'version': '^1.0.0'},
- },
- ).create();
-
- await pubGet();
-
- server.serve('foo', '1.5.0');
- server.serve('bar', '1.5.0');
-
- await pubUpgrade(
- args: ['--tighten', '--dry-run'],
- output: allOf([
- contains('Would change 4 constraints in pubspec.yaml:'),
- contains('foo: ^1.0.0 -> ^1.5.0'),
- contains('bar: >=0.1.2 <3.0.0 -> >=1.5.0 <3.0.0'),
- contains('boo: any -> ^1.0.0'),
- contains('boom2: ^1.0.0 -> ^1.5.0'),
- ]),
- );
-
- await pubUpgrade(
- args: ['--tighten'],
- output: allOf([
- contains('Changed 4 constraints in pubspec.yaml:'),
- contains('foo: ^1.0.0 -> ^1.5.0'),
- contains('bar: >=0.1.2 <3.0.0 -> >=1.5.0 <3.0.0'),
- contains('boo: any -> ^1.0.0'),
- contains('boom2: ^1.0.0 -> ^1.5.0'),
- ]),
- );
-
- await d.appDir(
- dependencies: {
- 'foo': '^1.5.0',
- 'bar': '>=1.5.0 <3.0.0',
- 'baz': '0.2.0',
- 'boo': '^1.0.0',
- 'boom': {'path': '../boom'},
- 'boom2': {'path': '../boom2', 'version': '^1.5.0'},
- },
- ).validate();
- await d.appPackageConfigFile([
- d.packageConfigEntry(name: 'foo', version: '1.5.0'),
- d.packageConfigEntry(name: 'bar', version: '1.5.0'),
- d.packageConfigEntry(name: 'baz', version: '0.2.0'),
- d.packageConfigEntry(name: 'boo', version: '1.0.0'),
- d.packageConfigEntry(name: 'boom', path: '../boom'),
- d.packageConfigEntry(name: 'boom2', path: '../boom2'),
- ]).validate();
- });
-
test(
- '--major-versions updates dependency constraints lower bounds '
+ 'updates dependency constraints lower bounds and shows summary report',
+ () async {
+ final server = await servePackages();
+
+ server.serve('foo', '1.0.0');
+ server.serve('bar', '0.2.0');
+ server.serve('baz', '0.2.0');
+ server.serve('boo', '1.0.0');
+
+ await d.dir('boom', [d.libPubspec('boom', '1.0.0')]).create();
+ await d.dir('boom2', [d.libPubspec('boom2', '1.5.0')]).create();
+
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': '^1.0.0',
+ 'bar': '>=0.1.2 <3.0.0',
+ 'baz': '0.2.0',
+ 'boo': 'any',
+ 'boom': {'path': '../boom'},
+ 'boom2': {'path': '../boom2', 'version': '^1.0.0'},
+ },
+ )
+ .create();
+
+ await pubGet();
+
+ server.serve('foo', '1.5.0');
+ server.serve('bar', '1.5.0');
+
+ await pubUpgrade(
+ args: ['--tighten', '--dry-run'],
+ output: allOf([
+ contains('Would change 4 constraints in pubspec.yaml:'),
+ contains('foo: ^1.0.0 -> ^1.5.0'),
+ contains('bar: >=0.1.2 <3.0.0 -> >=1.5.0 <3.0.0'),
+ contains('boo: any -> ^1.0.0'),
+ contains('boom2: ^1.0.0 -> ^1.5.0'),
+ ]),
+ );
+
+ await pubUpgrade(
+ args: ['--tighten'],
+ output: allOf([
+ contains('Changed 4 constraints in pubspec.yaml:'),
+ contains('foo: ^1.0.0 -> ^1.5.0'),
+ contains('bar: >=0.1.2 <3.0.0 -> >=1.5.0 <3.0.0'),
+ contains('boo: any -> ^1.0.0'),
+ contains('boom2: ^1.0.0 -> ^1.5.0'),
+ ]),
+ );
+
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': '^1.5.0',
+ 'bar': '>=1.5.0 <3.0.0',
+ 'baz': '0.2.0',
+ 'boo': '^1.0.0',
+ 'boom': {'path': '../boom'},
+ 'boom2': {'path': '../boom2', 'version': '^1.5.0'},
+ },
+ )
+ .validate();
+ await d.appPackageConfigFile([
+ d.packageConfigEntry(name: 'foo', version: '1.5.0'),
+ d.packageConfigEntry(name: 'bar', version: '1.5.0'),
+ d.packageConfigEntry(name: 'baz', version: '0.2.0'),
+ d.packageConfigEntry(name: 'boo', version: '1.0.0'),
+ d.packageConfigEntry(name: 'boom', path: '../boom'),
+ d.packageConfigEntry(name: 'boom2', path: '../boom2'),
+ ]).validate();
+ },
+ );
+
+ test('--major-versions updates dependency constraints lower bounds '
'and shows summary report', () async {
final server = await servePackages();
server.serve('foo', '1.0.0');
server.serve('bar', '1.0.0');
- await d.appDir(
- dependencies: {
- 'foo': '^1.0.0',
- 'bar': '^1.0.0',
- },
- ).create();
+ await d.appDir(dependencies: {'foo': '^1.0.0', 'bar': '^1.0.0'}).create();
await pubGet();
@@ -108,12 +108,9 @@
]),
);
- await d.appDir(
- dependencies: {
- 'foo': '^2.0.0',
- 'bar': '^1.5.0',
- },
- ).validate();
+ await d
+ .appDir(dependencies: {'foo': '^2.0.0', 'bar': '^1.5.0'})
+ .validate();
await d.appPackageConfigFile([
d.packageConfigEntry(name: 'foo', version: '2.0.0'),
d.packageConfigEntry(name: 'bar', version: '1.5.0'),
@@ -126,12 +123,7 @@
server.serve('foo', '1.0.0');
server.serve('bar', '1.0.0');
- await d.appDir(
- dependencies: {
- 'foo': '^1.0.0',
- 'bar': '^1.0.0',
- },
- ).create();
+ await d.appDir(dependencies: {'foo': '^1.0.0', 'bar': '^1.0.0'}).create();
await pubGet();
@@ -146,12 +138,9 @@
]),
);
- await d.appDir(
- dependencies: {
- 'foo': '^1.5.0',
- 'bar': '^1.0.0',
- },
- ).validate();
+ await d
+ .appDir(dependencies: {'foo': '^1.5.0', 'bar': '^1.0.0'})
+ .validate();
await d.appPackageConfigFile([
d.packageConfigEntry(name: 'foo', version: '1.5.0'),
d.packageConfigEntry(name: 'bar', version: '1.0.0'),
diff --git a/test/upgrade/upgrade_transitive_test.dart b/test/upgrade/upgrade_transitive_test.dart
index 864972b..fe40382 100644
--- a/test/upgrade/upgrade_transitive_test.dart
+++ b/test/upgrade/upgrade_transitive_test.dart
@@ -8,31 +8,26 @@
import '../test_pub.dart';
void main() {
- test('without --unlock-transitive, the transitive dependencies stay locked',
- () async {
- final server = await servePackages();
- server.serve('foo', '1.0.0', deps: {'bar': '^1.0.0'});
- server.serve('bar', '1.0.0');
+ test(
+ 'without --unlock-transitive, the transitive dependencies stay locked',
+ () async {
+ final server = await servePackages();
+ server.serve('foo', '1.0.0', deps: {'bar': '^1.0.0'});
+ server.serve('bar', '1.0.0');
- await d.appDir(
- dependencies: {
- 'foo': '^1.0.0',
- },
- ).create();
+ await d.appDir(dependencies: {'foo': '^1.0.0'}).create();
- await pubGet(output: contains('+ foo 1.0.0'));
+ await pubGet(output: contains('+ foo 1.0.0'));
- server.serve('foo', '1.5.0', deps: {'bar': '^1.0.0'});
- server.serve('bar', '1.5.0');
+ server.serve('foo', '1.5.0', deps: {'bar': '^1.0.0'});
+ server.serve('bar', '1.5.0');
- await pubUpgrade(
- args: ['foo'],
- output: allOf(
- contains('> foo 1.5.0'),
- isNot(contains('> bar')),
- ),
- );
- });
+ await pubUpgrade(
+ args: ['foo'],
+ output: allOf(contains('> foo 1.5.0'), isNot(contains('> bar'))),
+ );
+ },
+ );
test('`--unlock-transitive` dependencies gets unlocked', () async {
final server = await servePackages();
@@ -40,12 +35,7 @@
server.serve('bar', '1.0.0');
server.serve('baz', '1.0.0');
- await d.appDir(
- dependencies: {
- 'foo': '^1.0.0',
- 'baz': '^1.0.0',
- },
- ).create();
+ await d.appDir(dependencies: {'foo': '^1.0.0', 'baz': '^1.0.0'}).create();
await pubGet(output: contains('+ foo 1.0.0'));
@@ -66,47 +56,35 @@
});
test(
- '`--major-versions` without `--unlock-transitive` does not allow '
- 'transitive dependencies to be upgraded along with the named packages',
- () async {
- final server = await servePackages();
- server.serve('foo', '1.0.0', deps: {'bar': '^1.0.0'});
- server.serve('bar', '1.0.0');
+ '`--major-versions` without `--unlock-transitive` does not allow '
+ 'transitive dependencies to be upgraded along with the named packages',
+ () async {
+ final server = await servePackages();
+ server.serve('foo', '1.0.0', deps: {'bar': '^1.0.0'});
+ server.serve('bar', '1.0.0');
- await d.appDir(
- dependencies: {
- 'foo': '^1.0.0',
- },
- ).create();
+ await d.appDir(dependencies: {'foo': '^1.0.0'}).create();
- await pubGet(output: contains('+ foo 1.0.0'));
+ await pubGet(output: contains('+ foo 1.0.0'));
- server.serve('foo', '2.0.0', deps: {'bar': '^1.0.0'});
- server.serve('bar', '1.5.0');
+ server.serve('foo', '2.0.0', deps: {'bar': '^1.0.0'});
+ server.serve('bar', '1.5.0');
- await pubUpgrade(
- args: ['--major-versions', 'foo'],
- output: allOf(
- contains('> foo 2.0.0'),
- isNot(contains('bar 1.5.0')),
- ),
- );
- });
+ await pubUpgrade(
+ args: ['--major-versions', 'foo'],
+ output: allOf(contains('> foo 2.0.0'), isNot(contains('bar 1.5.0'))),
+ );
+ },
+ );
- test(
- '`--unlock-transitive --major-versions` allows transitive dependencies '
+ test('`--unlock-transitive --major-versions` allows transitive dependencies '
'be upgraded along with the named packages', () async {
final server = await servePackages();
server.serve('foo', '1.0.0', deps: {'bar': '^1.0.0'});
server.serve('bar', '1.0.0');
server.serve('baz', '1.0.0');
- await d.appDir(
- dependencies: {
- 'foo': '^1.0.0',
- 'baz': '^1.0.0',
- },
- ).create();
+ await d.appDir(dependencies: {'foo': '^1.0.0', 'baz': '^1.0.0'}).create();
await pubGet(output: contains('+ foo 1.0.0'));
diff --git a/test/utils_test.dart b/test/utils_test.dart
index 10932d5..81b647e 100644
--- a/test/utils_test.dart
+++ b/test/utils_test.dart
@@ -118,8 +118,10 @@
});
group('uuid', () {
- final uuidRegexp = RegExp('^[0-9A-F]{8}-[0-9A-F]{4}-4[0-9A-F]{3}-'
- r'[8-9A-B][0-9A-F]{3}-[0-9A-F]{12}$');
+ final uuidRegexp = RegExp(
+ '^[0-9A-F]{8}-[0-9A-F]{4}-4[0-9A-F]{3}-'
+ r'[8-9A-B][0-9A-F]{3}-[0-9A-F]{12}$',
+ );
test('min value is valid', () {
final uuid = createUuid(List<int>.filled(16, 0));
diff --git a/test/validator/analyze_test.dart b/test/validator/analyze_test.dart
index 6d62230..e7790ad 100644
--- a/test/validator/analyze_test.dart
+++ b/test/validator/analyze_test.dart
@@ -10,17 +10,19 @@
import 'utils.dart';
void main() {
- test('should consider a package valid if it contains no warnings or errors',
- () async {
- await d.dir(appPath, [
- d.validPubspec(),
- d.file('LICENSE', 'Eh, do what you want.'),
- d.file('README.md', "This package isn't real."),
- d.file('CHANGELOG.md', '# 1.0.0\nFirst version\n'),
- d.dir('lib', [d.file('test_pkg.dart', 'int i = 1;')]),
- ]).create();
- await expectValidation();
- });
+ test(
+ 'should consider a package valid if it contains no warnings or errors',
+ () async {
+ await d.dir(appPath, [
+ d.validPubspec(),
+ d.file('LICENSE', 'Eh, do what you want.'),
+ d.file('README.md', "This package isn't real."),
+ d.file('CHANGELOG.md', '# 1.0.0\nFirst version\n'),
+ d.dir('lib', [d.file('test_pkg.dart', 'int i = 1;')]),
+ ]).create();
+ await expectValidation();
+ },
+ );
test('should handle having no code in the analyzed directories', () async {
await d.dir(appPath, [
@@ -33,12 +35,12 @@
await expectValidation();
});
- test(
- 'follows analysis_options.yaml '
+ test('follows analysis_options.yaml '
'and should not warn if package contains only infos', () async {
await d.dir(appPath, [
d.libPubspec(
- 'test_pkg', '1.0.0',
+ 'test_pkg',
+ '1.0.0',
sdk: '^3.0.0',
// Using http where https is recommended.
extras: {'repository': 'http://repo.org/'},
@@ -56,12 +58,12 @@
await expectValidation();
});
- test(
- 'follows analysis_options.yaml and '
+ test('follows analysis_options.yaml and '
'should warn if package contains warnings in pubspec.yaml', () async {
await d.dir(appPath, [
d.libPubspec(
- 'test_pkg', '1.0.0',
+ 'test_pkg',
+ '1.0.0',
sdk: '^3.0.0',
// Using http where https is recommended.
extras: {'repository': 'http://repo.org/'},
@@ -92,56 +94,60 @@
});
test(
- 'should consider a package valid even if it contains errors in the example/ sub-folder',
- () async {
- await d.dir(appPath, [
- d.validPubspec(),
- d.file('LICENSE', 'Eh, do what you want.'),
- d.file('README.md', "This package isn't real."),
- d.file('CHANGELOG.md', '# 1.0.0\nFirst version\n'),
- d.dir('lib', [d.file('test_pkg.dart', 'int i = 1;')]),
- d.dir('example', [
- d.file('test_pkg.dart', '''
+ 'should consider a package valid even if it contains errors in the example/ sub-folder',
+ () async {
+ await d.dir(appPath, [
+ d.validPubspec(),
+ d.file('LICENSE', 'Eh, do what you want.'),
+ d.file('README.md', "This package isn't real."),
+ d.file('CHANGELOG.md', '# 1.0.0\nFirst version\n'),
+ d.dir('lib', [d.file('test_pkg.dart', 'int i = 1;')]),
+ d.dir('example', [
+ d.file('test_pkg.dart', '''
void main() {
final a = 10; // Unused.
}
'''),
- ]),
- ]).create();
+ ]),
+ ]).create();
- await expectValidation();
- });
+ await expectValidation();
+ },
+ );
test(
- 'should warn if package contains errors in bin/, and works with --directory',
- () async {
- await d.dir(appPath, [
- d.validPubspec(),
- d.file('LICENSE', 'Eh, do what you want.'),
- d.file('README.md', "This package isn't real."),
- d.file('CHANGELOG.md', '# 1.0.0\nFirst version\n'),
- d.file('build.dart', 'void main(){}'),
- d.file('link.dart', 'void main(){}'),
- d.dir('lib', [d.file('test_pkg.dart', 'int i = 1;')]),
- d.dir('bin', [
- d.file('test_pkg.dart', '''
+ 'should warn if package contains errors in bin/, and works with --directory',
+ () async {
+ await d.dir(appPath, [
+ d.validPubspec(),
+ d.file('LICENSE', 'Eh, do what you want.'),
+ d.file('README.md', "This package isn't real."),
+ d.file('CHANGELOG.md', '# 1.0.0\nFirst version\n'),
+ d.file('build.dart', 'void main(){}'),
+ d.file('link.dart', 'void main(){}'),
+ d.dir('lib', [d.file('test_pkg.dart', 'int i = 1;')]),
+ d.dir('bin', [
+ d.file('test_pkg.dart', '''
void main() {
// Missing }
'''),
- ]),
- ]).create();
+ ]),
+ ]).create();
- await expectValidation(
- message: allOf([
- contains('`dart analyze` found the following issue(s):'),
- contains('Analyzing bin, lib, build.dart, link.dart, pubspec.yaml...'),
- contains('error -'),
- contains("Expected to find '}'."),
- contains('Package has 1 warning.'),
- ]),
- exitCode: DATA,
- extraArgs: ['--directory', appPath],
- workingDirectory: d.sandbox,
- );
- });
+ await expectValidation(
+ message: allOf([
+ contains('`dart analyze` found the following issue(s):'),
+ contains(
+ 'Analyzing bin, lib, build.dart, link.dart, pubspec.yaml...',
+ ),
+ contains('error -'),
+ contains("Expected to find '}'."),
+ contains('Package has 1 warning.'),
+ ]),
+ exitCode: DATA,
+ extraArgs: ['--directory', appPath],
+ workingDirectory: d.sandbox,
+ );
+ },
+ );
}
diff --git a/test/validator/changelog_test.dart b/test/validator/changelog_test.dart
index 2c0de45..3328318 100644
--- a/test/validator/changelog_test.dart
+++ b/test/validator/changelog_test.dart
@@ -32,9 +32,7 @@
group('should consider a package invalid if it', () {
test('has no CHANGELOG', () async {
- await d.dir(appPath, [
- d.libPubspec('test_pkg', '1.0.0'),
- ]).create();
+ await d.dir(appPath, [d.libPubspec('test_pkg', '1.0.0')]).create();
await expectValidationDeprecated(changelog, warnings: isNotEmpty);
});
@@ -51,19 +49,21 @@
await expectValidationDeprecated(changelog, warnings: isNotEmpty);
});
- test('has a CHANGELOG that doesn\'t include the current package version',
- () async {
- await d.dir(appPath, [
- d.libPubspec('test_pkg', '1.0.1'),
- d.file('CHANGELOG.md', '''
+ test(
+ 'has a CHANGELOG that doesn\'t include the current package version',
+ () async {
+ await d.dir(appPath, [
+ d.libPubspec('test_pkg', '1.0.1'),
+ d.file('CHANGELOG.md', '''
# 1.0.0
* Solves traveling salesman problem in polynomial time.
* Passes Turing test.
'''),
- ]).create();
- await expectValidationDeprecated(changelog, warnings: isNotEmpty);
- });
+ ]).create();
+ await expectValidationDeprecated(changelog, warnings: isNotEmpty);
+ },
+ );
test('has a CHANGELOG with invalid utf-8', () async {
await d.dir(appPath, [
diff --git a/test/validator/dependency_override_test.dart b/test/validator/dependency_override_test.dart
index a3fd574..8f877da 100644
--- a/test/validator/dependency_override_test.dart
+++ b/test/validator/dependency_override_test.dart
@@ -9,8 +9,7 @@
import 'utils.dart';
void main() {
- test(
- 'should consider a package valid if it has dev dependency '
+ test('should consider a package valid if it has dev dependency '
'overrides', () async {
final server = await servePackages();
server.serve('foo', '3.0.0');
@@ -78,10 +77,7 @@
d.validPubspec(
extras: {
'dev_dependencies': {'foo': '^1.0.0'},
- 'dependency_overrides': {
- 'foo': '^3.0.0',
- 'bar': '^3.0.0',
- },
+ 'dependency_overrides': {'foo': '^3.0.0', 'bar': '^3.0.0'},
},
),
]).create();
diff --git a/test/validator/dependency_test.dart b/test/validator/dependency_test.dart
index 7823d45..a29d988 100644
--- a/test/validator/dependency_test.dart
+++ b/test/validator/dependency_test.dart
@@ -99,29 +99,31 @@
await expectValidation();
});
- test('has a git path dependency with an appropriate SDK constraint',
- () async {
- await servePackages();
- await d.git('foo', [
- d.dir('subdir', [d.libPubspec('foo', '1.0.0')]),
- ]).create();
- await package(
- deps: {
- 'foo': {
- 'git': {'url': '../foo', 'path': 'subdir'},
+ test(
+ 'has a git path dependency with an appropriate SDK constraint',
+ () async {
+ await servePackages();
+ await d.git('foo', [
+ d.dir('subdir', [d.libPubspec('foo', '1.0.0')]),
+ ]).create();
+ await package(
+ deps: {
+ 'foo': {
+ 'git': {'url': '../foo', 'path': 'subdir'},
+ },
},
- },
- ).create();
+ ).create();
- // We should get a warning for using a git dependency, but not an error.
- await expectValidationWarning(
- allOf([
- contains(' foo: any'),
- contains("Publishable packages can't have 'git' dependencies"),
- ]),
- count: 2,
- );
- });
+ // We should get a warning for using a git dependency, but not an error.
+ await expectValidationWarning(
+ allOf([
+ contains(' foo: any'),
+ contains("Publishable packages can't have 'git' dependencies"),
+ ]),
+ count: 2,
+ );
+ },
+ );
test('depends on Flutter from an SDK source', () async {
await d.dir('flutter', [d.flutterVersion('1.2.3')]).create();
@@ -137,44 +139,34 @@
);
});
- test(
- 'depends on a package from Flutter '
- 'with an appropriate Dart SDK constraint',
- () async {
- await d.dir('flutter', [d.flutterVersion('1.2.3')]).create();
- await flutterPackage('foo').create();
- await package(
- deps: {
- 'foo': {'sdk': 'flutter', 'version': '>=1.2.3 <2.0.0'},
- },
- ).create();
+ test('depends on a package from Flutter '
+ 'with an appropriate Dart SDK constraint', () async {
+ await d.dir('flutter', [d.flutterVersion('1.2.3')]).create();
+ await flutterPackage('foo').create();
+ await package(
+ deps: {
+ 'foo': {'sdk': 'flutter', 'version': '>=1.2.3 <2.0.0'},
+ },
+ ).create();
- await expectValidation(
- environment: {
- 'FLUTTER_ROOT': p.join(d.sandbox, 'flutter'),
- },
- );
- },
- );
+ await expectValidation(
+ environment: {'FLUTTER_ROOT': p.join(d.sandbox, 'flutter')},
+ );
+ });
- test(
- 'depends on a package from Fuchsia '
- 'with an appropriate Dart SDK constraint',
- () async {
- await fuchsiaPackage('foo', sdk: '^3.0.0').create();
- await package(
- deps: {
- 'foo': {'sdk': 'fuchsia', 'version': '>=1.2.3 <2.0.0'},
- },
- ).create();
+ test('depends on a package from Fuchsia '
+ 'with an appropriate Dart SDK constraint', () async {
+ await fuchsiaPackage('foo', sdk: '^3.0.0').create();
+ await package(
+ deps: {
+ 'foo': {'sdk': 'fuchsia', 'version': '>=1.2.3 <2.0.0'},
+ },
+ ).create();
- await expectValidation(
- environment: {
- 'FUCHSIA_DART_SDK_ROOT': p.join(d.sandbox, 'fuchsia'),
- },
- );
- },
- );
+ await expectValidation(
+ environment: {'FUCHSIA_DART_SDK_ROOT': p.join(d.sandbox, 'fuchsia')},
+ );
+ });
});
group('should consider a package invalid if it', () {
@@ -183,92 +175,74 @@
group('has a path dependency', () {
group('where a hosted version exists', () {
test('and should suggest the hosted primary version', () async {
- await d.dir('foo', [
- d.libPubspec('foo', '1.2.3'),
- ]).create();
+ await d.dir('foo', [d.libPubspec('foo', '1.2.3')]).create();
await setUpDependency(
{'path': p.join(d.sandbox, 'foo')},
hostedVersions: ['3.0.0-pre', '2.0.0', '1.0.0'],
);
- await expectValidationError(
- ' foo: ^2.0.0',
- );
+ await expectValidationError(' foo: ^2.0.0');
});
- test(
- 'and should suggest the hosted prerelease version '
+ test('and should suggest the hosted prerelease version '
"if it's the only version available", () async {
- await d.dir('foo', [
- d.libPubspec('foo', '1.2.3'),
- ]).create();
+ await d.dir('foo', [d.libPubspec('foo', '1.2.3')]).create();
await setUpDependency(
{'path': p.join(d.sandbox, 'foo')},
hostedVersions: ['3.0.0-pre', '2.0.0-pre'],
);
- await expectValidationError(
- ' foo: ^3.0.0-pre',
- );
+ await expectValidationError(' foo: ^3.0.0-pre');
});
- test('and should suggest a tighter constraint if primary is pre-1.0.0',
- () async {
- await d.dir('foo', [
- d.libPubspec('foo', '1.2.3'),
- ]).create();
- await setUpDependency(
- {'path': p.join(d.sandbox, 'foo')},
- hostedVersions: ['0.0.1', '0.0.2'],
- );
- await expectValidationError(
- ' foo: ^0.0.2',
- );
- });
+ test(
+ 'and should suggest a tighter constraint if primary is pre-1.0.0',
+ () async {
+ await d.dir('foo', [d.libPubspec('foo', '1.2.3')]).create();
+ await setUpDependency(
+ {'path': p.join(d.sandbox, 'foo')},
+ hostedVersions: ['0.0.1', '0.0.2'],
+ );
+ await expectValidationError(' foo: ^0.0.2');
+ },
+ );
});
group('where no hosted version exists', () {
test("and should use the other source's version", () async {
- await d.dir('foo', [
- d.libPubspec('foo', '1.2.3'),
- ]).create();
+ await d.dir('foo', [d.libPubspec('foo', '1.2.3')]).create();
await setUpDependency({
'path': p.join(d.sandbox, 'foo'),
'version': '>=1.0.0 <2.0.0',
});
- await expectValidationError(
- ' foo: ">=1.0.0 <2.0.0"',
- );
+ await expectValidationError(' foo: ">=1.0.0 <2.0.0"');
});
- test(
- "and should use the other source's unquoted version if "
+ test("and should use the other source's unquoted version if "
'concrete', () async {
- await d.dir('foo', [
- d.libPubspec('foo', '0.2.3'),
- ]).create();
- await setUpDependency(
- {'path': p.join(d.sandbox, 'foo'), 'version': '0.2.3'},
- );
- await expectValidationError(
- ' foo: 0.2.3',
- );
+ await d.dir('foo', [d.libPubspec('foo', '0.2.3')]).create();
+ await setUpDependency({
+ 'path': p.join(d.sandbox, 'foo'),
+ 'version': '0.2.3',
+ });
+ await expectValidationError(' foo: 0.2.3');
});
});
});
group('has an unconstrained dependency', () {
group('with a lockfile', () {
- test('and it should suggest a constraint based on the locked version',
- () async {
- (await servePackages()).serve('foo', '1.2.3');
- await d.dir(appPath, [
- d.libPubspec('test_pkg', '1.0.0', deps: {'foo': 'any'}),
- ]).create();
-
- await expectValidationWarning(' foo: ^1.2.3');
- });
-
test(
- 'and it should suggest a concrete constraint '
+ 'and it should suggest a constraint based on the locked version',
+ () async {
+ (await servePackages()).serve('foo', '1.2.3');
+ await d.dir(appPath, [
+ d.libPubspec('test_pkg', '1.0.0', deps: {'foo': 'any'}),
+ ]).create();
+
+ await expectValidationWarning(' foo: ^1.2.3');
+ },
+ );
+
+ test('and it should suggest a concrete constraint '
'if the locked version is pre-1.0.0', () async {
(await servePackages()).serve('foo', '0.1.2');
@@ -297,19 +271,12 @@
(await servePackages()).serve('foo', '1.2.3-dev');
await d.dir(appPath, [
- d.libPubspec(
- 'test_pkg',
- '1.0.0',
- deps: {'foo': '^1.2.3-dev'},
- ),
+ d.libPubspec('test_pkg', '1.0.0', deps: {'foo': '^1.2.3-dev'}),
]).create();
- await expectValidationWarning(
- 'Packages dependent on a pre-release',
- );
+ await expectValidationWarning('Packages dependent on a pre-release');
});
- test(
- 'with a single-version dependency and it should suggest a '
+ test('with a single-version dependency and it should suggest a '
'constraint based on the version', () async {
(await servePackages()).serve('foo', '1.2.3');
await d.dir(appPath, [
@@ -320,16 +287,13 @@
});
group('has a dependency without a lower bound', () {
- test(
- 'and it should suggest a constraint based on the locked '
+ test('and it should suggest a constraint based on the locked '
'version', () async {
(await servePackages()).serve('foo', '1.2.3');
await d.dir(appPath, [
d.libPubspec('test_pkg', '1.0.0', deps: {'foo': '<3.0.0'}),
- d.file(
- 'pubspec.lock',
- ),
+ d.file('pubspec.lock'),
]).create();
await expectValidationWarning(' foo: ">=1.2.3 <3.0.0"');
@@ -356,8 +320,7 @@
await expectValidationWarning(' foo: ">=1.2.3 <=3.0.0"');
});
- test(
- 'and it should expand the suggested constraint if the '
+ test('and it should expand the suggested constraint if the '
'locked version matches the upper bound', () async {
(await servePackages()).serve('foo', '1.2.3');
@@ -382,15 +345,17 @@
});
group('with a dependency without an upper bound', () {
- test('and it should suggest a constraint based on the lower bound',
- () async {
- (await servePackages()).serve('foo', '1.2.3');
- await d.dir(appPath, [
- d.libPubspec('test_pkg', '1.0.0', deps: {'foo': '>=1.2.3'}),
- ]).create();
+ test(
+ 'and it should suggest a constraint based on the lower bound',
+ () async {
+ (await servePackages()).serve('foo', '1.2.3');
+ await d.dir(appPath, [
+ d.libPubspec('test_pkg', '1.0.0', deps: {'foo': '>=1.2.3'}),
+ ]).create();
- await expectValidationWarning(' foo: ^1.2.3');
- });
+ await expectValidationWarning(' foo: ^1.2.3');
+ },
+ );
test('and it should preserve the lower-bound operator', () async {
(await servePackages()).serve('foo', '1.2.4');
diff --git a/test/validator/devtools_extension_test.dart b/test/validator/devtools_extension_test.dart
index 8b5dd1d..3076b39 100644
--- a/test/validator/devtools_extension_test.dart
+++ b/test/validator/devtools_extension_test.dart
@@ -25,10 +25,7 @@
await d.validPackage().create();
await d.dir(appPath, [
d.dir('extension', [
- d.dir('devtools', [
- d.file('config.yaml'),
- d.dir('build', []),
- ]),
+ d.dir('devtools', [d.file('config.yaml'), d.dir('build', [])]),
]),
]).create();
await expectValidationWarning(
diff --git a/test/validator/directory_test.dart b/test/validator/directory_test.dart
index c58002b..0bfeeb7 100644
--- a/test/validator/directory_test.dart
+++ b/test/validator/directory_test.dart
@@ -38,8 +38,7 @@
});
});
- group(
- 'should consider a package invalid if it has a top-level directory '
+ group('should consider a package invalid if it has a top-level directory '
'named', () {
setUp(d.validPackage().create);
diff --git a/test/validator/flutter_constraint_test.dart b/test/validator/flutter_constraint_test.dart
index c2c63f5..1a518cc 100644
--- a/test/validator/flutter_constraint_test.dart
+++ b/test/validator/flutter_constraint_test.dart
@@ -11,9 +11,7 @@
await runPub(
output: output,
args: ['publish', '--dry-run'],
- environment: {
- 'FLUTTER_ROOT': fakeFlutterRoot.io.path,
- },
+ environment: {'FLUTTER_ROOT': fakeFlutterRoot.io.path},
workingDirectory: d.path(appPath),
exitCode: exitCode,
);
@@ -21,16 +19,15 @@
late d.DirectoryDescriptor fakeFlutterRoot;
-Future<void> setup({
- String? flutterConstraint,
-}) async {
+Future<void> setup({String? flutterConstraint}) async {
fakeFlutterRoot = d.dir('fake_flutter_root', [d.flutterVersion('1.23.0')]);
await fakeFlutterRoot.create();
await d.validPackage().create();
await d.dir(appPath, [
d.pubspec({
'name': 'test_pkg',
- 'description': 'A just long enough description '
+ 'description':
+ 'A just long enough description '
'to fit the requirement of 60 characters',
'homepage': 'https://example.com/',
'version': '1.0.0',
@@ -40,9 +37,7 @@
},
}),
]).create();
- await pubGet(
- environment: {'FLUTTER_ROOT': fakeFlutterRoot.io.path},
- );
+ await pubGet(environment: {'FLUTTER_ROOT': fakeFlutterRoot.io.path});
}
void main() {
@@ -58,9 +53,7 @@
await setup(flutterConstraint: '>=1.20.0 <=2.0.0');
await expectValidation(
allOf([
- contains(
- 'You can replace that with just the lower bound: `>=1.20.0`.',
- ),
+ contains('You can replace that with just the lower bound: `>=1.20.0`.'),
contains('Package has 1 warning.'),
]),
65,
diff --git a/test/validator/flutter_plugin_format_test.dart b/test/validator/flutter_plugin_format_test.dart
index db537e5..f5c9590 100644
--- a/test/validator/flutter_plugin_format_test.dart
+++ b/test/validator/flutter_plugin_format_test.dart
@@ -20,52 +20,61 @@
});
test('is a Flutter 1.9.0 package', () async {
- final pkg = packageMap('test_pkg', '1.0.0', {
- 'flutter': {'sdk': 'flutter'},
- }, {}, {
- 'sdk': '>=2.0.0 <3.0.0',
- 'flutter': '>=1.9.0 <2.0.0',
- });
+ final pkg = packageMap(
+ 'test_pkg',
+ '1.0.0',
+ {
+ 'flutter': {'sdk': 'flutter'},
+ },
+ {},
+ {'sdk': '>=2.0.0 <3.0.0', 'flutter': '>=1.9.0 <2.0.0'},
+ );
await d.dir(appPath, [d.pubspec(pkg), d.dir('ios')]).create();
await expectValidationDeprecated(flutterPluginFormat);
});
test('is a Flutter 1.10.0 package', () async {
- final pkg = packageMap('test_pkg', '1.0.0', {
- 'flutter': {'sdk': 'flutter'},
- }, {}, {
- 'sdk': '>=2.0.0 <3.0.0',
- 'flutter': '>=1.10.0 <2.0.0',
- });
+ final pkg = packageMap(
+ 'test_pkg',
+ '1.0.0',
+ {
+ 'flutter': {'sdk': 'flutter'},
+ },
+ {},
+ {'sdk': '>=2.0.0 <3.0.0', 'flutter': '>=1.10.0 <2.0.0'},
+ );
await d.dir(appPath, [d.pubspec(pkg), d.dir('ios')]).create();
await expectValidationDeprecated(flutterPluginFormat);
});
test('is a Flutter 1.10.0-0 package', () async {
- final pkg = packageMap('test_pkg', '1.0.0', {
- 'flutter': {'sdk': 'flutter'},
- }, {}, {
- 'sdk': '>=2.0.0 <3.0.0',
- 'flutter': '>=1.10.0-0 <2.0.0',
- });
+ final pkg = packageMap(
+ 'test_pkg',
+ '1.0.0',
+ {
+ 'flutter': {'sdk': 'flutter'},
+ },
+ {},
+ {'sdk': '>=2.0.0 <3.0.0', 'flutter': '>=1.10.0-0 <2.0.0'},
+ );
await d.dir(appPath, [d.pubspec(pkg), d.dir('ios')]).create();
await expectValidationDeprecated(flutterPluginFormat);
});
test('is a flutter 1.10.0 plugin with the new format', () async {
- final pkg = packageMap('test_pkg', '1.0.0', {
- 'flutter': {'sdk': 'flutter'},
- }, {}, {
- 'sdk': '>=2.0.0 <3.0.0',
- 'flutter': '>=1.10.0 <2.0.0',
- });
+ final pkg = packageMap(
+ 'test_pkg',
+ '1.0.0',
+ {
+ 'flutter': {'sdk': 'flutter'},
+ },
+ {},
+ {'sdk': '>=2.0.0 <3.0.0', 'flutter': '>=1.10.0 <2.0.0'},
+ );
pkg['flutter'] = {
'plugin': {
'platforms': {
- 'ios': {
- 'classPrefix': 'FLT',
- 'pluginClass': 'SamplePlugin',
- },
+ 'ios': {'classPrefix': 'FLT', 'pluginClass': 'SamplePlugin'},
},
},
};
@@ -76,22 +85,22 @@
group('should consider a package invalid if it', () {
test('is a flutter plugin with old and new format', () async {
- final pkg = packageMap('test_pkg', '1.0.0', {
- 'flutter': {'sdk': 'flutter'},
- }, {}, {
- 'sdk': '>=2.0.0 <3.0.0',
- 'flutter': '>=1.9.0 <2.0.0',
- });
+ final pkg = packageMap(
+ 'test_pkg',
+ '1.0.0',
+ {
+ 'flutter': {'sdk': 'flutter'},
+ },
+ {},
+ {'sdk': '>=2.0.0 <3.0.0', 'flutter': '>=1.9.0 <2.0.0'},
+ );
pkg['flutter'] = {
'plugin': {
'androidPackage': 'io.flutter.plugins.myplugin',
'iosPrefix': 'FLT',
'pluginClass': 'MyPlugin',
'platforms': {
- 'ios': {
- 'classPrefix': 'FLT',
- 'pluginClass': 'SamplePlugin',
- },
+ 'ios': {'classPrefix': 'FLT', 'pluginClass': 'SamplePlugin'},
},
},
};
@@ -107,12 +116,15 @@
});
test('is a flutter 1.9.0 plugin with old format', () async {
- final pkg = packageMap('test_pkg', '1.0.0', {
- 'flutter': {'sdk': 'flutter'},
- }, {}, {
- 'sdk': '>=2.0.0 <3.0.0',
- 'flutter': '>=1.9.0 <2.0.0',
- });
+ final pkg = packageMap(
+ 'test_pkg',
+ '1.0.0',
+ {
+ 'flutter': {'sdk': 'flutter'},
+ },
+ {},
+ {'sdk': '>=2.0.0 <3.0.0', 'flutter': '>=1.9.0 <2.0.0'},
+ );
pkg['flutter'] = {
'plugin': {
'androidPackage': 'io.flutter.plugins.myplugin',
@@ -130,19 +142,19 @@
});
test('is a flutter 1.9.0 plugin with new format', () async {
- final pkg = packageMap('test_pkg', '1.0.0', {
- 'flutter': {'sdk': 'flutter'},
- }, {}, {
- 'sdk': '>=2.0.0 <3.0.0',
- 'flutter': '>=1.9.0 <2.0.0',
- });
+ final pkg = packageMap(
+ 'test_pkg',
+ '1.0.0',
+ {
+ 'flutter': {'sdk': 'flutter'},
+ },
+ {},
+ {'sdk': '>=2.0.0 <3.0.0', 'flutter': '>=1.9.0 <2.0.0'},
+ );
pkg['flutter'] = {
'plugin': {
'platforms': {
- 'ios': {
- 'classPrefix': 'FLT',
- 'pluginClass': 'SamplePlugin',
- },
+ 'ios': {'classPrefix': 'FLT', 'pluginClass': 'SamplePlugin'},
},
},
};
@@ -157,21 +169,22 @@
);
});
- test(
- 'is a flutter plugin with only implicit flutter sdk version constraint '
+ test('is a flutter plugin with only implicit '
+ 'flutter sdk version constraint '
'and the new format', () async {
- final pkg = packageMap('test_pkg', '1.0.0', {
- 'flutter': {'sdk': 'flutter'},
- }, {}, {
- 'sdk': '>=2.0.0 <3.0.0',
- });
+ final pkg = packageMap(
+ 'test_pkg',
+ '1.0.0',
+ {
+ 'flutter': {'sdk': 'flutter'},
+ },
+ {},
+ {'sdk': '>=2.0.0 <3.0.0'},
+ );
pkg['flutter'] = {
'plugin': {
'platforms': {
- 'ios': {
- 'classPrefix': 'FLT',
- 'pluginClass': 'SamplePlugin',
- },
+ 'ios': {'classPrefix': 'FLT', 'pluginClass': 'SamplePlugin'},
},
},
};
@@ -193,10 +206,7 @@
pkg['flutter'] = {
'plugin': {
'platforms': {
- 'ios': {
- 'classPrefix': 'FLT',
- 'pluginClass': 'SamplePlugin',
- },
+ 'ios': {'classPrefix': 'FLT', 'pluginClass': 'SamplePlugin'},
},
},
};
@@ -212,19 +222,19 @@
});
test('is a flutter 1.8.0 plugin with new format', () async {
- final pkg = packageMap('test_pkg', '1.0.0', {
- 'flutter': {'sdk': 'flutter'},
- }, {}, {
- 'sdk': '>=2.0.0 <3.0.0',
- 'flutter': '>=1.8.0 <2.0.0',
- });
+ final pkg = packageMap(
+ 'test_pkg',
+ '1.0.0',
+ {
+ 'flutter': {'sdk': 'flutter'},
+ },
+ {},
+ {'sdk': '>=2.0.0 <3.0.0', 'flutter': '>=1.8.0 <2.0.0'},
+ );
pkg['flutter'] = {
'plugin': {
'platforms': {
- 'ios': {
- 'classPrefix': 'FLT',
- 'pluginClass': 'SamplePlugin',
- },
+ 'ios': {'classPrefix': 'FLT', 'pluginClass': 'SamplePlugin'},
},
},
};
@@ -240,19 +250,19 @@
});
test('is a flutter 1.9.999 plugin with new format', () async {
- final pkg = packageMap('test_pkg', '1.0.0', {
- 'flutter': {'sdk': 'flutter'},
- }, {}, {
- 'sdk': '>=2.0.0 <3.0.0',
- 'flutter': '>=1.9.999 <2.0.0',
- });
+ final pkg = packageMap(
+ 'test_pkg',
+ '1.0.0',
+ {
+ 'flutter': {'sdk': 'flutter'},
+ },
+ {},
+ {'sdk': '>=2.0.0 <3.0.0', 'flutter': '>=1.9.999 <2.0.0'},
+ );
pkg['flutter'] = {
'plugin': {
'platforms': {
- 'ios': {
- 'classPrefix': 'FLT',
- 'pluginClass': 'SamplePlugin',
- },
+ 'ios': {'classPrefix': 'FLT', 'pluginClass': 'SamplePlugin'},
},
},
};
diff --git a/test/validator/git_status_test.dart b/test/validator/git_status_test.dart
index 0ca1875..bdfb1d9 100644
--- a/test/validator/git_status_test.dart
+++ b/test/validator/git_status_test.dart
@@ -27,23 +27,24 @@
void main() {
test(
- 'should consider a package valid '
- 'if it contains no modified files (but contains a newly created one)',
- () async {
- await d.git('myapp', [
- ...d.validPackage().contents,
- d.file('foo.txt', 'foo'),
- d.file('.pubignore', 'bob.txt\n'),
- d.file('bob.txt', 'bob'),
- ]).create();
+ 'should consider a package valid '
+ 'if it contains no modified files (but contains a newly created one)',
+ () async {
+ await d.git('myapp', [
+ ...d.validPackage().contents,
+ d.file('foo.txt', 'foo'),
+ d.file('.pubignore', 'bob.txt\n'),
+ d.file('bob.txt', 'bob'),
+ ]).create();
- await d.dir('myapp', [
- d.file('bar.txt', 'bar'), // Create untracked file.
- d.file('bob.txt', 'bob2'), // Modify pub-ignored file.
- ]).create();
+ await d.dir('myapp', [
+ d.file('bar.txt', 'bar'), // Create untracked file.
+ d.file('bob.txt', 'bob2'), // Modify pub-ignored file.
+ ]).create();
- await expectValidation(contains('Package has 0 warnings.'), 0);
- });
+ await expectValidation(contains('Package has 0 warnings.'), 0);
+ },
+ );
test('Warns if files are modified', () async {
await d.git('myapp', [
@@ -51,15 +52,12 @@
d.file('foo.txt', 'foo'),
]).create();
- await d.dir('myapp', [
- d.file('foo.txt', 'foo2'),
- ]).create();
+ await d.dir('myapp', [d.file('foo.txt', 'foo2')]).create();
await expectValidation(
allOf([
contains('Package has 1 warning.'),
- contains(
- '''
+ contains('''
* 1 checked-in file is modified in git.
Usually you want to publish from a clean git state.
@@ -70,8 +68,7 @@
foo.txt
- Run `git status` for more information.''',
- ),
+ Run `git status` for more information.'''),
]),
exit_codes.DATA,
);
@@ -79,10 +76,7 @@
// Stage but do not commit foo.txt. The warning should still be active.
await d.git('myapp').runGit(['add', 'foo.txt']);
await expectValidation(
- allOf([
- contains('Package has 1 warning.'),
- contains('foo.txt'),
- ]),
+ allOf([contains('Package has 1 warning.'), contains('foo.txt')]),
exit_codes.DATA,
);
await d.git('myapp').runGit(['commit', '-m', 'message']);
@@ -98,8 +92,7 @@
await expectValidation(
allOf([
contains('Package has 1 warning.'),
- contains(
- '''
+ contains('''
* 1 checked-in file is modified in git.
Usually you want to publish from a clean git state.
@@ -110,8 +103,7 @@
bar.txt
- Run `git status` for more information.''',
- ),
+ Run `git status` for more information.'''),
]),
exit_codes.DATA,
);
@@ -133,8 +125,7 @@
await expectValidation(
allOf([
contains('Package has 1 warning.'),
- contains(
- '''
+ contains('''
* 1 checked-in file is modified in git.
Usually you want to publish from a clean git state.
@@ -145,8 +136,7 @@
non_ascii_и.txt
- Run `git status` for more information.''',
- ),
+ Run `git status` for more information.'''),
]),
exit_codes.DATA,
);
@@ -179,15 +169,11 @@
]).create();
await expectValidation(
- workingDirectory: p.join(
- d.sandbox,
- appPath,
- ),
+ workingDirectory: p.join(d.sandbox, appPath),
extraArgs: ['-C', 'a'],
allOf([
contains('Package has 1 warning.'),
- contains(
- '''
+ contains('''
* 1 checked-in file is modified in git.
Usually you want to publish from a clean git state.
@@ -198,22 +184,16 @@
a${p.separator}non_ascii_и.txt
- Run `git status` for more information.''',
- ),
+ Run `git status` for more information.'''),
]),
exit_codes.DATA,
);
await expectValidation(
- workingDirectory: p.join(
- d.sandbox,
- appPath,
- 'a',
- ),
+ workingDirectory: p.join(d.sandbox, appPath, 'a'),
allOf([
contains('Package has 1 warning.'),
- contains(
- '''
+ contains('''
* 1 checked-in file is modified in git.
Usually you want to publish from a clean git state.
@@ -224,8 +204,7 @@
non_ascii_и.txt
- Run `git status` for more information.''',
- ),
+ Run `git status` for more information.'''),
]),
exit_codes.DATA,
);
diff --git a/test/validator/gitignore_test.dart b/test/validator/gitignore_test.dart
index 5aabae5..2438b92 100644
--- a/test/validator/gitignore_test.dart
+++ b/test/validator/gitignore_test.dart
@@ -27,8 +27,7 @@
}
void main() {
- test(
- 'should consider a package valid '
+ test('should consider a package valid '
'if it contains no checked in otherwise ignored files', () async {
await d.git('myapp', [
...d.validPackage().contents,
@@ -37,9 +36,7 @@
await expectValidation(contains('Package has 0 warnings.'), 0);
- await d.dir('myapp', [
- d.file('.gitignore', '*.txt'),
- ]).create();
+ await d.dir('myapp', [d.file('.gitignore', '*.txt')]).create();
await expectValidation(
allOf([
@@ -90,13 +87,7 @@
test('Should also consider gitignores from above the package root', () async {
await d.git('reporoot', [
- d.dir(
- 'myapp',
- [
- d.file('foo.txt'),
- ...d.validPackage().contents,
- ],
- ),
+ d.dir('myapp', [d.file('foo.txt'), ...d.validPackage().contents]),
]).create();
final packageRoot = p.join(d.sandbox, 'reporoot', 'myapp');
await pubGet(workingDirectory: packageRoot);
@@ -107,9 +98,7 @@
workingDirectory: packageRoot,
);
- await d.dir('reporoot', [
- d.file('.gitignore', '*.txt'),
- ]).create();
+ await d.dir('reporoot', [d.file('.gitignore', '*.txt')]).create();
await expectValidation(
allOf([
@@ -126,16 +115,13 @@
});
test('Should not follow symlinks', () async {
- await d.git('myapp', [
- ...d.validPackage().contents,
- ]).create();
+ await d.git('myapp', [...d.validPackage().contents]).create();
final packageRoot = p.join(d.sandbox, 'myapp');
await pubGet(workingDirectory: packageRoot);
- Link(p.join(packageRoot, '.abc', 'itself')).createSync(
- packageRoot,
- recursive: true,
- );
+ Link(
+ p.join(packageRoot, '.abc', 'itself'),
+ ).createSync(packageRoot, recursive: true);
await expectValidation(
contains('Package has 0 warnings.'),
@@ -144,20 +130,15 @@
);
});
- test(
- 'Should consider symlinks to be valid files and not list '
+ test('Should consider symlinks to be valid files and not list '
'them as gitignored', () async {
final git = d.git(appPath, [
...d.validPackage().contents,
- d.dir('dir_with_symlink', [
- d.file('.pubignore', 'symlink'),
- ]),
+ d.dir('dir_with_symlink', [d.file('.pubignore', 'symlink')]),
]);
await git.create();
final packageRoot = p.join(d.sandbox, appPath);
- await pubGet(
- workingDirectory: packageRoot,
- );
+ await pubGet(workingDirectory: packageRoot);
await d
.link(
p.join(d.sandbox, appPath, 'dir_with_symlink', 'symlink'),
diff --git a/test/validator/language_version_test.dart b/test/validator/language_version_test.dart
index ed43727..19725e6 100644
--- a/test/validator/language_version_test.dart
+++ b/test/validator/language_version_test.dart
@@ -38,32 +38,30 @@
});
test('opts in to older language versions', () async {
- await setup(
- sdkConstraint: '^3.0.0',
- libraryLanguageVersion: '2.14',
- );
+ await setup(sdkConstraint: '^3.0.0', libraryLanguageVersion: '2.14');
await expectValidation();
});
test('opts in to same language versions', () async {
- await setup(
- sdkConstraint: '^3.0.0',
- libraryLanguageVersion: '3.0',
- );
+ await setup(sdkConstraint: '^3.0.0', libraryLanguageVersion: '3.0');
await expectValidation();
});
- test('opts in to older language version, with non-range constraint',
- () async {
- await setup(sdkConstraint: '3.1.2+3', libraryLanguageVersion: '2.18');
- await expectValidation();
- });
+ test(
+ 'opts in to older language version, with non-range constraint',
+ () async {
+ await setup(sdkConstraint: '3.1.2+3', libraryLanguageVersion: '2.18');
+ await expectValidation();
+ },
+ );
});
group('should warn if it', () {
final currentVersion = Version.parse(Platform.version.split(' ').first);
final nextLanguageVersion =
- LanguageVersion(currentVersion.major, currentVersion.minor + 1)
- .toString();
+ LanguageVersion(
+ currentVersion.major,
+ currentVersion.minor + 1,
+ ).toString();
test('opts in to a newer version.', () async {
await setup(
diff --git a/test/validator/leak_detection_test.dart b/test/validator/leak_detection_test.dart
index 81a00d9..86551cd 100644
--- a/test/validator/leak_detection_test.dart
+++ b/test/validator/leak_detection_test.dart
@@ -33,9 +33,7 @@
d.pubspec({
'name': 'test_pkg',
'version': '1.0.0',
- 'false_secrets': [
- '/lib/test_pkg.dart',
- ],
+ 'false_secrets': ['/lib/test_pkg.dart'],
}),
d.dir('lib', [
d.file('test_pkg.dart', '''
diff --git a/test/validator/pubspec_field_test.dart b/test/validator/pubspec_field_test.dart
index 908167f..107c9ef 100644
--- a/test/validator/pubspec_field_test.dart
+++ b/test/validator/pubspec_field_test.dart
@@ -64,14 +64,16 @@
});
group('should warn if a package', () {
- test('is missing both the "homepage" and the "description" field',
- () async {
- final pkg = packageMap('test_pkg', '1.0.0');
- pkg.remove('homepage');
- await d.dir(appPath, [d.pubspec(pkg)]).create();
+ test(
+ 'is missing both the "homepage" and the "description" field',
+ () async {
+ final pkg = packageMap('test_pkg', '1.0.0');
+ pkg.remove('homepage');
+ await d.dir(appPath, [d.pubspec(pkg)]).create();
- await expectValidationDeprecated(pubspecField, warnings: isNotEmpty);
- });
+ await expectValidationDeprecated(pubspecField, warnings: isNotEmpty);
+ },
+ );
});
group('should consider a package invalid if it', () {
@@ -143,9 +145,7 @@
test('has invalid executables mapping to a number', () async {
final pkg = packageMap('test_pkg', '1.0.0');
- pkg['executables'] = <String, dynamic>{
- 'test_pkg': 33,
- };
+ pkg['executables'] = <String, dynamic>{'test_pkg': 33};
await d.dir(appPath, [d.pubspec(pkg)]).create();
await expectValidationDeprecated(pubspecField, errors: isNotEmpty);
diff --git a/test/validator/pubspec_test.dart b/test/validator/pubspec_test.dart
index 480033e..e84ddcc 100644
--- a/test/validator/pubspec_test.dart
+++ b/test/validator/pubspec_test.dart
@@ -16,12 +16,17 @@
await expectValidationDeprecated(PubspecValidator.new);
});
- test('should consider a package invalid if it has a .gitignored pubspec',
- () async {
- final repo = d.git(appPath, [d.file('.gitignore', 'pubspec.yaml')]);
- await d.validPackage().create();
- await repo.create();
+ test(
+ 'should consider a package invalid if it has a .gitignored pubspec',
+ () async {
+ final repo = d.git(appPath, [d.file('.gitignore', 'pubspec.yaml')]);
+ await d.validPackage().create();
+ await repo.create();
- await expectValidationDeprecated(PubspecValidator.new, errors: isNotEmpty);
- });
+ await expectValidationDeprecated(
+ PubspecValidator.new,
+ errors: isNotEmpty,
+ );
+ },
+ );
}
diff --git a/test/validator/pubspec_typo_test.dart b/test/validator/pubspec_typo_test.dart
index d5bb654..a2543bc 100644
--- a/test/validator/pubspec_typo_test.dart
+++ b/test/validator/pubspec_typo_test.dart
@@ -61,10 +61,7 @@
test('contains typos', () async {
await d.dir(appPath, [
- d.pubspec({
- 'name': 'myapp',
- 'dependecies': {},
- }),
+ d.pubspec({'name': 'myapp', 'dependecies': {}}),
]).create();
await expectValidationDeprecated(pubspecTypo, warnings: isNotEmpty);
diff --git a/test/validator/relative_version_numbering_test.dart b/test/validator/relative_version_numbering_test.dart
index 420f5d3..1f35766 100644
--- a/test/validator/relative_version_numbering_test.dart
+++ b/test/validator/relative_version_numbering_test.dart
@@ -28,10 +28,7 @@
void main() {
test('Hints about not publishing latest', () async {
final server = await servePackages();
- server.serve(
- 'test_pkg',
- '2.0.2',
- );
+ server.serve('test_pkg', '2.0.2');
await d.validPackage().create();
await expectValidationHint('''
@@ -42,10 +39,7 @@
test('Hints incrementing more than needed', () async {
final server = await servePackages();
- server.serve(
- 'test_pkg',
- '1.0.2',
- );
+ server.serve('test_pkg', '1.0.2');
const notIncrementalHintText = '''
* The previous version is 1.0.2.
@@ -68,10 +62,7 @@
test('Hints incrementing more than needed after a prerelease', () async {
final server = await servePackages();
- server.serve(
- 'test_pkg',
- '1.0.2-pre',
- );
+ server.serve('test_pkg', '1.0.2-pre');
const notIncrementalHintText = '''
* The previous version is 1.0.2-pre.
@@ -94,10 +85,7 @@
  test('Hints incrementing more than needed after pre 1.0', () async {
final server = await servePackages();
- server.serve(
- 'test_pkg',
- '0.0.1',
- );
+ server.serve('test_pkg', '0.0.1');
const notIncrementalHintText = '''
* The previous version is 0.0.1.
@@ -118,59 +106,53 @@
await expectValidationHint(notIncrementalHintText);
});
- test('Releasing a prerelease of incremental version causes no hint',
- () async {
- final server = await servePackages();
- server.serve(
- 'test_pkg',
- '1.0.0',
- );
- await d.validPackage(version: '1.0.1-dev').create();
- await expectValidation();
- await d.validPackage(version: '1.1.0-dev').create();
- await expectValidation();
- await d.validPackage(version: '2.0.0-dev').create();
- await expectValidation();
- });
+ test(
+ 'Releasing a prerelease of incremental version causes no hint',
+ () async {
+ final server = await servePackages();
+ server.serve('test_pkg', '1.0.0');
+ await d.validPackage(version: '1.0.1-dev').create();
+ await expectValidation();
+ await d.validPackage(version: '1.1.0-dev').create();
+ await expectValidation();
+ await d.validPackage(version: '2.0.0-dev').create();
+ await expectValidation();
+ },
+ );
test('Releasing the prereleased version causes no hint', () async {
final server = await servePackages();
- server.serve(
- 'test_pkg',
- '1.0.0-dev',
- );
+ server.serve('test_pkg', '1.0.0-dev');
await d.validPackage().create();
await expectValidation();
});
test('Releasing a build-release causes no hint', () async {
final server = await servePackages();
- server.serve(
- 'test_pkg',
- '1.0.0',
- );
+ server.serve('test_pkg', '1.0.0');
await d.validPackage(version: '1.0.0+0').create();
await expectValidation();
});
group('should consider a package valid if it', () {
- test('is opting in to null-safety with previous null-safe version',
- () async {
- final server = await servePackages();
- server.serve(
- 'test_pkg',
- '0.0.1',
- pubspec: {
- 'environment': {'sdk': '>=2.12.0<3.0.0'},
- },
- );
-
- await setup(sdkConstraint: '>=2.12.0 <3.0.0');
- await expectValidationDeprecated(validator);
- });
-
test(
- 'is opting in to null-safety using a pre-release of 2.12.0 '
+ 'is opting in to null-safety with previous null-safe version',
+ () async {
+ final server = await servePackages();
+ server.serve(
+ 'test_pkg',
+ '0.0.1',
+ pubspec: {
+ 'environment': {'sdk': '>=2.12.0<3.0.0'},
+ },
+ );
+
+ await setup(sdkConstraint: '>=2.12.0 <3.0.0');
+ await expectValidationDeprecated(validator);
+ },
+ );
+
+ test('is opting in to null-safety using a pre-release of 2.12.0 '
'with previous null-safe version', () async {
final server = await servePackages();
server.serve(
@@ -185,8 +167,7 @@
await expectValidationDeprecated(validator);
});
- test(
- 'is opting in to null-safety with previous null-safe version. '
+ test('is opting in to null-safety with previous null-safe version. '
'Even with a later non-null-safe version', () async {
await servePackages()
..serve(
@@ -211,7 +192,7 @@
// Nothing about null-safety
'''
The latest published version is 2.0.1.
-Your version 1.0.0 is earlier than that.'''
+Your version 1.0.0 is earlier than that.''',
],
);
});
@@ -222,8 +203,7 @@
await expectValidationDeprecated(validator);
});
- test(
- 'opts in to null-safety, with previous stable version not-null-safe. '
+ test('opts in to null-safety, with previous stable version not-null-safe. '
'With an in-between non-null-safe prerelease', () async {
await servePackages()
..serve(
@@ -247,31 +227,32 @@
});
group('should warn if ', () {
- test('opts in to null-safety, with previous version not-null-safe',
- () async {
- final server = await servePackages();
- server.serve(
- 'test_pkg',
- '0.0.1',
- pubspec: {
- 'environment': {'sdk': '>=2.9.0<3.0.0'},
- },
- );
+ test(
+ 'opts in to null-safety, with previous version not-null-safe',
+ () async {
+ final server = await servePackages();
+ server.serve(
+ 'test_pkg',
+ '0.0.1',
+ pubspec: {
+ 'environment': {'sdk': '>=2.9.0<3.0.0'},
+ },
+ );
- await setup(sdkConstraint: '>=2.12.0 <3.0.0');
- await expectValidationDeprecated(
- validator,
- hints: [
- '''
+ await setup(sdkConstraint: '>=2.12.0 <3.0.0');
+ await expectValidationDeprecated(
+ validator,
+ hints: [
+ '''
You're about to publish a package that opts into null safety.
The previous version (0.0.1) isn't opted in.
-See https://dart.dev/null-safety/migration-guide for best practices.'''
- ],
- );
- });
+See https://dart.dev/null-safety/migration-guide for best practices.''',
+ ],
+ );
+ },
+ );
- test(
- 'opts in to null-safety, with previous version not-null-safe. '
+ test('opts in to null-safety, with previous version not-null-safe. '
'Even with a later null-safe version', () async {
await servePackages()
..serve(
@@ -299,13 +280,12 @@
'''
You're about to publish a package that opts into null safety.
The previous version (0.0.1) isn't opted in.
-See https://dart.dev/null-safety/migration-guide for best practices.'''
+See https://dart.dev/null-safety/migration-guide for best practices.''',
],
);
});
- test(
- 'is opting in to null-safety with previous null-safe stable version. '
+ test('is opting in to null-safety with previous null-safe stable version. '
'with an in-between non-null-safe prerelease', () async {
await servePackages()
..serve(
@@ -330,13 +310,12 @@
'''
You're about to publish a package that opts into null safety.
The previous version (0.0.2-dev) isn't opted in.
-See https://dart.dev/null-safety/migration-guide for best practices.'''
+See https://dart.dev/null-safety/migration-guide for best practices.''',
],
);
});
- test(
- 'is opting in to null-safety with no existing stable versions. '
+ test('is opting in to null-safety with no existing stable versions. '
'With a previous non-null-safe prerelease', () async {
await setup(sdkConstraint: '>=2.12.0 <3.0.0');
final server = await servePackages();
@@ -353,7 +332,7 @@
'''
You're about to publish a package that opts into null safety.
The previous version (0.0.2-dev) isn't opted in.
-See https://dart.dev/null-safety/migration-guide for best practices.'''
+See https://dart.dev/null-safety/migration-guide for best practices.''',
],
);
});
diff --git a/test/validator/sdk_constraint_test.dart b/test/validator/sdk_constraint_test.dart
index 192cda0..4d367b8 100644
--- a/test/validator/sdk_constraint_test.dart
+++ b/test/validator/sdk_constraint_test.dart
@@ -20,18 +20,16 @@
});
test('has an SDK constraint without ^', () async {
- await d.dir(
- appPath,
- [d.libPubspec('test_pkg', '1.0.0', sdk: '>=1.8.0 <2.0.0')],
- ).create();
+ await d.dir(appPath, [
+ d.libPubspec('test_pkg', '1.0.0', sdk: '>=1.8.0 <2.0.0'),
+ ]).create();
await expectValidationDeprecated(sdkConstraint);
});
test('has an SDK constraint with ^', () async {
- await d.dir(
- appPath,
- [d.libPubspec('test_pkg', '1.0.0', sdk: '^1.8.0')],
- ).create();
+ await d.dir(appPath, [
+ d.libPubspec('test_pkg', '1.0.0', sdk: '^1.8.0'),
+ ]).create();
await expectValidationDeprecated(sdkConstraint);
});
@@ -42,8 +40,7 @@
await expectValidationDeprecated(sdkConstraint);
});
- test(
- 'has a Flutter SDK constraint with an appropriate Dart SDK '
+ test('has a Flutter SDK constraint with an appropriate Dart SDK '
'constraint', () async {
await d.dir(appPath, [
d.pubspec({
@@ -55,8 +52,7 @@
await expectValidationDeprecated(sdkConstraint);
});
- test(
- 'has a Fuchsia SDK constraint with an appropriate Dart SDK '
+ test('has a Fuchsia SDK constraint with an appropriate Dart SDK '
'constraint', () async {
await d.dir(appPath, [
d.pubspec({
@@ -74,10 +70,9 @@
group('should consider a package invalid if it', () {
test('has no upper bound SDK constraint', () async {
- await d.dir(
- appPath,
- [d.libPubspec('test_pkg', '1.0.0', sdk: '>=1.8.0')],
- ).create();
+ await d.dir(appPath, [
+ d.libPubspec('test_pkg', '1.0.0', sdk: '>=1.8.0'),
+ ]).create();
await expectValidationDeprecated(
sdkConstraint,
errors: anyElement(contains('should have an upper bound constraint')),
@@ -86,10 +81,7 @@
test('has no SDK constraint', () async {
await d.dir(appPath, [
- d.rawPubspec({
- 'name': 'test_pkg',
- 'version': '1.0.0',
- }),
+ d.rawPubspec({'name': 'test_pkg', 'version': '1.0.0'}),
]).create();
await expectValidationDeprecated(
sdkConstraint,
@@ -104,15 +96,12 @@
await expectValidationDeprecated(
sdkConstraint,
warnings: anyElement(
- contains(
- 'consider publishing the package as a pre-release instead',
- ),
+ contains('consider publishing the package as a pre-release instead'),
),
);
});
- test(
- 'Gives a hint if package has a <3.0.0 constraint '
+ test('Gives a hint if package has a <3.0.0 constraint '
'that is interpreted as <4.0.0', () async {
await d.dir(appPath, [
d.rawPubspec({
@@ -123,16 +112,14 @@
]).create();
await expectValidationDeprecated(
sdkConstraint,
- hints: anyElement(
- '''
+ hints: anyElement('''
The declared SDK constraint is '^2.19.0', this is interpreted as '>=2.19.0 <4.0.0'.
Consider updating the SDK constraint to:
environment:
sdk: '>=2.19.0 <4.0.0'
-''',
- ),
+'''),
);
});
});
diff --git a/test/validator/size_test.dart b/test/validator/size_test.dart
index f04245a..4fa37f0 100644
--- a/test/validator/size_test.dart
+++ b/test/validator/size_test.dart
@@ -28,14 +28,14 @@
});
group('hints if package is more than 100 MB', () {
- test('package is not under source control and no .gitignore exists',
- () async {
- await d.validPackage().create();
+ test(
+ 'package is not under source control and no .gitignore exists',
+ () async {
+ await d.validPackage().create();
- await expectSizeValidationHint(
- contains('Your package is 100.0 MB.'),
- );
- });
+ await expectSizeValidationHint(contains('Your package is 100.0 MB.'));
+ },
+ );
test('package is not under source control and .gitignore exists', () async {
await d.validPackage().create();
@@ -44,8 +44,10 @@
await expectSizeValidationHint(
allOf(
contains('Your package is 100.0 MB.'),
- contains('Your .gitignore has no effect since your project '
- 'does not appear to be in version control.'),
+ contains(
+ 'Your .gitignore has no effect since your project '
+ 'does not appear to be in version control.',
+ ),
),
);
});
@@ -57,8 +59,10 @@
await expectSizeValidationHint(
allOf(
contains('Your package is 100.0 MB.'),
- contains('Consider adding a .gitignore to avoid including '
- 'temporary files.'),
+ contains(
+ 'Consider adding a .gitignore to avoid including '
+ 'temporary files.',
+ ),
),
);
});
@@ -67,9 +71,7 @@
await d.validPackage().create();
await d.git(appPath, [d.file('.gitignore', 'ignored')]).create();
- await expectSizeValidationHint(
- contains('Your package is 100.0 MB.'),
- );
+ await expectSizeValidationHint(contains('Your package is 100.0 MB.'));
});
});
}
diff --git a/test/validator/strict_dependencies_test.dart b/test/validator/strict_dependencies_test.dart
index 9c0a5d7..6150d21 100644
--- a/test/validator/strict_dependencies_test.dart
+++ b/test/validator/strict_dependencies_test.dart
@@ -55,7 +55,7 @@
await expectValidationDeprecated(strictDeps);
});
-// Regression test of https://github.com/dart-lang/pub/issues/4115 .
+ // Regression test of https://github.com/dart-lang/pub/issues/4115 .
test('imports a dev_dependency in bindings_generator/', () async {
await d.dir(appPath, [
d.libPubspec(
@@ -103,8 +103,7 @@
deps = {'silly_monkey': '^1.2.3'};
}
for (var devDir in ['benchmark', 'example', 'test', 'tool']) {
- test(
- 'declares an "$port" as a '
+ test('declares an "$port" as a '
'${isDev ? 'dev ' : ''}dependency in $devDir/', () async {
await d.dir(appPath, [
d.libPubspec(
@@ -201,9 +200,7 @@
d.dir('test', [
d.dir('data', [
d.dir('mypkg', [
- d.dir('lib', [
- d.file('dummy.dart', '\n'),
- ]),
+ d.dir('lib', [d.file('dummy.dart', '\n')]),
]),
]),
]),
@@ -243,9 +240,7 @@
d.dir('test', [
d.dir('data', [
d.dir('mypkg', [
- d.dir('lib', [
- d.file('dummy.dart', '\n'),
- ]),
+ d.dir('lib', [d.file('dummy.dart', '\n')]),
]),
]),
]),
@@ -283,14 +278,11 @@
});
test('hook does not declare an "import" as a dependency', () async {
- await d.dir(
- p.join(appPath, 'hook'),
- [
- d.file('build.dart', r'''
+ await d.dir(p.join(appPath, 'hook'), [
+ d.file('build.dart', r'''
import 'package:silly_monkey/silly_monkey.dart';
'''),
- ],
- ).create();
+ ]).create();
await expectValidationDeprecated(
strictDeps,
@@ -301,25 +293,19 @@
});
test('hook declares an import as a devDependency for', () async {
- await d.dir(
- appPath,
- [
- d.libPubspec(
- 'test_pkg',
- '1.0.0',
- devDeps: {'silly_monkey': '^1.2.3'},
- sdk: '>=1.8.0 <2.0.0',
- ),
- d.dir(
- 'hook',
- [
- d.file('build.dart', r'''
+ await d.dir(appPath, [
+ d.libPubspec(
+ 'test_pkg',
+ '1.0.0',
+ devDeps: {'silly_monkey': '^1.2.3'},
+ sdk: '>=1.8.0 <2.0.0',
+ ),
+ d.dir('hook', [
+ d.file('build.dart', r'''
import 'package:silly_monkey/silly_monkey.dart';
'''),
- ],
- ),
- ],
- ).create();
+ ]),
+ ]).create();
await expectValidationDeprecated(
strictDeps,
@@ -377,27 +363,29 @@
for (var port in ['import', 'export']) {
for (var devDir in ['benchmark', 'test', 'tool']) {
- test('does not declare an "$port" as a dependency in $devDir/',
- () async {
- await d.dir(appPath, [
- d.libPubspec('test_pkg', '1.0.0', sdk: '>=1.8.0 <2.0.0'),
- d.dir(devDir, [
- d.file('library.dart', '''
+ test(
+ 'does not declare an "$port" as a dependency in $devDir/',
+ () async {
+ await d.dir(appPath, [
+ d.libPubspec('test_pkg', '1.0.0', sdk: '>=1.8.0 <2.0.0'),
+ d.dir(devDir, [
+ d.file('library.dart', '''
$port 'package:silly_monkey/silly_monkey.dart';
'''),
- ]),
- ]).create();
+ ]),
+ ]).create();
- await expectValidationDeprecated(
- strictDeps,
- warnings: [
- matches(
- 'does not have silly_monkey in the '
- '`dependencies` or `dev_dependencies` section',
- ),
- ],
- );
- });
+ await expectValidationDeprecated(
+ strictDeps,
+ warnings: [
+ matches(
+ 'does not have silly_monkey in the '
+ '`dependencies` or `dev_dependencies` section',
+ ),
+ ],
+ );
+ },
+ );
}
}
diff --git a/test/validator/utils.dart b/test/validator/utils.dart
index f470134..3a8fb88 100644
--- a/test/validator/utils.dart
+++ b/test/validator/utils.dart
@@ -48,10 +48,7 @@
}) async {
final s = count == 1 ? '' : 's';
await expectValidation(
- message: allOf([
- contains(error),
- contains('Package has $count warning$s'),
- ]),
+ message: allOf([contains(error), contains('Package has $count warning$s')]),
exitCode: DATA,
environment: environment,
);
@@ -64,10 +61,7 @@
}) async {
final s = count == 1 ? '' : 's';
await expectValidation(
- message: allOf([
- contains(hint),
- contains('and $count hint$s'),
- ]),
+ message: allOf([contains(hint), contains('and $count hint$s')]),
environment: environment,
);
}
diff --git a/test/version_solver_test.dart b/test/version_solver_test.dart
index a2bd6a7..c0ec370 100644
--- a/test/version_solver_test.dart
+++ b/test/version_solver_test.dart
@@ -76,8 +76,7 @@
);
});
- test(
- 'shared dependency where dependent version in turn affects other '
+ test('shared dependency where dependent version in turn affects other '
'dependencies', () async {
await servePackages()
..serve('foo', '1.0.0')
@@ -167,8 +166,7 @@
await expectResolves(result: {'foo': '1.0.2', 'bar': '1.0.2'});
});
- test(
- 'unlocks dependencies if necessary to ensure that a new '
+ test('unlocks dependencies if necessary to ensure that a new '
'dependency is satisfied', () async {
await servePackages()
..serve('foo', '1.0.0', deps: {'bar': '<2.0.0'})
@@ -200,8 +198,7 @@
});
// Issue 1853
- test(
- 'produces a nice message for a locked dependency '
+ test('produces a nice message for a locked dependency '
"that's the only version of its package", () async {
await servePackages()
..serve('foo', '1.0.0', deps: {'bar': '>=2.0.0'})
@@ -436,8 +433,10 @@
await d.appDir(dependencies: {'foo': '>=1.0.0 <2.0.0'}).create();
await expectResolves(
- error: contains('''
-Because myapp depends on foo ^1.0.0 which doesn't match any versions, version solving failed.'''),
+ error: contains(
+ '''
+Because myapp depends on foo ^1.0.0 which doesn't match any versions, version solving failed.''',
+ ),
);
});
@@ -503,13 +502,18 @@
await expectResolves(
error: allOf([
contains(
- 'Because every version of bar depends on shared from hosted on '
- 'http://localhost:'),
- contains(' and every version of foo depends on shared from hosted on '
- 'http://localhost:'),
+ 'Because every version of bar depends on shared from hosted on '
+ 'http://localhost:',
+ ),
+ contains(
+ ' and every version of foo depends on shared from hosted on '
+ 'http://localhost:',
+ ),
contains(', bar is incompatible with foo.'),
- contains('So, because myapp depends on both foo 1.0.0 and bar 1.0.0, '
- 'version solving failed.'),
+ contains(
+ 'So, because myapp depends on both foo 1.0.0 and bar 1.0.0, '
+ 'version solving failed.',
+ ),
]),
);
});
@@ -570,8 +574,10 @@
await d.appDir(dependencies: {'a': 'any', 'b': '>1.0.0'}).create();
await expectResolves(
- error: contains('''
-Because myapp depends on b >1.0.0 which doesn't match any versions, version solving failed.'''),
+ error: contains(
+ '''
+Because myapp depends on b >1.0.0 which doesn't match any versions, version solving failed.''',
+ ),
);
});
@@ -600,7 +606,8 @@
..serve('di', '0.0.36', deps: {'analyzer': '>=0.13.0 <0.14.0'});
await d
- .appDir(dependencies: {'angular': 'any', 'collection': 'any'}).create();
+ .appDir(dependencies: {'angular': 'any', 'collection': 'any'})
+ .create();
await expectResolves(
error: equalsIgnoringWhitespace('''
Because every version of angular depends on di ^0.0.32 which depends on
@@ -614,11 +621,13 @@
void badSource() {
test('fail if the root package has a bad source in dep', () async {
- await d.appDir(
- dependencies: {
- 'foo': {'bad': 'any'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': {'bad': 'any'},
+ },
+ )
+ .create();
await expectResolves(
error: equalsIgnoringWhitespace('''
Because myapp depends on foo from unknown source "bad", version solving
@@ -716,12 +725,14 @@
..serve('baz', '1.0.0');
await d.dir('baz', [d.libPubspec('baz', '1.0.0')]).create();
- await d.appDir(
- dependencies: {
- 'foo': 'any',
- 'baz': {'path': '../baz'},
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'foo': 'any',
+ 'baz': {'path': '../baz'},
+ },
+ )
+ .create();
await expectResolves(
error: equalsIgnoringWhitespace('''
Because every version of foo depends on bar any which depends on baz any,
@@ -794,7 +805,8 @@
await expectResolves(
// We avoid equalsIgnoringWhitespace() here because we want to test the
// formatting of the line number.
- error: ' Because foo <1.1.0 depends on a ^1.0.0 which depends on b '
+ error:
+ ' Because foo <1.1.0 depends on a ^1.0.0 which depends on b '
'^2.0.0, foo <1.1.0 requires b ^2.0.0.\n'
'(1) So, because foo <1.1.0 depends on b ^1.0.0, foo <1.1.0 is '
'forbidden.\n'
@@ -983,8 +995,10 @@
await d.appDir(dependencies: {'a': 'any', 'b': 'any', 'c': 'any'}).create();
await expectResolves(
error: allOf([
- contains('Because every version of b depends on a from hosted on '
- 'http://localhost:'),
+ contains(
+ 'Because every version of b depends on a from hosted on '
+ 'http://localhost:',
+ ),
contains(' and myapp depends on a from hosted on http://localhost:'),
contains(', b is forbidden.'),
contains('So, because myapp depends on b any, version solving failed.'),
@@ -1229,8 +1243,7 @@
await expectResolves(result: {'foo': '1.0.0', 'bar': '2.0.0'});
});
- test(
- 'selects a dependency version that allows a transitive '
+ test('selects a dependency version that allows a transitive '
'dependency that allows the SDK', () async {
await servePackages()
..serve('foo', '1.0.0', deps: {'bar': '1.0.0'})
@@ -1438,10 +1451,7 @@
await d.dir(appPath, [
d.pubspec({
'name': 'myapp',
- 'environment': {
- 'sdk': '>3.1.2+3',
- 'flutter': '1.2.3',
- },
+ 'environment': {'sdk': '>3.1.2+3', 'flutter': '1.2.3'},
}),
]).create();
@@ -1521,17 +1531,19 @@
await expectResolves(result: {'a': '1.1.0'});
});
- test('prefer a stable version even if constraint mentions unstable',
- () async {
- await servePackages()
- ..serve('a', '1.0.0')
- ..serve('a', '1.1.0')
- ..serve('a', '2.0.0-dev')
- ..serve('a', '2.0.0');
+ test(
+ 'prefer a stable version even if constraint mentions unstable',
+ () async {
+ await servePackages()
+ ..serve('a', '1.0.0')
+ ..serve('a', '1.1.0')
+ ..serve('a', '2.0.0-dev')
+ ..serve('a', '2.0.0');
- await d.appDir(dependencies: {'a': '<=2.0.0-dev'}).create();
- await expectResolves(result: {'a': '1.1.0'});
- });
+ await d.appDir(dependencies: {'a': '<=2.0.0-dev'}).create();
+ await expectResolves(result: {'a': '1.1.0'});
+ },
+ );
test('use pre-release when desired', () async {
await servePackages()
@@ -1562,12 +1574,7 @@
..serve('b', '1.1.0-dev');
await d.appDir(dependencies: {'a': '^1.0.0'}).create();
- await expectResolves(
- result: {
- 'a': '1.1.0',
- 'b': '1.1.0-dev',
- },
- );
+ await expectResolves(result: {'a': '1.1.0', 'b': '1.1.0-dev'});
});
test('backtracks pre-release choice with direct dependency', () async {
@@ -1577,19 +1584,17 @@
..serve('b', '1.0.0')
..serve('b', '1.1.0-dev');
- await d.appDir(
- dependencies: {
- 'a': '^1.0.0',
- 'b':
- '^1.0.0', // Direct dependency prevents us from using a pre-release.
- },
- ).create();
- await expectResolves(
- result: {
- 'a': '1.0.0',
- 'b': '1.0.0',
- },
- );
+ await d
+ .appDir(
+ dependencies: {
+ 'a': '^1.0.0',
+ 'b':
+ // Direct dependency prevents us from using a pre-release.
+ '^1.0.0',
+ },
+ )
+ .create();
+ await expectResolves(result: {'a': '1.0.0', 'b': '1.0.0'});
});
test('backtracking pre-release fails with indirect dependency', () async {
@@ -1602,18 +1607,16 @@
..serve('b', '1.1.0-dev')
..serve('c', '1.0.0', deps: {'b': '^1.0.0'});
- await d.appDir(
- dependencies: {
- 'a': '^1.0.0',
- 'c': '^1.0.0', // This doesn't not prevent using a pre-release.
- },
- ).create();
+ await d
+ .appDir(
+ dependencies: {
+ 'a': '^1.0.0',
+              'c': '^1.0.0', // This doesn't prevent using a pre-release.
+ },
+ )
+ .create();
await expectResolves(
- result: {
- 'a': '1.1.0',
- 'b': '1.1.0-dev',
- 'c': '1.0.0',
- },
+ result: {'a': '1.1.0', 'b': '1.1.0-dev', 'c': '1.0.0'},
);
});
@@ -1627,12 +1630,7 @@
..serve('b', '0.17.0', deps: {'a': '1.0.0'})
..serve('c', '2.0.1', deps: {});
- await d.appDir(
- dependencies: {
- 'a': '0.12.0',
- 'b': 'any',
- },
- ).create();
+ await d.appDir(dependencies: {'a': '0.12.0', 'b': 'any'}).create();
await expectResolves(
error: contains(
'So, because myapp depends on both '
@@ -1650,11 +1648,7 @@
..serve('b', '1.1.0-alpha')
..serve('a', '1.0.0', deps: {'b': '^1.1.0-alpha'});
- await d.appDir(
- dependencies: {
- 'a': '^1.0.0',
- },
- ).create();
+ await d.appDir(dependencies: {'a': '^1.0.0'}).create();
await expectResolves(tries: 2);
});
}
@@ -1922,8 +1916,7 @@
await expectResolves(result: {'foo': '2.0.0'}, downgrade: true);
});
- test(
- 'use earliest allowed prerelease if no stable versions match '
+ test('use earliest allowed prerelease if no stable versions match '
'while downgrading', () async {
await servePackages()
..serve('a', '1.0.0')
@@ -1966,12 +1959,13 @@
await runPub(
args: [downgrade ? 'downgrade' : 'get'],
environment: environment,
- output: output ??
+ output:
+ output ??
(error == null
? anyOf(
- contains('Got dependencies!'),
- matches(RegExp(r'Changed \d+ dependenc(ies|y)!')),
- )
+ contains('Got dependencies!'),
+ matches(RegExp(r'Changed \d+ dependenc(ies|y)!')),
+ )
: null),
error: error,
silent: contains('Tried ${tries ?? 1} solutions'),
@@ -1982,8 +1976,10 @@
final cache = SystemCache();
final registry = cache.sources;
- final lockFile =
- LockFile.load(p.join(d.sandbox, appPath, 'pubspec.lock'), registry);
+ final lockFile = LockFile.load(
+ p.join(d.sandbox, appPath, 'pubspec.lock'),
+ registry,
+ );
final resultPubspec = Pubspec.fromMap(
{'dependencies': result},
registry,
@@ -2034,10 +2030,10 @@
test('diamond sdk deps', () async {
await d.dir('flutter', [
d.dir('bin/cache/pkg', [
- d.dir(
- 'baz',
- [d.libDir('baz', 'foo 0.0.1'), d.libPubspec('baz', '0.0.1')],
- ),
+ d.dir('baz', [
+ d.libDir('baz', 'foo 0.0.1'),
+ d.libPubspec('baz', '0.0.1'),
+ ]),
]),
d.flutterVersion('1.2.3'),
]).create();
diff --git a/test/workspace_test.dart b/test/workspace_test.dart
index 94ce826..b348f8b 100644
--- a/test/workspace_test.dart
+++ b/test/workspace_test.dart
@@ -47,13 +47,10 @@
File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
);
expect(dig<Map>(lockfile, ['packages']).keys, <String>{'dev_dep'});
- await appPackageConfigFile(
- [
- packageConfigEntry(name: 'dev_dep', version: '1.0.0'),
- packageConfigEntry(name: 'a', path: './pkgs/a'),
- ],
- generatorVersion: '3.5.0',
- ).validate();
+ await appPackageConfigFile([
+ packageConfigEntry(name: 'dev_dep', version: '1.0.0'),
+ packageConfigEntry(name: 'a', path: './pkgs/a'),
+ ], generatorVersion: '3.5.0').validate();
final workspaceRefA = jsonDecode(
File(
p.join(
@@ -69,59 +66,58 @@
);
expect(workspaceRefA, {'workspaceRoot': p.join('..', '..', '..', '..')});
final workspaceRefMyApp = jsonDecode(
- File(p.join(sandbox, appPath, '.dart_tool', 'pub', 'workspace_ref.json'))
- .readAsStringSync(),
+ File(
+ p.join(sandbox, appPath, '.dart_tool', 'pub', 'workspace_ref.json'),
+ ).readAsStringSync(),
);
expect(workspaceRefMyApp, {'workspaceRoot': p.join('..', '..')});
});
test(
- 'allows dependencies between workspace members, the source is overridden',
- () async {
- await servePackages();
- await dir(appPath, [
- libPubspec(
- 'myapp',
- '1.2.3',
- extras: {
- 'workspace': ['pkgs/a', 'pkgs/b'],
- },
- sdk: '^3.5.0',
- ),
- dir('pkgs', [
- dir('a', [
- libPubspec(
- 'a',
- '1.1.1',
- deps: {'b': '^2.0.0'},
- resolutionWorkspace: true,
- ),
+ 'allows dependencies between workspace members, the source is overridden',
+ () async {
+ await servePackages();
+ await dir(appPath, [
+ libPubspec(
+ 'myapp',
+ '1.2.3',
+ extras: {
+ 'workspace': ['pkgs/a', 'pkgs/b'],
+ },
+ sdk: '^3.5.0',
+ ),
+ dir('pkgs', [
+ dir('a', [
+ libPubspec(
+ 'a',
+ '1.1.1',
+ deps: {'b': '^2.0.0'},
+ resolutionWorkspace: true,
+ ),
+ ]),
+ dir('b', [
+ libPubspec(
+ 'b',
+ '2.1.1',
+ deps: {
+ 'myapp': {'git': 'somewhere'},
+ },
+ resolutionWorkspace: true,
+ ),
+ ]),
]),
- dir('b', [
- libPubspec(
- 'b',
- '2.1.1',
- deps: {
- 'myapp': {'git': 'somewhere'},
- },
- resolutionWorkspace: true,
- ),
- ]),
- ]),
- ]).create();
- await pubGet(environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'});
- final lockfile = loadYaml(
- File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
- );
- expect(dig<Map>(lockfile, ['packages']).keys, <String>{});
- await appPackageConfigFile(
- [
+ ]).create();
+ await pubGet(environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'});
+ final lockfile = loadYaml(
+ File(p.join(sandbox, appPath, 'pubspec.lock')).readAsStringSync(),
+ );
+ expect(dig<Map>(lockfile, ['packages']).keys, <String>{});
+ await appPackageConfigFile([
packageConfigEntry(name: 'a', path: './pkgs/a'),
packageConfigEntry(name: 'b', path: './pkgs/b'),
- ],
- generatorVersion: '3.5.0',
- ).validate();
- });
+ ], generatorVersion: '3.5.0').validate();
+ },
+ );
test('allows nested workspaces', () async {
final server = await servePackages();
@@ -164,13 +160,10 @@
);
expect(dig<Map>(lockfile, ['packages']).keys, <String>{});
- await appPackageConfigFile(
- [
- packageConfigEntry(name: 'a', path: './pkgs/a'),
- packageConfigEntry(name: 'example', path: './pkgs/a/example'),
- ],
- generatorVersion: '3.5.0',
- ).validate();
+ await appPackageConfigFile([
+ packageConfigEntry(name: 'a', path: './pkgs/a'),
+ packageConfigEntry(name: 'example', path: './pkgs/a/example'),
+ ], generatorVersion: '3.5.0').validate();
});
test('checks constraints between workspace members', () async {
@@ -204,8 +197,7 @@
);
});
- test(
- 'ignores the source of dependencies on root packages. '
+ test('ignores the source of dependencies on root packages. '
'(Uses the local version instead)', () async {
await dir(appPath, [
libPubspec(
@@ -309,12 +301,7 @@
sdk: '^3.5.0',
),
dir('pkgs', [
- dir('a', [
- libPubspec(
- 'a',
- '1.1.1',
- ),
- ]),
+ dir('a', [libPubspec('a', '1.1.1')]),
]),
]).create();
final s = p.separator;
@@ -355,9 +342,7 @@
environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
workingDirectory: p.join(sandbox, appPath, 'pkgs'),
output: allOf(
- contains(
- 'Resolving dependencies in `$absoluteAppPath`...',
- ),
+ contains('Resolving dependencies in `$absoluteAppPath`...'),
contains('Got dependencies in `$absoluteAppPath`'),
),
);
@@ -379,11 +364,7 @@
await pubGet(
args: ['-C..'],
environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- workingDirectory: p.join(
- sandbox,
- appPath,
- 'pkgs',
- ),
+ workingDirectory: p.join(sandbox, appPath, 'pkgs'),
output: contains(
'Resolving dependencies in `${p.join(sandbox, appPath)}`...',
),
@@ -486,8 +467,7 @@
await runPub(
args: ['deps'],
environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- output: contains(
- '''
+ output: contains('''
Dart SDK 3.5.0
a 1.1.1
├── both...
@@ -499,8 +479,7 @@
└── myapp...
myapp 1.2.3
├── b...
-└── both 1.0.0''',
- ),
+└── both 1.0.0'''),
);
await runPub(
@@ -724,13 +703,7 @@
sdk: '^3.5.0',
),
dir('pkgs', [
- dir('a', [
- libPubspec(
- 'a',
- '1.1.1',
- resolutionWorkspace: true,
- ),
- ]),
+ dir('a', [libPubspec('a', '1.1.1', resolutionWorkspace: true)]),
]),
]).create();
@@ -791,13 +764,7 @@
);
await dir(appPath, [
dir('pkgs', [
- dir('a', [
- libPubspec(
- 'a',
- '1.1.1',
- resolutionWorkspace: true,
- ),
- ]),
+ dir('a', [libPubspec('a', '1.1.1', resolutionWorkspace: true)]),
]),
]).validate();
// Only when removing it from the root it shows the update.
@@ -809,132 +776,122 @@
);
});
- test('Reports error if pubspec inside workspace is not part of the workspace',
- () async {
- await dir(appPath, [
- libPubspec(
- 'myapp',
- '1.2.3',
- extras: {
- 'workspace': ['pkgs/a', 'pkgs/a/example'],
- },
- sdk: '^3.5.0',
- ),
- dir('pkgs', [
- libPubspec('not_in_workspace', '1.0.0'),
- dir(
- 'a',
- [
+ test(
+ 'Reports error if pubspec inside workspace is not part of the workspace',
+ () async {
+ await dir(appPath, [
+ libPubspec(
+ 'myapp',
+ '1.2.3',
+ extras: {
+ 'workspace': ['pkgs/a', 'pkgs/a/example'],
+ },
+ sdk: '^3.5.0',
+ ),
+ dir('pkgs', [
+ libPubspec('not_in_workspace', '1.0.0'),
+ dir('a', [
libPubspec('a', '1.1.1', resolutionWorkspace: true),
dir('example', [
libPubspec('example', '0.0.0', resolutionWorkspace: true),
]),
- ],
+ ]),
+ ]),
+ ]).create();
+ await pubGet(
+ environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
+ error: contains(
+ 'The file `.${s}pkgs${s}pubspec.yaml` '
+ 'is located in a directory between the workspace root',
),
- ]),
- ]).create();
- await pubGet(
- environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- error: contains(
- 'The file `.${s}pkgs${s}pubspec.yaml` '
- 'is located in a directory between the workspace root',
- ),
- );
- });
+ );
+ },
+ );
- test('Removes lock files and package configs from inside the workspace',
- () async {
- await dir(appPath, [
- libPubspec(
- 'myapp',
- '1.2.3',
- extras: {
- 'workspace': ['pkgs/a', 'pkgs/b'],
- },
- sdk: '^3.5.0',
- ),
- dir('pkgs', [
- dir(
- 'a',
- [
+ test(
+ 'Removes lock files and package configs from inside the workspace',
+ () async {
+ await dir(appPath, [
+ libPubspec(
+ 'myapp',
+ '1.2.3',
+ extras: {
+ 'workspace': ['pkgs/a', 'pkgs/b'],
+ },
+ sdk: '^3.5.0',
+ ),
+ dir('pkgs', [
+ dir('a', [
libPubspec('a', '1.1.1', resolutionWorkspace: true),
dir('test_data', []),
- ],
- ),
- dir(
- 'b',
- [
- libPubspec('b', '1.1.1', resolutionWorkspace: true),
- ],
- ),
- ]),
- ]).create();
- // Directories outside the workspace should not be affected.
- final outideWorkpace = sandbox;
- // Directories of worksace packages should be cleaned.
- final aDir = p.join(sandbox, appPath, 'pkgs', 'a');
- // Directories between workspace root and workspace packages should
- // be cleaned.
- final pkgsDir = p.join(sandbox, appPath, 'pkgs');
- // Directories inside a workspace package should not be cleaned.
- final inside = p.join(aDir, 'test_data');
+ ]),
+ dir('b', [libPubspec('b', '1.1.1', resolutionWorkspace: true)]),
+ ]),
+ ]).create();
+ // Directories outside the workspace should not be affected.
+      final outsideWorkspace = sandbox;
+      // Directories of workspace packages should be cleaned.
+ final aDir = p.join(sandbox, appPath, 'pkgs', 'a');
+ // Directories between workspace root and workspace packages should
+ // be cleaned.
+ final pkgsDir = p.join(sandbox, appPath, 'pkgs');
+ // Directories inside a workspace package should not be cleaned.
+ final inside = p.join(aDir, 'test_data');
- void createLockFileAndPackageConfig(String dir) {
- File(p.join(dir, 'pubspec.lock')).createSync(recursive: true);
- File(p.join(dir, '.dart_tool', 'package_config.json'))
- .createSync(recursive: true);
- }
+ void createLockFileAndPackageConfig(String dir) {
+ File(p.join(dir, 'pubspec.lock')).createSync(recursive: true);
+ File(
+ p.join(dir, '.dart_tool', 'package_config.json'),
+ ).createSync(recursive: true);
+ }
- void validateLockFileAndPackageConfig(
- String dir,
- FileSystemEntityType state,
- ) {
- expect(
- File(p.join(dir, 'pubspec.lock')).statSync().type,
- state,
+ void validateLockFileAndPackageConfig(
+ String dir,
+ FileSystemEntityType state,
+ ) {
+ expect(File(p.join(dir, 'pubspec.lock')).statSync().type, state);
+ expect(
+ File(
+ p.join(dir, '.dart_tool', 'package_config.json'),
+ ).statSync().type,
+ state,
+ );
+ }
+
+ createLockFileAndPackageConfig(sandbox);
+ createLockFileAndPackageConfig(aDir);
+ createLockFileAndPackageConfig(pkgsDir);
+ createLockFileAndPackageConfig(inside);
+
+ await pubGet(
+ environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
+ warning: allOf(
+ contains('Deleting old lock-file: `.${s}pkgs/a${s}pubspec.lock'),
+ isNot(contains('.${s}pkgs/b${s}pubspec.lock')),
+ contains(
+ 'Deleting old package config: '
+ '`.${s}pkgs/a$s.dart_tool${s}package_config.json`',
+ ),
+ contains('Deleting old lock-file: `.${s}pkgs${s}pubspec.lock'),
+ contains(
+ 'Deleting old package config: '
+ '`.${s}pkgs$s.dart_tool${s}package_config.json`',
+ ),
+ contains(
+ 'See https://dart.dev/go/workspaces-stray-files for details.',
+ ),
+ ),
);
- expect(
- File(p.join(dir, '.dart_tool', 'package_config.json')).statSync().type,
- state,
+
+ validateLockFileAndPackageConfig(
+        outsideWorkspace,
+ FileSystemEntityType.file,
);
- }
-
- createLockFileAndPackageConfig(sandbox);
- createLockFileAndPackageConfig(aDir);
- createLockFileAndPackageConfig(pkgsDir);
- createLockFileAndPackageConfig(inside);
-
- await pubGet(
- environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- warning: allOf(
- contains('Deleting old lock-file: `.${s}pkgs/a${s}pubspec.lock'),
- isNot(contains('.${s}pkgs/b${s}pubspec.lock')),
- contains(
- 'Deleting old package config: '
- '`.${s}pkgs/a$s.dart_tool${s}package_config.json`',
- ),
- contains('Deleting old lock-file: `.${s}pkgs${s}pubspec.lock'),
- contains(
- 'Deleting old package config: '
- '`.${s}pkgs$s.dart_tool${s}package_config.json`',
- ),
- contains(
- 'See https://dart.dev/go/workspaces-stray-files for details.',
- ),
- ),
- );
-
- validateLockFileAndPackageConfig(
- outideWorkpace,
- FileSystemEntityType.file,
- );
- validateLockFileAndPackageConfig(aDir, FileSystemEntityType.notFound);
- validateLockFileAndPackageConfig(pkgsDir, FileSystemEntityType.notFound);
- validateLockFileAndPackageConfig(
- inside,
- FileSystemEntityType.file,
- );
- });
+ validateLockFileAndPackageConfig(aDir, FileSystemEntityType.notFound);
+ validateLockFileAndPackageConfig(pkgsDir, FileSystemEntityType.notFound);
+ validateLockFileAndPackageConfig(inside, FileSystemEntityType.file);
+ },
+ );
test('Reports error if workspace doesn\'t form a tree.', () async {
await dir(appPath, [
@@ -955,12 +912,7 @@
'workspace': ['a'],
},
),
- dir(
- 'a',
- [
- libPubspec('a', '1.1.1', resolutionWorkspace: true),
- ],
- ),
+ dir('a', [libPubspec('a', '1.1.1', resolutionWorkspace: true)]),
]),
]).create();
final s = p.separator;
@@ -994,12 +946,7 @@
'workspace': ['a'],
},
),
- dir(
- 'a',
- [
- libPubspec('a', '1.1.1', resolutionWorkspace: true),
- ],
- ),
+ dir('a', [libPubspec('a', '1.1.1', resolutionWorkspace: true)]),
]),
]).create();
final s = p.separator;
@@ -1012,8 +959,7 @@
);
});
- test(
- 'Reports a failure if a workspace pubspec is not nested '
+ test('Reports a failure if a workspace pubspec is not nested '
'inside the parent dir', () async {
await dir(appPath, [
libPubspec(
@@ -1050,24 +996,26 @@
);
});
- test('Reports a failure if a workspace pubspec is not a relative path',
- () async {
- await dir(appPath, [
- libPubspec(
- 'myapp',
- '1.2.3',
- sdk: '^3.5.0',
- extras: {
- 'workspace': [p.join(sandbox, appPath, 'a')],
- },
- ),
- ]).create();
- await pubGet(
- environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- error: contains('"workspace" members must be relative paths'),
- exitCode: DATA,
- );
- });
+ test(
+ 'Reports a failure if a workspace pubspec is not a relative path',
+ () async {
+ await dir(appPath, [
+ libPubspec(
+ 'myapp',
+ '1.2.3',
+ sdk: '^3.5.0',
+ extras: {
+ 'workspace': [p.join(sandbox, appPath, 'a')],
+ },
+ ),
+ ]).create();
+ await pubGet(
+ environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
+ error: contains('"workspace" members must be relative paths'),
+ exitCode: DATA,
+ );
+ },
+ );
test('`upgrade` upgrades all workspace', () async {
final server = await servePackages();
@@ -1097,11 +1045,9 @@
server.serve('bar', '1.5.0');
await pubUpgrade(
environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- output: contains(
- '''
+ output: contains('''
> bar 1.5.0 (was 1.0.0)
-> foo 1.5.0 (was 1.0.0)''',
- ),
+> foo 1.5.0 (was 1.0.0)'''),
);
});
@@ -1138,15 +1084,13 @@
await pubUpgrade(
args: ['--major-versions'],
environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- output: contains(
- '''
+ output: contains('''
Changed 2 constraints in pubspec.yaml:
foo: ^1.0.0 -> ^2.0.0
bar: 1.0.0 -> ^2.0.0
Changed 1 constraint in a${s}pubspec.yaml:
- foo: 1.5.0 -> ^2.0.0''',
- ),
+ foo: 1.5.0 -> ^2.0.0'''),
);
await dir(appPath, [
@@ -1169,87 +1113,83 @@
]),
]).validate();
});
- test('`upgrade --major-versions foo` upgrades foo in all workspace',
- () async {
- final server = await servePackages();
- server.serve('foo', '1.5.0');
- server.serve('foo', '2.0.0');
- server.serve('bar', '1.0.0');
- server.serve('bar', '2.0.0');
- await dir(appPath, [
- libPubspec(
- 'myapp',
- '1.2.3',
- deps: {'foo': '^1.0.0', 'bar': '1.0.0'},
- sdk: '^3.5.0',
- extras: {
- 'workspace': ['a'],
- },
- ),
- dir('a', [
+ test(
+ '`upgrade --major-versions foo` upgrades foo in all workspace',
+ () async {
+ final server = await servePackages();
+ server.serve('foo', '1.5.0');
+ server.serve('foo', '2.0.0');
+ server.serve('bar', '1.0.0');
+ server.serve('bar', '2.0.0');
+ await dir(appPath, [
libPubspec(
- 'a',
- '1.0.0',
- deps: {'foo': '1.5.0'},
- resolutionWorkspace: true,
+ 'myapp',
+ '1.2.3',
+ deps: {'foo': '^1.0.0', 'bar': '1.0.0'},
+ sdk: '^3.5.0',
+ extras: {
+ 'workspace': ['a'],
+ },
),
- ]),
- ]).create();
+ dir('a', [
+ libPubspec(
+ 'a',
+ '1.0.0',
+ deps: {'foo': '1.5.0'},
+ resolutionWorkspace: true,
+ ),
+ ]),
+ ]).create();
- await pubGet(
- environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- output: contains('+ foo 1.5.0'),
- );
- await pubUpgrade(
- args: ['--major-versions', 'foo'],
- environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- output: contains(
- '''
+ await pubGet(
+ environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
+ output: contains('+ foo 1.5.0'),
+ );
+ await pubUpgrade(
+ args: ['--major-versions', 'foo'],
+ environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
+ output: contains('''
Changed 1 constraint in pubspec.yaml:
foo: ^1.0.0 -> ^2.0.0
Changed 1 constraint in a${s}pubspec.yaml:
- foo: 1.5.0 -> ^2.0.0''',
- ),
- );
- // Second run should mention "any pubspec.yaml".
- await pubUpgrade(
- args: ['--major-versions', 'foo'],
- environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- output: contains(
- '''
-No changes to any pubspec.yaml!''',
- ),
- );
- await pubUpgrade(
- args: ['--major-versions', 'foo', '--dry-run'],
- environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- output: contains(
- '''
-No changes would be made to any pubspec.yaml!''',
- ),
- );
+ foo: 1.5.0 -> ^2.0.0'''),
+ );
+ // Second run should mention "any pubspec.yaml".
+ await pubUpgrade(
+ args: ['--major-versions', 'foo'],
+ environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
+ output: contains('''
+No changes to any pubspec.yaml!'''),
+ );
+ await pubUpgrade(
+ args: ['--major-versions', 'foo', '--dry-run'],
+ environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
+ output: contains('''
+No changes would be made to any pubspec.yaml!'''),
+ );
- await dir(appPath, [
- libPubspec(
- 'myapp',
- '1.2.3',
- deps: {'foo': '^2.0.0', 'bar': '1.0.0'},
- sdk: '^3.5.0',
- extras: {
- 'workspace': ['a'],
- },
- ),
- dir('a', [
+ await dir(appPath, [
libPubspec(
- 'a',
- '1.0.0',
- deps: {'foo': '^2.0.0'},
- resolutionWorkspace: true,
+ 'myapp',
+ '1.2.3',
+ deps: {'foo': '^2.0.0', 'bar': '1.0.0'},
+ sdk: '^3.5.0',
+ extras: {
+ 'workspace': ['a'],
+ },
),
- ]),
- ]).validate();
- });
+ dir('a', [
+ libPubspec(
+ 'a',
+ '1.0.0',
+ deps: {'foo': '^2.0.0'},
+ resolutionWorkspace: true,
+ ),
+ ]),
+ ]).validate();
+ },
+ );
test('`upgrade --tighten` updates all workspace', () async {
final server = await servePackages();
@@ -1290,15 +1230,13 @@
await pubUpgrade(
args: ['--tighten'],
environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- output: contains(
- '''
+ output: contains('''
Changed 2 constraints in pubspec.yaml:
foo: ^1.0.0 -> ^1.5.0
bar: ^1.0.0 -> ^1.5.0
Changed 1 constraint in a${s}pubspec.yaml:
- foo: ^1.0.0 -> ^1.5.0''',
- ),
+ foo: ^1.0.0 -> ^1.5.0'''),
);
await dir(appPath, [
@@ -1370,8 +1308,7 @@
await pubUpgrade(
args: ['--tighten', '--major-versions'],
environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- output: contains(
- '''
+ output: contains('''
Changed 2 constraints in pubspec.yaml:
foo: ^1.0.0 -> ^2.0.0
bar: ^1.0.0 -> ^1.5.0
@@ -1380,8 +1317,7 @@
foo: ^1.0.0 -> ^2.0.0
Changed 1 constraint in b${s}pubspec.yaml:
- bar: ^1.0.0 -> ^1.5.0''',
- ),
+ bar: ^1.0.0 -> ^1.5.0'''),
);
});
@@ -1402,18 +1338,13 @@
sdk: '^3.5.0',
),
dir('pkgs', [
- dir('a', [
- libPubspec(
- 'a',
- '1.0.0',
- resolutionWorkspace: true,
- ),
- ]),
+ dir('a', [libPubspec('a', '1.0.0', resolutionWorkspace: true)]),
]),
]).create();
await pubGet(
environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- error: 'Because myapp depends on both a 2.0.0 and a, '
+ error:
+ 'Because myapp depends on both a 2.0.0 and a, '
'version solving failed.',
);
});
@@ -1430,13 +1361,7 @@
},
sdk: '^3.5.0',
),
- dir('a', [
- libPubspec(
- 'a',
- '1.0.0',
- resolutionWorkspace: true,
- ),
- ]),
+ dir('a', [libPubspec('a', '1.0.0', resolutionWorkspace: true)]),
dir('b', [
libPubspec(
'a', // Has same name as sibling.
@@ -1453,86 +1378,73 @@
);
});
- test('Reports error if two members of workspace override the same package',
- () async {
- final server = await servePackages();
- server.serve('foo', '1.0.0');
- await dir(appPath, [
- libPubspec(
- 'myapp',
- '1.2.3',
- deps: {'foo': 'any'},
- extras: {
- 'dependency_overrides': {
- 'foo': {'path': '../foo'},
- },
- 'workspace': ['a'],
- },
- sdk: '^3.5.0',
- ),
- dir('a', [
+ test(
+ 'Reports error if two members of workspace override the same package',
+ () async {
+ final server = await servePackages();
+ server.serve('foo', '1.0.0');
+ await dir(appPath, [
libPubspec(
- 'a',
- '1.0.0',
- resolutionWorkspace: true,
+ 'myapp',
+ '1.2.3',
+ deps: {'foo': 'any'},
+ extras: {
+ 'dependency_overrides': {
+ 'foo': {'path': '../foo'},
+ },
+ 'workspace': ['a'],
+ },
+ sdk: '^3.5.0',
),
- pubspecOverrides({
- 'dependency_overrides': {'foo': '2.0.0'},
- }),
- ]),
- ]).create();
- await pubGet(
- environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- error: '''
+ dir('a', [
+ libPubspec('a', '1.0.0', resolutionWorkspace: true),
+ pubspecOverrides({
+ 'dependency_overrides': {'foo': '2.0.0'},
+ }),
+ ]),
+ ]).create();
+ await pubGet(
+ environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
+ error: '''
The package `foo` is overridden in both:
package `myapp` at `.` and 'a' at `.${s}a`.
Consider removing one of the overrides.''',
- );
- });
+ );
+ },
+ );
test(
- 'rejects workspace with non-workspace between root and workspace package',
- () async {
- await dir(appPath, [
- libPubspec(
- 'myapp',
- '1.2.3',
- extras: {
- 'workspace': ['pkgs/a'],
- },
- sdk: '^3.5.0',
- ),
- dir('pkgs', [
+ 'rejects workspace with non-workspace between root and workspace package',
+ () async {
+ await dir(appPath, [
libPubspec(
- 'in_the_way',
- '1.0.0',
+ 'myapp',
+ '1.2.3',
+ extras: {
+ 'workspace': ['pkgs/a'],
+ },
+ sdk: '^3.5.0',
),
- dir('a', [
- libPubspec(
- 'a',
- '1.0.0',
- resolutionWorkspace: true,
- ),
+ dir('pkgs', [
+ libPubspec('in_the_way', '1.0.0'),
+ dir('a', [libPubspec('a', '1.0.0', resolutionWorkspace: true)]),
]),
- ]),
- ]).create();
- await pubGet(
- environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- error: contains(
- 'The file `.${s}pkgs${s}pubspec.yaml` is located in a directory '
- 'between the workspace root at',
- ),
- );
- });
+ ]).create();
+ await pubGet(
+ environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
+ error: contains(
+ 'The file `.${s}pkgs${s}pubspec.yaml` is located in a directory '
+ 'between the workspace root at',
+ ),
+ );
+ },
+ );
  test('Doesn\'t complain about pubspecs above the workspace', () async {
// Regression test for https://github.com/dart-lang/pub/issues/4463
await dir(appPath, [
- libPubspec(
- 'not_in_the_way',
- '1.0.0',
- ),
+ libPubspec('not_in_the_way', '1.0.0'),
dir('pkgs', [
libPubspec(
'myapp',
@@ -1542,13 +1454,7 @@
},
sdk: '^3.5.0',
),
- dir('a', [
- libPubspec(
- 'a',
- '1.0.0',
- resolutionWorkspace: true,
- ),
- ]),
+ dir('a', [libPubspec('a', '1.0.0', resolutionWorkspace: true)]),
]),
]).create();
await pubGet(
@@ -1643,8 +1549,7 @@
await pub.shouldExit(SUCCESS);
});
- test(
- 'published packages with `resolution: workspace` '
+ test('published packages with `resolution: workspace` '
'and `workspace` sections can be consumed out of context.', () async {
final server = await servePackages();
server.serve(
@@ -1683,9 +1588,7 @@
sdk: '^3.5.0',
),
dir('pkgs', [
- dir('a', [
- libPubspec('a', '1.1.1', resolutionWorkspace: true),
- ]),
+ dir('a', [libPubspec('a', '1.1.1', resolutionWorkspace: true)]),
]),
]).create();
await pubGet(
@@ -1719,13 +1622,7 @@
'workspace': ['b'],
},
),
- dir('b', [
- libPubspec(
- 'b',
- '1.2.2',
- resolutionWorkspace: true,
- ),
- ]),
+ dir('b', [libPubspec('b', '1.2.2', resolutionWorkspace: true)]),
]),
]),
]).create();
@@ -1758,8 +1655,9 @@
String? part4,
String? part5,
]) {
- return json
- .encode(p.canonicalize(p.join(part1, part2, part3, part4, part5)));
+ return json.encode(
+ p.canonicalize(p.join(part1, part2, part3, part4, part5)),
+ );
}
await runPub(
@@ -1786,54 +1684,47 @@
);
});
- test(
- '"workspace" and "resolution" fields can be overridden by '
- '`pubspec_overrides`',
- () async {
- final server = await servePackages();
- server.serve('foo', '1.0.0');
- server.serve('bar', '1.0.0');
- await dir(appPath, [
- libPubspec(
- 'myapp',
- '1.2.3',
- extras: {
- 'workspace': ['pkgs/a'],
- },
- sdk: '^3.5.0',
- ),
- dir('pkgs', [
- dir('a', [
- libPubspec('a', '1.1.1', sdk: '^3.5.0', deps: {'foo': '^1.0.0'}),
- file('pubspec_overrides.yaml', 'resolution: workspace'),
- ]),
- dir(
+ test('"workspace" and "resolution" fields can be overridden by '
+ '`pubspec_overrides`', () async {
+ final server = await servePackages();
+ server.serve('foo', '1.0.0');
+ server.serve('bar', '1.0.0');
+ await dir(appPath, [
+ libPubspec(
+ 'myapp',
+ '1.2.3',
+ extras: {
+ 'workspace': ['pkgs/a'],
+ },
+ sdk: '^3.5.0',
+ ),
+ dir('pkgs', [
+ dir('a', [
+ libPubspec('a', '1.1.1', sdk: '^3.5.0', deps: {'foo': '^1.0.0'}),
+ file('pubspec_overrides.yaml', 'resolution: workspace'),
+ ]),
+ dir('b', [
+ libPubspec(
'b',
- [
- libPubspec(
- 'b',
- '1.0.0',
- deps: {'bar': '^1.0.0'},
- resolutionWorkspace: true,
- ),
- ],
+ '1.0.0',
+ deps: {'bar': '^1.0.0'},
+ resolutionWorkspace: true,
),
]),
- ]).create();
- await pubGet(
- environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- output: contains('+ foo'),
- );
- await dir(
- appPath,
- [file('pubspec_overrides.yaml', 'workspace: ["pkgs/b/"]')],
- ).create();
- await pubGet(
- environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
- output: contains('+ bar'),
- );
- },
- );
+ ]),
+ ]).create();
+ await pubGet(
+ environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
+ output: contains('+ foo'),
+ );
+ await dir(appPath, [
+ file('pubspec_overrides.yaml', 'workspace: ["pkgs/b/"]'),
+ ]).create();
+ await pubGet(
+ environment: {'_PUB_TEST_SDK_VERSION': '3.5.0'},
+ output: contains('+ bar'),
+ );
+ });
}
final s = p.separator;
diff --git a/tool/extract_all_pub_dev.dart b/tool/extract_all_pub_dev.dart
index e140f48..8ad1adf 100644
--- a/tool/extract_all_pub_dev.dart
+++ b/tool/extract_all_pub_dev.dart
@@ -79,8 +79,10 @@
log.message('Skipping $packageName - already done');
continue;
}
- log.message('Processing all versions of $packageName '
- '[+${alreadyDonePackages.length}, - ${failures.length}]');
+ log.message(
+ 'Processing all versions of $packageName '
+ '[+${alreadyDonePackages.length}, - ${failures.length}]',
+ );
final resource = await pool.request();
scheduleMicrotask(() async {
try {
diff --git a/tool/test-bin/pub_command_runner.dart b/tool/test-bin/pub_command_runner.dart
index ec420af..3ef0630 100644
--- a/tool/test-bin/pub_command_runner.dart
+++ b/tool/test-bin/pub_command_runner.dart
@@ -99,15 +99,11 @@
return -1;
}
final packageConfig = executable.packageConfig;
- final process = await Process.start(
- Platform.executable,
- [
- if (packageConfig != null) '--packages=$packageConfig',
- executable.executable,
- ...argResults!.rest.skip(1),
- ],
- mode: ProcessStartMode.inheritStdio,
- );
+ final process = await Process.start(Platform.executable, [
+ if (packageConfig != null) '--packages=$packageConfig',
+ executable.executable,
+ ...argResults!.rest.skip(1),
+ ], mode: ProcessStartMode.inheritStdio);
return await process.exitCode;
}
@@ -118,9 +114,7 @@
Runner() : super('pub_command_runner', 'Tests the embeddable pub command.') {
addCommand(
- pubCommand(
- isVerbose: () => _results.flag('verbose'),
- )
+ pubCommand(isVerbose: () => _results.flag('verbose'))
..addSubcommand(ThrowingCommand())
..addSubcommand(EnsurePubspecResolvedCommand())
..addSubcommand(GetExecutableForCommandCommand()),
diff --git a/tool/test.dart b/tool/test.dart
index fd173b4..732c82a 100755
--- a/tool/test.dart
+++ b/tool/test.dart
@@ -29,8 +29,9 @@
final sub = ProcessSignal.sigint.watch().listen((signal) {
testProcess?.kill(signal);
});
- final pubSnapshotFilename =
- p.absolute(p.join('.dart_tool', '_pub', 'pub.dart.snapshot.dart2'));
+ final pubSnapshotFilename = p.absolute(
+ p.join('.dart_tool', '_pub', 'pub.dart.snapshot.dart2'),
+ );
try {
final stopwatch = Stopwatch()..start();
stderr.write('Building snapshot...');