Version 3.0.0-335.0.dev
Merge 4b3bd7564c3a35c2625b5c6619be768babb49b66 into dev
diff --git a/pkg/analysis_server/benchmark/perf/benchmark_uploader.dart b/pkg/analysis_server/benchmark/perf/benchmark_uploader.dart
new file mode 100644
index 0000000..309ee6c
--- /dev/null
+++ b/pkg/analysis_server/benchmark/perf/benchmark_uploader.dart
@@ -0,0 +1,124 @@
+// Copyright (c) 2023, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// This script runs the benchmarks in this directory and uploads the
+// results to cloud storage. These results are then ingested by our
+// performance measurement system.
+//
+// The script only works when run on a LUCI builder in the dart-ci project,
+// and uploads results to paths within gs://dart-test-results/benchmark-results.
+//
+// This script is needed to run benchmarks on platforms that we only have
+// in our LUCI CI system, not in our performance lab, such as Windows.
+// The script is currently only used and tested on Windows.
+
+import 'dart:convert';
+import 'dart:io';
+
+void main() async {
+ try {
+ final results = <Map<String, dynamic>>[];
+ results.addAll(await runBenchmarks(warm: true));
+ results.addAll(await runBenchmarks(warm: false));
+
+ if (!Platform.isWindows) {
+ print("Analyzer benchmark uploads only run on Windows");
+ exit(1);
+ }
+ final targetResults = [
+ for (final result in results)
+ {
+ 'cpu': 'Windows VM',
+ 'machineType': 'windows-x64',
+ 'target': 'dart-analysis-server-external',
+ ...result,
+ }
+ ];
+ await uploadResults(targetResults);
+ } catch (e, st) {
+ print('$e\n$st');
+ }
+}
+
+Future<List<Map<String, dynamic>>> runBenchmarks({required bool warm}) async {
+ final temperature = warm ? 'warm' : 'cold';
+ final benchmarkResults = await Process.run(Platform.resolvedExecutable, [
+ 'pkg/analysis_server/benchmark/benchmarks.dart',
+ 'run',
+ if (warm) 'analysis-server' else 'analysis-server-cold',
+ ]);
+
+ print(benchmarkResults.stdout);
+ print(benchmarkResults.stderr);
+ if (benchmarkResults.exitCode != 0) {
+ throw 'Failed to run $temperature benchmarks';
+ }
+ final result = jsonDecode(LineSplitter()
+ .convert(benchmarkResults.stdout as String)
+ .where((line) => line.startsWith('{"benchmark":'))
+ .single);
+
+ return <Map<String, dynamic>>[
+ {
+ 'benchmark': 'analysis-server-$temperature-memory',
+ 'metric': 'MemoryUse',
+ 'score': result['result']['analysis-server-$temperature-memory']['bytes'],
+ },
+ {
+ 'benchmark': 'analysis-server-$temperature-analysis',
+ 'metric': 'RunTimeRaw',
+ 'score': result['result']['analysis-server-$temperature-analysis']
+ ['micros'],
+ },
+ if (warm)
+ {
+ 'benchmark': 'analysis-server-edit',
+ 'metric': 'RunTimeRaw',
+ 'score': result['result']['analysis-server-edit']['micros'],
+ },
+ if (warm)
+ {
+ 'benchmark': 'analysis-server-completion',
+ 'metric': 'RunTimeRaw',
+ 'score': result['result']['analysis-server-completion']['micros'],
+ }
+ ];
+}
+
+Future<void> uploadResults(List<Map<String, dynamic>> results) async {
+  // Create the JSON results in the desired format and
+  // write the results file to cloud storage.
+ final tempDir =
+ await Directory.systemTemp.createTemp('analysis-server-benchmarks');
+ try {
+ final resultsJson = jsonEncode(results);
+ final resultsFile = File.fromUri(tempDir.uri.resolve('results.json'));
+ resultsFile.writeAsStringSync(resultsJson, flush: true);
+
+ final taskId = Platform.environment['SWARMING_TASK_ID'] ?? "test_task_id";
+ if (taskId == "test_task_id") {
+ print('Benchmark_uploader requires SWARMING_TASK_ID in the environment.');
+ }
+ final cloudStoragePath =
+ 'gs://dart-test-results/benchmarks/$taskId/results.json';
+ final args = [
+ 'third_party/gsutil/gsutil',
+ 'cp',
+ resultsFile.path,
+ cloudStoragePath
+ ];
+ final python = 'python3.exe';
+ print('Running $python ${args.join(' ')}');
+ final commandResult = await Process.run(python, args);
+ final exitCode = commandResult.exitCode;
+ print(commandResult.stdout);
+ print(commandResult.stderr);
+ print('exit code: $exitCode');
+ if (exitCode != 0) {
+ throw 'Gsutil upload failed. Exit code $exitCode';
+ }
+ } finally {
+ await tempDir.delete(recursive: true);
+ }
+}
diff --git a/pkg/dartdev/lib/src/commands/test.dart b/pkg/dartdev/lib/src/commands/test.dart
index d12c689..0d594fc 100644
--- a/pkg/dartdev/lib/src/commands/test.dart
+++ b/pkg/dartdev/lib/src/commands/test.dart
@@ -27,6 +27,16 @@
}
@override
+ void printUsage() {
+ print('''Usage: dart test [arguments]
+
+Note: flags and options for this command are provided by the project's package:test dependency.
+If package:test is not included as a dev_dependency in the project's pubspec.yaml, no flags or options will be listed.
+
+Run "${runner!.executableName} help" to see global options.''');
+ }
+
+ @override
FutureOr<int> run() async {
final args = argResults!;
try {
diff --git a/pkg/dartdev/test/commands/test_test.dart b/pkg/dartdev/test/commands/test_test.dart
index 3aea6a1..282a7cd 100644
--- a/pkg/dartdev/test/commands/test_test.dart
+++ b/pkg/dartdev/test/commands/test_test.dart
@@ -43,7 +43,7 @@
final result = await p.run(['help', 'test']);
expect(result.exitCode, 0);
- expect(result.stdout, contains(' tests for a project'));
+ expect(result.stdout, contains('Usage: dart test [arguments]'));
expect(result.stderr, isEmpty);
});
@@ -63,13 +63,13 @@
var resultHelp = await p.run(['test', '--help']);
expect(resultHelp.stderr, isEmpty);
- expect(resultHelp.stdout, '''
-No pubspec.yaml file found - run this command in your project folder.
-
-Run tests for a project.
+ expect(resultHelp.stdout,
+ '''No pubspec.yaml file found - run this command in your project folder.
Usage: dart test [arguments]
+Note: flags and options for this command are provided by the project's package:test dependency.
+If package:test is not included as a dev_dependency in the project's pubspec.yaml, no flags or options will be listed.
Run "dart help" to see global options.
''');
diff --git a/pkg/nnbd_migration/lib/src/edge_builder.dart b/pkg/nnbd_migration/lib/src/edge_builder.dart
index 6c7cf83..bfb2a59 100644
--- a/pkg/nnbd_migration/lib/src/edge_builder.dart
+++ b/pkg/nnbd_migration/lib/src/edge_builder.dart
@@ -2690,7 +2690,14 @@
// Be over conservative with public methods' arguments:
// Unless we have reasons for non-nullability, assume they are nullable.
// Soft edge to `always` node does exactly this.
- if (declaredElement.isPublic &&
+ bool isOverride = false;
+ final thisClass = declaredElement.enclosingElement;
+ if (thisClass is InterfaceElement) {
+ final name = Name(thisClass.library.source.uri, declaredElement.name);
+ isOverride = _inheritanceManager.getOverridden2(thisClass, name) != null;
+ }
+ if (!isOverride &&
+ declaredElement.isPublic &&
declaredElement is! PropertyAccessorElement &&
// operator == treats `null` specially.
!(declaredElement.isOperator && declaredElement.name == '==')) {
diff --git a/pkg/nnbd_migration/test/api_test.dart b/pkg/nnbd_migration/test/api_test.dart
index 6e879b9..54ac36a 100644
--- a/pkg/nnbd_migration/test/api_test.dart
+++ b/pkg/nnbd_migration/test/api_test.dart
@@ -8493,6 +8493,30 @@
await _checkSingleFileChanges(content, expected);
}
+ Future<void> test_override_parameter_type_unknown() async {
+ var content = '''
+abstract class Base {
+ void f(int/*!*/ i, int/*!*/ j);
+}
+class Derived extends Base {
+ void f(int i, int j) {
+ i + 1;
+ }
+}
+''';
+ var expected = '''
+abstract class Base {
+ void f(int i, int j);
+}
+class Derived extends Base {
+ void f(int i, int j) {
+ i + 1;
+ }
+}
+''';
+ await _checkSingleFileChanges(content, expected);
+ }
+
Future<void> test_override_return_type_non_nullable() async {
var content = '''
abstract class Base {
diff --git a/tools/VERSION b/tools/VERSION
index 3d21436..0d19f29 100644
--- a/tools/VERSION
+++ b/tools/VERSION
@@ -27,5 +27,5 @@
MAJOR 3
MINOR 0
PATCH 0
-PRERELEASE 334
+PRERELEASE 335
PRERELEASE_PATCH 0
diff --git a/tools/bots/test_matrix.json b/tools/bots/test_matrix.json
index 775f809..67a998b 100644
--- a/tools/bots/test_matrix.json
+++ b/tools/bots/test_matrix.json
@@ -2976,8 +2976,7 @@
{
"builders": [
"analyzer-linux-release",
- "analyzer-mac-release",
- "analyzer-win-release"
+ "analyzer-mac-release"
],
"meta": {
"description": "This configuration is used by the analyzer builders."
@@ -3068,6 +3067,96 @@
},
{
"builders": [
+ "analyzer-win-release"
+ ],
+ "meta": {
+    "description": "This configuration is used by the analyzer Windows builder."
+ },
+ "steps": [
+ {
+ "name": "build dart",
+ "script": "tools/build.py",
+ "arguments": [
+ "create_sdk",
+ "utils/dartanalyzer"
+ ]
+ },
+ {
+ "name": "analyzer unit tests",
+ "arguments": [
+ "-nanalyzer-unittest-asserts-${mode}-${system}",
+ "pkg/analyzer"
+ ],
+ "shards": 4,
+ "fileset": "analyzer_unit_tests"
+ },
+ {
+ "name": "analysis_server unit tests",
+ "arguments": [
+ "-nanalyzer-unittest-asserts-${mode}-${system}",
+ "pkg/analysis_server"
+ ],
+ "shards": 4,
+ "fileset": "analyzer_unit_tests"
+ },
+ {
+ "name": "nnbd_migration unit tests",
+ "arguments": [
+ "-nanalyzer-unittest-asserts-${mode}-${system}",
+ "pkg/nnbd_migration"
+ ],
+ "shards": 1,
+ "fileset": "analyzer_unit_tests"
+ },
+ {
+ "name": "analyze tests enable-asserts",
+ "arguments": [
+ "-nanalyzer-asserts-${system}"
+ ]
+ },
+ {
+ "name": "analyze migrated tests enable-asserts",
+ "arguments": [
+ "-nanalyzer-asserts-${system}",
+ "corelib",
+ "ffi",
+ "language",
+ "lib",
+ "standalone"
+ ]
+ },
+ {
+ "name": "analyze pkg tests enable-asserts",
+ "arguments": [
+ "-nanalyzer-asserts-${system}",
+ "pkg"
+ ]
+ },
+ {
+ "name": "analyzer_cli unit tests",
+ "arguments": [
+ "-nanalyzer-unittest-asserts-${mode}-${system}",
+ "pkg/analyzer_cli"
+ ]
+ },
+ {
+ "name": "analyzer_plugin unit tests",
+ "arguments": [
+ "-nanalyzer-unittest-asserts-${mode}-${system}",
+ "pkg/analyzer_plugin"
+ ]
+ },
+ {
+ "name": "benchmark analysis server",
+ "script": "out/ReleaseX64/dart-sdk/bin/dart",
+ "arguments": [
+ "pkg/analysis_server/benchmark/perf/benchmark_uploader.dart"
+ ]
+ }
+ ]
+ },
+ {
+ "builders": [
"analyzer-analysis-server-linux"
],
"meta": {