Merge the null_safety branch into master (#49)
diff --git a/.travis.yml b/.travis.yml
index a49fc6f..785c7c2 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,21 +1,34 @@
language: dart
dart:
- - dev
-dart_task:
- - test: -p vm,chrome
- - dartanalyzer
+ - be/raw/latest
-matrix:
+jobs:
include:
- # Only validate formatting using the dev release
- - dart: dev
- dart_task: dartfmt
+ - stage: analyze_and_format
+ name: "Analyze"
+ dart: be/raw/latest
+ os: linux
+ script: dartanalyzer --enable-experiment=non-nullable --fatal-warnings --fatal-infos .
+ - stage: analyze_and_format
+ name: "Format"
+ dart: be/raw/latest
+ os: linux
+ script: dartfmt -n --set-exit-if-changed .
+ - stage: test
+ name: "Vm Tests"
+ dart: be/raw/latest
+ os: linux
+ script: pub run --enable-experiment=non-nullable test -p vm
+
+stages:
+ - analyze_and_format
+ - test
# Only building master means that we don't run two builds for each pull request.
branches:
- only: [master]
+ only: [master, null_safety]
cache:
- directories:
- - $HOME/.pub-cache
+ directories:
+ - $HOME/.pub-cache
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 321c371..bc7c9ef 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 0.11.0-nullsafety
+
+* Migrate to null safety
+
## 0.10.9
* Fix a number of document comment issues.
diff --git a/analysis_options.yaml b/analysis_options.yaml
index 4f9dfb0..86c8367 100644
--- a/analysis_options.yaml
+++ b/analysis_options.yaml
@@ -1,5 +1,9 @@
include: package:pedantic/analysis_options.yaml
+analyzer:
+ enable-experiment:
+ - non-nullable
+
linter:
rules:
- comment_references
diff --git a/lib/builder.dart b/lib/builder.dart
index e6b8d82..5c56ca4 100644
--- a/lib/builder.dart
+++ b/lib/builder.dart
@@ -21,9 +21,7 @@
/// Adds an entry mapping the [targetOffset] to [source].
void addFromOffset(SourceLocation source, SourceFile targetFile,
int targetOffset, String identifier) {
- if (targetFile == null) {
- throw ArgumentError('targetFile cannot be null');
- }
+ ArgumentError.checkNotNull(targetFile, 'targetFile');
_entries.add(Entry(source, targetFile.location(targetOffset), identifier));
}
@@ -33,7 +31,7 @@
/// `isIdentifier` set to true, this entry is considered to represent an
/// identifier whose value will be stored in the source map. [isIdentifier]
/// takes precedence over [target]'s `isIdentifier` value.
- void addSpan(SourceSpan source, SourceSpan target, {bool isIdentifier}) {
+ void addSpan(SourceSpan source, SourceSpan target, {bool? isIdentifier}) {
isIdentifier ??= source is SourceMapSpan ? source.isIdentifier : false;
var name = isIdentifier ? source.text : null;
@@ -42,7 +40,7 @@
/// Adds an entry mapping [target] to [source].
void addLocation(
- SourceLocation source, SourceLocation target, String identifier) {
+ SourceLocation source, SourceLocation target, String? identifier) {
_entries.add(Entry(source, target, identifier));
}
@@ -64,7 +62,7 @@
final SourceLocation target;
/// An identifier name, when this location is the start of an identifier.
- final String identifierName;
+ final String? identifierName;
/// Creates a new [Entry] mapping [target] to [source].
Entry(this.source, this.target, this.identifierName);
diff --git a/lib/parser.dart b/lib/parser.dart
index e3044aa..1f8b817 100644
--- a/lib/parser.dart
+++ b/lib/parser.dart
@@ -23,7 +23,8 @@
// the string representation.
// TODO(tjblasi): Ignore the first line of [jsonMap] if the JSON safety string
// `)]}'` begins the string representation of the map.
-Mapping parse(String jsonMap, {Map<String, Map> otherMaps, mapUrl}) =>
+Mapping parse(String jsonMap,
+ {Map<String, Map>? otherMaps, /*String|Uri*/ Object? mapUrl}) =>
parseJson(jsonDecode(jsonMap), otherMaps: otherMaps, mapUrl: mapUrl);
/// Parses a source map or source map bundle directly from a json string.
@@ -31,7 +32,8 @@
/// [mapUrl], which may be either a [String] or a [Uri], indicates the URL of
/// the source map file itself. If it's passed, any URLs in the source
/// map will be interpreted as relative to this URL when generating spans.
-Mapping parseExtended(String jsonMap, {Map<String, Map> otherMaps, mapUrl}) =>
+Mapping parseExtended(String jsonMap,
+ {Map<String, Map>? otherMaps, /*String|Uri*/ Object? mapUrl}) =>
parseJsonExtended(jsonDecode(jsonMap),
otherMaps: otherMaps, mapUrl: mapUrl);
@@ -40,8 +42,8 @@
/// [mapUrl], which may be either a [String] or a [Uri], indicates the URL of
/// the source map file itself. If it's passed, any URLs in the source
/// map will be interpreted as relative to this URL when generating spans.
-Mapping parseJsonExtended(/*List|Map*/ json,
- {Map<String, Map> otherMaps, mapUrl}) {
+Mapping parseJsonExtended(/*List|Map*/ Object? json,
+ {Map<String, Map>? otherMaps, /*String|Uri*/ Object? mapUrl}) {
if (json is List) {
return MappingBundle.fromJson(json, mapUrl: mapUrl);
}
@@ -53,7 +55,8 @@
/// [mapUrl], which may be either a [String] or a [Uri], indicates the URL of
/// the source map file itself. If it's passed, any URLs in the source
/// map will be interpreted as relative to this URL when generating spans.
-Mapping parseJson(Map map, {Map<String, Map> otherMaps, mapUrl}) {
+Mapping parseJson(Map map,
+ {Map<String, Map>? otherMaps, /*String|Uri*/ Object? mapUrl}) {
if (map['version'] != 3) {
throw ArgumentError('unexpected source map version: ${map["version"]}. '
'Only version 3 is supported.');
@@ -79,12 +82,12 @@
/// [uri] is the optional location of the output file to find the span for
/// to disambiguate cases where a mapping may have different mappings for
/// different output files.
- SourceMapSpan spanFor(int line, int column,
- {Map<String, SourceFile> files, String uri});
+ SourceMapSpan? spanFor(int line, int column,
+ {Map<String, SourceFile>? files, String? uri});
/// Returns the span associated with [location].
- SourceMapSpan spanForLocation(SourceLocation location,
- {Map<String, SourceFile> files}) {
+ SourceMapSpan? spanForLocation(SourceLocation location,
+ {Map<String, SourceFile>? files}) {
return spanFor(location.line, location.column,
uri: location.sourceUrl?.toString(), files: files);
}
@@ -103,8 +106,8 @@
final List<Mapping> _maps = <Mapping>[];
/// Creates a section mapping from json.
- MultiSectionMapping.fromJson(List sections, Map<String, Map> otherMaps,
- {mapUrl}) {
+ MultiSectionMapping.fromJson(List sections, Map<String, Map>? otherMaps,
+ {/*String|Uri*/ Object? mapUrl}) {
for (var section in sections) {
var offset = section['offset'];
if (offset == null) throw FormatException('section missing offset');
@@ -124,12 +127,13 @@
if (url != null && map != null) {
throw FormatException("section can't use both url and map entries");
} else if (url != null) {
- if (otherMaps == null || otherMaps[url] == null) {
+ var other = otherMaps?[url];
+ if (otherMaps == null || other == null) {
throw FormatException(
'section contains refers to $url, but no map was '
'given for it. Make sure a map is passed in "otherMaps"');
}
- _maps.add(parseJson(otherMaps[url], otherMaps: otherMaps, mapUrl: url));
+ _maps.add(parseJson(other, otherMaps: otherMaps, mapUrl: url));
} else if (map != null) {
_maps.add(parseJson(map, otherMaps: otherMaps, mapUrl: mapUrl));
} else {
@@ -141,7 +145,7 @@
}
}
- int _indexFor(line, column) {
+ int _indexFor(int line, int column) {
for (var i = 0; i < _lineStart.length; i++) {
if (line < _lineStart[i]) return i - 1;
if (line == _lineStart[i] && column < _columnStart[i]) return i - 1;
@@ -150,8 +154,8 @@
}
@override
- SourceMapSpan spanFor(int line, int column,
- {Map<String, SourceFile> files, String uri}) {
+ SourceMapSpan? spanFor(int line, int column,
+ {Map<String, SourceFile>? files, String? uri}) {
// TODO(jacobr): perhaps verify that targetUrl matches the actual uri
// or at least ends in the same file name.
var index = _indexFor(line, column);
@@ -183,7 +187,7 @@
MappingBundle();
- MappingBundle.fromJson(List json, {String mapUrl}) {
+ MappingBundle.fromJson(List json, {/*String|Uri*/ Object? mapUrl}) {
for (var map in json) {
addMapping(parseJson(map, mapUrl: mapUrl) as SingleMapping);
}
@@ -192,7 +196,10 @@
void addMapping(SingleMapping mapping) {
// TODO(jacobr): verify that targetUrl is valid uri instead of a windows
// path.
- _mappings[mapping.targetUrl] = mapping;
+ // TODO: Remove type arg https://github.com/dart-lang/sdk/issues/42227
+ var targetUrl = ArgumentError.checkNotNull<String>(
+ mapping.targetUrl, 'mapping.targetUrl');
+ _mappings[targetUrl] = mapping;
}
/// Encodes the Mapping mappings as a json map.
@@ -210,11 +217,10 @@
bool containsMapping(String url) => _mappings.containsKey(url);
@override
- SourceMapSpan spanFor(int line, int column,
- {Map<String, SourceFile> files, String uri}) {
- if (uri == null) {
- throw ArgumentError.notNull('uri');
- }
+ SourceMapSpan? spanFor(int line, int column,
+ {Map<String, SourceFile>? files, String? uri}) {
+ // TODO: Remove type arg https://github.com/dart-lang/sdk/issues/42227
+ uri = ArgumentError.checkNotNull<String>(uri, 'uri');
// Find the longest suffix of the uri that matches the sourcemap
// where the suffix starts after a path segment boundary.
@@ -232,9 +238,10 @@
for (var i = 0; i < uri.length; ++i) {
if (onBoundary) {
var candidate = uri.substring(i);
- if (_mappings.containsKey(candidate)) {
- return _mappings[candidate]
- .spanFor(line, column, files: files, uri: candidate);
+ var candidateMapping = _mappings[candidate];
+ if (candidateMapping != null) {
+ return candidateMapping.spanFor(line, column,
+ files: files, uri: candidate);
}
}
onBoundary = separatorCodeUnits.contains(uri.codeUnitAt(i));
@@ -270,18 +277,18 @@
/// field.
///
/// Files whose contents aren't available are `null`.
- final List<SourceFile> files;
+ final List<SourceFile?> files;
/// Entries indicating the beginning of each span.
final List<TargetLineEntry> lines;
/// Url of the target file.
- String targetUrl;
+ String? targetUrl;
/// Source root prepended to all entries in [urls].
- String sourceRoot;
+ String? sourceRoot;
- final Uri _mapUrl;
+ final Uri? _mapUrl;
final Map<String, dynamic> extensions;
@@ -290,9 +297,9 @@
extensions = {};
factory SingleMapping.fromEntries(Iterable<builder.Entry> entries,
- [String fileUrl]) {
+ [String? fileUrl]) {
    // The entries need to be sorted by the target offsets.
- var sourceEntries = List.from(entries)..sort();
+ var sourceEntries = entries.toList()..sort();
var lines = <TargetLineEntry>[];
// Indices associated with file urls that will be part of the source map. We
@@ -307,7 +314,7 @@
var files = <int, SourceFile>{};
var lineNum;
- List<TargetEntry> targetEntries;
+ late List<TargetEntry> targetEntries;
for (var sourceEntry in sourceEntries) {
if (lineNum == null || sourceEntry.target.line > lineNum) {
lineNum = sourceEntry.target.line;
@@ -315,24 +322,21 @@
lines.add(TargetLineEntry(lineNum, targetEntries));
}
- if (sourceEntry.source == null) {
- targetEntries.add(TargetEntry(sourceEntry.target.column));
- } else {
- var sourceUrl = sourceEntry.source.sourceUrl;
- var urlId = urls.putIfAbsent(
- sourceUrl == null ? '' : sourceUrl.toString(), () => urls.length);
+ var sourceUrl = sourceEntry.source.sourceUrl;
+ var urlId = urls.putIfAbsent(
+ sourceUrl == null ? '' : sourceUrl.toString(), () => urls.length);
- if (sourceEntry.source is FileLocation) {
- files.putIfAbsent(
- urlId, () => (sourceEntry.source as FileLocation).file);
- }
-
- var srcNameId = sourceEntry.identifierName == null
- ? null
- : names.putIfAbsent(sourceEntry.identifierName, () => names.length);
- targetEntries.add(TargetEntry(sourceEntry.target.column, urlId,
- sourceEntry.source.line, sourceEntry.source.column, srcNameId));
+ if (sourceEntry.source is FileLocation) {
+ files.putIfAbsent(
+ urlId, () => (sourceEntry.source as FileLocation).file);
}
+
+ var sourceEntryIdentifierName = sourceEntry.identifierName;
+ var srcNameId = sourceEntryIdentifierName == null
+ ? null
+ : names.putIfAbsent(sourceEntryIdentifierName, () => names.length);
+ targetEntries.add(TargetEntry(sourceEntry.target.column, urlId,
+ sourceEntry.source.line, sourceEntry.source.column, srcNameId));
}
return SingleMapping._(fileUrl, urls.values.map((i) => files[i]).toList(),
urls.keys.toList(), names.keys.toList(), lines);
@@ -342,14 +346,14 @@
: targetUrl = map['file'],
urls = List<String>.from(map['sources']),
names = List<String>.from(map['names'] ?? []),
- files = List(map['sources'].length),
+ files = List.filled(map['sources'].length, null),
sourceRoot = map['sourceRoot'],
lines = <TargetLineEntry>[],
_mapUrl = mapUrl is String ? Uri.parse(mapUrl) : mapUrl,
extensions = {} {
var sourcesContent = map['sourcesContent'] == null
- ? const []
- : List<String>.from(map['sourcesContent']);
+ ? const <String?>[]
+ : List<String?>.from(map['sourcesContent']);
for (var i = 0; i < urls.length && i < sourcesContent.length; i++) {
var source = sourcesContent[i];
if (source == null) continue;
@@ -459,11 +463,11 @@
var newUrlId = segment.sourceUrlId;
if (newUrlId == null) continue;
srcUrlId = _append(buff, srcUrlId, newUrlId);
- srcLine = _append(buff, srcLine, segment.sourceLine);
- srcColumn = _append(buff, srcColumn, segment.sourceColumn);
+ srcLine = _append(buff, srcLine, segment.sourceLine!);
+ srcColumn = _append(buff, srcColumn, segment.sourceColumn!);
if (segment.sourceNameId == null) continue;
- srcNameId = _append(buff, srcNameId, segment.sourceNameId);
+ srcNameId = _append(buff, srcNameId, segment.sourceNameId!);
}
}
@@ -474,7 +478,7 @@
'names': names,
'mappings': buff.toString()
};
- if (targetUrl != null) result['file'] = targetUrl;
+ if (targetUrl != null) result['file'] = targetUrl!;
if (includeSourceContents) {
result['sourcesContent'] = files.map((file) => file?.getText(0)).toList();
@@ -498,7 +502,7 @@
/// Returns [TargetLineEntry] which includes the location in the target [line]
/// number. In particular, the resulting entry is the last entry whose line
/// number is lower or equal to [line].
- TargetLineEntry _findLine(int line) {
+ TargetLineEntry? _findLine(int line) {
var index = binarySearch(lines, (e) => e.line > line);
return (index <= 0) ? null : lines[index - 1];
}
@@ -508,7 +512,7 @@
/// the last entry whose column is lower or equal than [column]. If
/// [lineEntry] corresponds to a line prior to [line], then the result will be
/// the very last entry on that line.
- TargetEntry _findColumn(int line, int column, TargetLineEntry lineEntry) {
+ TargetEntry? _findColumn(int line, int column, TargetLineEntry? lineEntry) {
if (lineEntry == null || lineEntry.entries.isEmpty) return null;
if (lineEntry.line != line) return lineEntry.entries.last;
var entries = lineEntry.entries;
@@ -517,33 +521,39 @@
}
@override
- SourceMapSpan spanFor(int line, int column,
- {Map<String, SourceFile> files, String uri}) {
+ SourceMapSpan? spanFor(int line, int column,
+ {Map<String, SourceFile>? files, String? uri}) {
var entry = _findColumn(line, column, _findLine(line));
- if (entry == null || entry.sourceUrlId == null) return null;
- var url = urls[entry.sourceUrlId];
+ if (entry == null) return null;
+
+ var sourceUrlId = entry.sourceUrlId;
+ if (sourceUrlId == null) return null;
+
+ var url = urls[sourceUrlId];
if (sourceRoot != null) {
url = '${sourceRoot}${url}';
}
- if (files != null && files[url] != null) {
- var file = files[url];
- var start = file.getOffset(entry.sourceLine, entry.sourceColumn);
- if (entry.sourceNameId != null) {
- var text = names[entry.sourceNameId];
- return SourceMapFileSpan(files[url].span(start, start + text.length),
+
+ var sourceNameId = entry.sourceNameId;
+ var file = files?[url];
+ if (file != null) {
+ var start = file.getOffset(entry.sourceLine!, entry.sourceColumn);
+ if (sourceNameId != null) {
+ var text = names[sourceNameId];
+ return SourceMapFileSpan(file.span(start, start + text.length),
isIdentifier: true);
} else {
- return SourceMapFileSpan(files[url].location(start).pointSpan());
+ return SourceMapFileSpan(file.location(start).pointSpan());
}
} else {
var start = SourceLocation(0,
- sourceUrl: _mapUrl == null ? url : _mapUrl.resolve(url),
+ sourceUrl: _mapUrl?.resolve(url) ?? url,
line: entry.sourceLine,
column: entry.sourceColumn);
// Offset and other context is not available.
- if (entry.sourceNameId != null) {
- return SourceMapSpan.identifier(start, names[entry.sourceNameId]);
+ if (sourceNameId != null) {
+ return SourceMapSpan.identifier(start, names[sourceNameId]);
} else {
return SourceMapSpan(start, start, '');
}
@@ -578,18 +588,20 @@
..write(line)
..write(':')
..write(entry.column);
- if (entry.sourceUrlId != null) {
+ var sourceUrlId = entry.sourceUrlId;
+ if (sourceUrlId != null) {
buff
..write(' --> ')
..write(sourceRoot)
- ..write(urls[entry.sourceUrlId])
+ ..write(urls[sourceUrlId])
..write(': ')
..write(entry.sourceLine)
..write(':')
..write(entry.sourceColumn);
}
- if (entry.sourceNameId != null) {
- buff..write(' (')..write(names[entry.sourceNameId])..write(')');
+ var sourceNameId = entry.sourceNameId;
+ if (sourceNameId != null) {
+ buff..write(' (')..write(names[sourceNameId])..write(')');
}
buff.write('\n');
}
@@ -611,10 +623,10 @@
/// A target segment entry read from a source map
class TargetEntry {
final int column;
- final int sourceUrlId;
- final int sourceLine;
- final int sourceColumn;
- final int sourceNameId;
+ final int? sourceUrlId;
+ final int? sourceLine;
+ final int? sourceColumn;
+ final int? sourceNameId;
TargetEntry(this.column,
[this.sourceUrlId,
@@ -639,9 +651,11 @@
// Iterator API is used by decodeVlq to consume VLQ entries.
@override
bool moveNext() => ++index < _length;
+
@override
- String get current =>
- (index >= 0 && index < _length) ? _internal[index] : null;
+ String get current => (index >= 0 && index < _length)
+ ? _internal[index]
+ : throw RangeError.index(index, _internal);
bool get hasTokens => index < _length - 1 && _length > 0;
@@ -671,7 +685,9 @@
buff.write(_internal[i]);
}
buff.write('[31m');
- buff.write(current ?? '');
+ try {
+ buff.write(current);
+ } on RangeError catch (_) {}
buff.write('[0m');
for (var i = index + 1; i < _internal.length; i++) {
buff.write(_internal[i]);
diff --git a/lib/printer.dart b/lib/printer.dart
index d79d2cb..922c7fe 100644
--- a/lib/printer.dart
+++ b/lib/printer.dart
@@ -23,7 +23,7 @@
String get map => _maps.toJson(filename);
/// Current source location mapping.
- SourceLocation _loc;
+ SourceLocation? _loc;
/// Current line in the buffer;
int _line = 0;
@@ -47,13 +47,21 @@
// Return not followed by line-feed is treated as a new line.
_line++;
_column = 0;
- if (projectMarks && _loc != null) {
- if (_loc is FileLocation) {
- var file = (_loc as FileLocation).file;
- mark(file.location(file.getOffset(_loc.line + 1)));
- } else {
- mark(SourceLocation(0,
- sourceUrl: _loc.sourceUrl, line: _loc.line + 1, column: 0));
+ {
+ // **Warning**: Any calls to `mark` will change the value of `_loc`,
+ // so this local variable is no longer up to date after that point.
+ //
+ // This is why it has been put inside its own block to limit the
+ // scope in which it is available.
+ var loc = _loc;
+ if (projectMarks && loc != null) {
+ if (loc is FileLocation) {
+ var file = loc.file;
+ mark(file.location(file.getOffset(loc.line + 1)));
+ } else {
+ mark(SourceLocation(0,
+ sourceUrl: loc.sourceUrl, line: loc.line + 1, column: 0));
+ }
}
}
} else {
@@ -78,8 +86,8 @@
/// this also records the name of the identifier in the source map
/// information.
void mark(mark) {
- SourceLocation loc;
- String identifier;
+ late final SourceLocation loc;
+ String? identifier;
if (mark is SourceLocation) {
loc = mark;
} else if (mark is SourceSpan) {
@@ -106,11 +114,20 @@
final _items = <dynamic>[];
/// Internal buffer to merge consecutive strings added to this printer.
- StringBuffer _buff;
+ StringBuffer? _buff;
/// Current indentation, which can be updated from outside this class.
int indent;
+ /// [Printer] used during the last call to [build], if any.
+ Printer? printer;
+
+ /// Returns the text produced after calling [build].
+ String? get text => printer?.text;
+
+ /// Returns the source-map information produced after calling [build].
+ String? get map => printer?.map;
+
/// Item used to indicate that the following item is copied from the original
/// source code, and hence we should preserve source-maps on every new line.
static final _ORIGINAL = Object();
@@ -133,7 +150,7 @@
/// Setting [isOriginal] will make this printer propagate source map locations
/// on every line-break.
void add(object,
- {SourceLocation location, SourceSpan span, bool isOriginal = false}) {
+ {SourceLocation? location, SourceSpan? span, bool isOriginal = false}) {
if (object is! String || location != null || span != null || isOriginal) {
_flush();
assert(location == null || span == null);
@@ -162,7 +179,7 @@
/// The [location] and [span] parameters indicate the corresponding source map
/// location of [line] in the original input. Only one, [location] or
/// [span], should be provided at a time.
- void addLine(String line, {SourceLocation location, SourceSpan span}) {
+ void addLine(String? line, {SourceLocation? location, SourceSpan? span}) {
if (location != null || span != null) {
_flush();
assert(location == null || span == null);
@@ -180,8 +197,8 @@
/// Appends a string merging it with any previous strings, if possible.
void _appendString(String s) {
- _buff ??= StringBuffer();
- _buff.write(s);
+ var buf = _buff ??= StringBuffer();
+ buf.write(s);
}
/// Adds all of the current [_buff] contents as a string item.
@@ -206,15 +223,6 @@
return (StringBuffer()..writeAll(_items)).toString();
}
- /// [Printer] used during the last call to [build], if any.
- Printer printer;
-
- /// Returns the text produced after calling [build].
- String get text => printer.text;
-
- /// Returns the source-map information produced after calling [build].
- String get map => printer.map;
-
/// Builds the output of this printer and source map information. After
/// calling this function, you can use [text] and [map] to retrieve the
  /// generated code and source map information, respectively.
diff --git a/lib/refactor.dart b/lib/refactor.dart
index 5e117e8..64fd610 100644
--- a/lib/refactor.dart
+++ b/lib/refactor.dart
@@ -17,7 +17,7 @@
/// Applies a series of edits using original location
/// information, and composes them into the edited string.
class TextEditTransaction {
- final SourceFile file;
+ final SourceFile? file;
final String original;
final _edits = <_TextEdit>[];
@@ -33,9 +33,9 @@
_edits.add(_TextEdit(begin, end, replacement));
}
- /// Create a source map [SourceLocation] for [offset].
- SourceLocation _loc(int offset) =>
- file != null ? file.location(offset) : null;
+ /// Create a source map [SourceLocation] for [offset], if [file] is not
+ /// `null`.
+ SourceLocation? _loc(int offset) => file?.location(offset);
/// Applies all pending [edit]s and returns a [NestedPrinter] containing the
/// rewritten string and source map information. [file]`.location` is given to
@@ -58,7 +58,7 @@
if (consumed > edit.begin) {
var sb = StringBuffer();
sb
- ..write(file.location(edit.begin).toolString)
+ ..write(file?.location(edit.begin).toolString)
..write(': overlapping edits. Insert at offset ')
..write(edit.begin)
..write(' but have consumed ')
diff --git a/lib/src/source_map_span.dart b/lib/src/source_map_span.dart
index b8f1152..65574ca 100644
--- a/lib/src/source_map_span.dart
+++ b/lib/src/source_map_span.dart
@@ -52,7 +52,7 @@
@override
String get context => _inner.context;
@override
- Uri get sourceUrl => _inner.sourceUrl;
+ Uri? get sourceUrl => _inner.sourceUrl;
@override
int get length => _inner.length;
diff --git a/lib/src/vlq.dart b/lib/src/vlq.dart
index d4e29a1..951ea8b 100644
--- a/lib/src/vlq.dart
+++ b/lib/src/vlq.dart
@@ -33,8 +33,8 @@
return map;
}();
-final int MAX_INT32 = pow(2, 31) - 1;
-final int MIN_INT32 = -pow(2, 31);
+final int MAX_INT32 = (pow(2, 31) as int) - 1;
+final int MIN_INT32 = -(pow(2, 31) as int);
/// Creates the VLQ encoding of [value] as a sequence of characters
Iterable<String> encodeVlq(int value) {
@@ -70,10 +70,10 @@
while (!stop) {
if (!chars.moveNext()) throw StateError('incomplete VLQ value');
var char = chars.current;
- if (!_digits.containsKey(char)) {
+ var digit = _digits[char];
+ if (digit == null) {
throw FormatException('invalid character in VLQ encoding: $char');
}
- var digit = _digits[char];
stop = (digit & VLQ_CONTINUATION_BIT) == 0;
digit &= VLQ_BASE_MASK;
result += (digit << shift);
diff --git a/pubspec.yaml b/pubspec.yaml
index 871c40a..a59c77d 100644
--- a/pubspec.yaml
+++ b/pubspec.yaml
@@ -1,16 +1,93 @@
name: source_maps
-version: 0.10.9
+version: 0.11.0-nullsafety
description: Library to programmatically manipulate source map files.
homepage: https://github.com/dart-lang/source_maps
environment:
- sdk: '>=2.0.0 <3.0.0'
+ sdk: '>=2.9.0-18.0 <2.9.0'
dependencies:
- source_span: ^1.3.0
+ source_span: '>=1.8.0-nullsafety <1.8.0'
dev_dependencies:
- source_span: ^1.5.4
test: ^1.2.0
term_glyph: ^1.0.0
+
+dependency_overrides:
+ # Overrides required for a version solve
+ coverage: 0.14.0
+ # NNBD Branches
+ async:
+ git:
+ url: git://github.com/dart-lang/async.git
+ ref: null_safety
+ boolean_selector:
+ git:
+ url: git://github.com/dart-lang/boolean_selector.git
+ ref: null_safety
+ charcode:
+ git:
+ url: git://github.com/dart-lang/charcode.git
+ ref: null_safety
+ collection: 1.15.0-nullsafety
+ js:
+ git:
+ url: git://github.com/dart-lang/sdk.git
+ path: pkg/js
+ matcher:
+ git:
+ url: git://github.com/dart-lang/matcher.git
+ ref: null_safety
+ meta: 1.3.0-nullsafety
+ path:
+ git:
+ url: git://github.com/dart-lang/path.git
+ ref: null_safety
+ pedantic:
+ git:
+ url: git://github.com/dart-lang/pedantic.git
+ ref: null_safety
+ pool:
+ git:
+ url: git://github.com/dart-lang/pool.git
+ ref: null_safety
+ source_map_stack_trace:
+ git:
+ url: git://github.com/dart-lang/source_map_stack_trace.git
+ ref: null_safety
+ source_span:
+ git:
+ url: git://github.com/dart-lang/source_span.git
+ ref: null_safety
+ stack_trace:
+ git:
+ url: git://github.com/dart-lang/stack_trace.git
+ ref: null_safety
+ stream_channel:
+ git:
+ url: git://github.com/dart-lang/stream_channel.git
+ ref: null_safety
+ string_scanner:
+ git:
+ url: git://github.com/dart-lang/string_scanner.git
+ ref: null_safety
+ term_glyph:
+ git:
+ url: git://github.com/dart-lang/term_glyph.git
+ ref: null_safety
+ test:
+ git:
+ url: git://github.com/dart-lang/test.git
+ ref: null_safety
+ path: pkgs/test
+ test_api:
+ git:
+ url: git://github.com/dart-lang/test.git
+ ref: null_safety
+ path: pkgs/test_api
+ test_core:
+ git:
+ url: git://github.com/dart-lang/test.git
+ ref: null_safety
+ path: pkgs/test_core
diff --git a/test/common.dart b/test/common.dart
index c0bed68..6ba1c67 100644
--- a/test/common.dart
+++ b/test/common.dart
@@ -76,8 +76,8 @@
var line = outputSpan.start.line;
var column = outputSpan.start.column;
var files = realOffsets ? {'input.dart': input} : null;
- var span = mapping.spanFor(line, column, files: files);
- var span2 = mapping.spanForLocation(outputSpan.start, files: files);
+ var span = mapping.spanFor(line, column, files: files)!;
+ var span2 = mapping.spanForLocation(outputSpan.start, files: files)!;
// Both mapping APIs are equivalent.
expect(span.start.offset, span2.start.offset);
diff --git a/test/parser_test.dart b/test/parser_test.dart
index 1b73f13..1b3ae6f 100644
--- a/test/parser_test.dart
+++ b/test/parser_test.dart
@@ -104,7 +104,7 @@
});
test('parse with no source location', () {
- SingleMapping map = parse(jsonEncode(MAP_WITH_NO_SOURCE_LOCATION));
+ var map = parse(jsonEncode(MAP_WITH_NO_SOURCE_LOCATION)) as SingleMapping;
expect(map.lines.length, 1);
expect(map.lines.first.entries.length, 1);
var entry = map.lines.first.entries.first;
@@ -117,7 +117,7 @@
});
test('parse with source location and no name', () {
- SingleMapping map = parse(jsonEncode(MAP_WITH_SOURCE_LOCATION));
+ var map = parse(jsonEncode(MAP_WITH_SOURCE_LOCATION)) as SingleMapping;
expect(map.lines.length, 1);
expect(map.lines.first.entries.length, 1);
var entry = map.lines.first.entries.first;
@@ -130,8 +130,8 @@
});
test('parse with source location and missing names entry', () {
- SingleMapping map =
- parse(jsonEncode(MAP_WITH_SOURCE_LOCATION_AND_MISSING_NAMES));
+ var map = parse(jsonEncode(MAP_WITH_SOURCE_LOCATION_AND_MISSING_NAMES))
+ as SingleMapping;
expect(map.lines.length, 1);
expect(map.lines.first.entries.length, 1);
var entry = map.lines.first.entries.first;
@@ -144,7 +144,8 @@
});
test('parse with source location and name', () {
- SingleMapping map = parse(jsonEncode(MAP_WITH_SOURCE_LOCATION_AND_NAME));
+ var map =
+ parse(jsonEncode(MAP_WITH_SOURCE_LOCATION_AND_NAME)) as SingleMapping;
expect(map.lines.length, 1);
expect(map.lines.first.entries.length, 1);
var entry = map.lines.first.entries.first;
@@ -160,12 +161,12 @@
var inputMap = Map.from(MAP_WITH_SOURCE_LOCATION);
inputMap['sourceRoot'] = '/pkg/';
var mapping = parseJson(inputMap) as SingleMapping;
- expect(mapping.spanFor(0, 0).sourceUrl, Uri.parse('/pkg/input.dart'));
+ expect(mapping.spanFor(0, 0)?.sourceUrl, Uri.parse('/pkg/input.dart'));
expect(
mapping
.spanForLocation(
SourceLocation(0, sourceUrl: Uri.parse('ignored.dart')))
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('/pkg/input.dart'));
var newSourceRoot = '/new/';
@@ -180,7 +181,7 @@
var inputMap = Map.from(MAP_WITH_SOURCE_LOCATION);
inputMap['sourceRoot'] = 'pkg/';
var mapping = parseJson(inputMap, mapUrl: 'file:///path/to/map');
- expect(mapping.spanFor(0, 0).sourceUrl,
+ expect(mapping.spanFor(0, 0)?.sourceUrl,
Uri.parse('file:///path/to/pkg/input.dart'));
});
@@ -193,29 +194,31 @@
mapping
.spanForLocation(SourceLocation(0,
sourceUrl: Uri.file('/path/to/output.dart')))
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input1.dart'));
expect(
mapping
.spanForLocation(SourceLocation(0,
sourceUrl: Uri.file('/path/to/output2.dart')))
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input2.dart'));
expect(
mapping
.spanForLocation(SourceLocation(0,
sourceUrl: Uri.file('/path/to/3/output.dart')))
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input3.dart'));
expect(
- mapping.spanFor(0, 0, uri: 'file:///path/to/output.dart').sourceUrl,
+ mapping.spanFor(0, 0, uri: 'file:///path/to/output.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input1.dart'));
expect(
- mapping.spanFor(0, 0, uri: 'file:///path/to/output2.dart').sourceUrl,
+ mapping.spanFor(0, 0, uri: 'file:///path/to/output2.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input2.dart'));
expect(
- mapping.spanFor(0, 0, uri: 'file:///path/to/3/output.dart').sourceUrl,
+ mapping
+ .spanFor(0, 0, uri: 'file:///path/to/3/output.dart')
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input3.dart'));
});
@@ -224,36 +227,36 @@
mapping
.spanForLocation(SourceLocation(0,
sourceUrl: Uri.parse('package:1/output.dart')))
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input1.dart'));
expect(
mapping
.spanForLocation(SourceLocation(0,
sourceUrl: Uri.parse('package:2/output2.dart')))
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input2.dart'));
expect(
mapping
.spanForLocation(SourceLocation(0,
sourceUrl: Uri.parse('package:3/output.dart')))
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input3.dart'));
- expect(mapping.spanFor(0, 0, uri: 'package:1/output.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: 'package:1/output.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input1.dart'));
- expect(mapping.spanFor(0, 0, uri: 'package:2/output2.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: 'package:2/output2.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input2.dart'));
- expect(mapping.spanFor(0, 0, uri: 'package:3/output.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: 'package:3/output.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input3.dart'));
});
test('unmapped path', () {
- var span = mapping.spanFor(0, 0, uri: 'unmapped_output.dart');
+ var span = mapping.spanFor(0, 0, uri: 'unmapped_output.dart')!;
expect(span.sourceUrl, Uri.parse('unmapped_output.dart'));
expect(span.start.line, equals(0));
expect(span.start.column, equals(0));
- span = mapping.spanFor(10, 5, uri: 'unmapped_output.dart');
+ span = mapping.spanFor(10, 5, uri: 'unmapped_output.dart')!;
expect(span.sourceUrl, Uri.parse('unmapped_output.dart'));
expect(span.start.line, equals(10));
expect(span.start.column, equals(5));
@@ -264,11 +267,11 @@
});
test('incomplete paths', () {
- expect(mapping.spanFor(0, 0, uri: 'output.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: 'output.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input1.dart'));
- expect(mapping.spanFor(0, 0, uri: 'output2.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: 'output2.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input2.dart'));
- expect(mapping.spanFor(0, 0, uri: '3/output.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: '3/output.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input3.dart'));
});
@@ -276,11 +279,11 @@
var mapping = parseExtended(jsonEncode(SOURCE_MAP_BUNDLE),
mapUrl: 'file:///path/to/map');
- expect(mapping.spanFor(0, 0, uri: 'output.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: 'output.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input1.dart'));
- expect(mapping.spanFor(0, 0, uri: 'output2.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: 'output2.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input2.dart'));
- expect(mapping.spanFor(0, 0, uri: '3/output.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: '3/output.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input3.dart'));
});
@@ -288,22 +291,22 @@
var mapping = MappingBundle();
mapping.addMapping(parseJson(MAP_WITH_SOURCE_LOCATION_AND_NAME_1,
- mapUrl: 'file:///path/to/map'));
- expect(mapping.spanFor(0, 0, uri: 'output.dart').sourceUrl,
+ mapUrl: 'file:///path/to/map') as SingleMapping);
+ expect(mapping.spanFor(0, 0, uri: 'output.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input1.dart'));
expect(mapping.containsMapping('output2.dart'), isFalse);
mapping.addMapping(parseJson(MAP_WITH_SOURCE_LOCATION_AND_NAME_2,
- mapUrl: 'file:///path/to/map'));
+ mapUrl: 'file:///path/to/map') as SingleMapping);
expect(mapping.containsMapping('output2.dart'), isTrue);
- expect(mapping.spanFor(0, 0, uri: 'output2.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: 'output2.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input2.dart'));
expect(mapping.containsMapping('3/output.dart'), isFalse);
mapping.addMapping(parseJson(MAP_WITH_SOURCE_LOCATION_AND_NAME_3,
- mapUrl: 'file:///path/to/map'));
+ mapUrl: 'file:///path/to/map') as SingleMapping);
expect(mapping.containsMapping('3/output.dart'), isTrue);
- expect(mapping.spanFor(0, 0, uri: '3/output.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: '3/output.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input3.dart'));
});
@@ -315,31 +318,33 @@
mapping
.spanForLocation(SourceLocation(0,
sourceUrl: Uri.parse('http://localhost/output.dart')))
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input1.dart'));
expect(
mapping
.spanForLocation(SourceLocation(0,
sourceUrl: Uri.parse('http://localhost/output2.dart')))
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input2.dart'));
expect(
mapping
.spanForLocation(SourceLocation(0,
sourceUrl: Uri.parse('http://localhost/3/output.dart')))
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input3.dart'));
expect(
- mapping.spanFor(0, 0, uri: 'http://localhost/output.dart').sourceUrl,
+ mapping.spanFor(0, 0, uri: 'http://localhost/output.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input1.dart'));
expect(
- mapping.spanFor(0, 0, uri: 'http://localhost/output2.dart').sourceUrl,
+ mapping
+ .spanFor(0, 0, uri: 'http://localhost/output2.dart')
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input2.dart'));
expect(
mapping
.spanFor(0, 0, uri: 'http://localhost/3/output.dart')
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input3.dart'));
});
});
@@ -351,10 +356,10 @@
MAP_WITH_SOURCE_LOCATION,
MAP_WITH_SOURCE_LOCATION_AND_NAME
]) {
- SingleMapping mapping = parseJson(expected);
+ var mapping = parseJson(expected) as SingleMapping;
expect(mapping.toJson(), equals(expected));
- mapping = parseJsonExtended(expected);
+ mapping = parseJsonExtended(expected) as SingleMapping;
expect(mapping.toJson(), equals(expected));
}
@@ -366,7 +371,7 @@
var map = Map.from(EXPECTED_MAP);
map['x_foo'] = 'a';
map['x_bar'] = [3];
- SingleMapping mapping = parseJson(map);
+ var mapping = parseJson(map) as SingleMapping;
expect(mapping.toJson(), equals(map));
expect(mapping.extensions['x_foo'], equals('a'));
expect(mapping.extensions['x_bar'].first, equals(3));
@@ -415,7 +420,7 @@
map['sourcesContent'] = ['hello, world!'];
var mapping = parseJson(map) as SingleMapping;
- var file = mapping.files[0];
+ var file = mapping.files[0]!;
expect(file.url, equals(Uri.parse('input.dart')));
expect(file.getText(0), equals('hello, world!'));
});
diff --git a/test/refactor_test.dart b/test/refactor_test.dart
index 36b934a..9a403a1 100644
--- a/test/refactor_test.dart
+++ b/test/refactor_test.dart
@@ -60,7 +60,7 @@
txn.edit(34, 35, '___');
var printer = (txn.commit()..build(''));
var output = printer.text;
- var map = parse(printer.map);
+ var map = parse(printer.map!);
expect(output,
'0123456789\n0*23456789\n01*34__\n 789\na___cdefghij\nabcd*fghij\n');
@@ -197,7 +197,5 @@
});
}
-String _span(int line, int column, Mapping map, SourceFile file) {
- var span = map.spanFor(line - 1, column - 1, files: {'': file});
- return span == null ? null : span.message('').trim();
-}
+String? _span(int line, int column, Mapping map, SourceFile file) =>
+ map.spanFor(line - 1, column - 1, files: {'': file})?.message('').trim();
diff --git a/test/vlq_test.dart b/test/vlq_test.dart
index 6021519..92a8f4a 100644
--- a/test/vlq_test.dart
+++ b/test/vlq_test.dart
@@ -27,8 +27,8 @@
});
test('only 32-bit ints allowed', () {
- var max_int = pow(2, 31) - 1;
- var min_int = -pow(2, 31);
+ var max_int = (pow(2, 31) as int) - 1;
+ var min_int = -(pow(2, 31) as int);
_checkEncodeDecode(max_int - 1);
_checkEncodeDecode(min_int + 1);
_checkEncodeDecode(max_int);