Merge pull request #45 from dart-lang/srawlins-patch-1
Correct markdown in library dartdoc
diff --git a/.github/workflows/test-package.yml b/.github/workflows/test-package.yml
new file mode 100644
index 0000000..e47bf66
--- /dev/null
+++ b/.github/workflows/test-package.yml
@@ -0,0 +1,61 @@
+name: Dart CI
+
+on:
+ # Run on PRs and pushes to the default branch.
+ push:
+ branches: [ master ]
+ pull_request:
+ branches: [ master ]
+ schedule:
+ - cron: "0 0 * * 0"
+
+env:
+ PUB_ENVIRONMENT: bot.github
+
+jobs:
+ # Check code formatting and static analysis on a single OS (linux)
+ # against Dart dev.
+ analyze:
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ matrix:
+ sdk: [dev]
+ steps:
+ - uses: actions/checkout@v2
+ - uses: dart-lang/setup-dart@v1.0
+ with:
+ sdk: ${{ matrix.sdk }}
+ - id: install
+ name: Install dependencies
+ run: dart pub get
+ - name: Check formatting
+ run: dart format --output=none --set-exit-if-changed .
+ if: always() && steps.install.outcome == 'success'
+ - name: Analyze code
+ run: dart analyze --fatal-infos
+ if: always() && steps.install.outcome == 'success'
+
+ # Run tests on a matrix consisting of two dimensions:
+ # 1. OS: ubuntu-latest, (macos-latest, windows-latest)
+ # 2. release channel: dev
+ test:
+ needs: analyze
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: false
+ matrix:
+ # Add macos-latest and/or windows-latest if relevant for this package.
+ os: [ubuntu-latest]
+ sdk: [2.12.0, dev]
+ steps:
+ - uses: actions/checkout@v2
+ - uses: dart-lang/setup-dart@v1.0
+ with:
+ sdk: ${{ matrix.sdk }}
+ - id: install
+ name: Install dependencies
+ run: dart pub get
+ - name: Run VM tests
+ run: dart test --platform vm
+ if: always() && steps.install.outcome == 'success'
diff --git a/.test_config b/.test_config
deleted file mode 100644
index 412fc5c..0000000
--- a/.test_config
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "test_package": true
-}
\ No newline at end of file
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index a49fc6f..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-language: dart
-
-dart:
- - dev
-dart_task:
- - test: -p vm,chrome
- - dartanalyzer
-
-matrix:
- include:
- # Only validate formatting using the dev release
- - dart: dev
- dart_task: dartfmt
-
-# Only building master means that we don't run two builds for each pull request.
-branches:
- only: [master]
-
-cache:
- directories:
- - $HOME/.pub-cache
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 321c371..366dce2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,9 @@
+## 0.10.11-dev
+
+## 0.10.10
+
+* Stable release for null safety.
+
## 0.10.9
* Fix a number of document comment issues.
diff --git a/LICENSE b/LICENSE
index 5c60afe..162572a 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,4 +1,5 @@
-Copyright 2014, the Dart project authors. All rights reserved.
+Copyright 2014, the Dart project authors.
+
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
@@ -9,7 +10,7 @@
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
- * Neither the name of Google Inc. nor the names of its
+ * Neither the name of Google LLC nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
diff --git a/analysis_options.yaml b/analysis_options.yaml
index 4f9dfb0..c09985a 100644
--- a/analysis_options.yaml
+++ b/analysis_options.yaml
@@ -1,4 +1,4 @@
-include: package:pedantic/analysis_options.yaml
+include: package:lints/recommended.yaml
linter:
rules:
diff --git a/lib/builder.dart b/lib/builder.dart
index e6b8d82..5c56ca4 100644
--- a/lib/builder.dart
+++ b/lib/builder.dart
@@ -21,9 +21,7 @@
/// Adds an entry mapping the [targetOffset] to [source].
void addFromOffset(SourceLocation source, SourceFile targetFile,
int targetOffset, String identifier) {
- if (targetFile == null) {
- throw ArgumentError('targetFile cannot be null');
- }
+ ArgumentError.checkNotNull(targetFile, 'targetFile');
_entries.add(Entry(source, targetFile.location(targetOffset), identifier));
}
@@ -33,7 +31,7 @@
/// `isIdentifier` set to true, this entry is considered to represent an
/// identifier whose value will be stored in the source map. [isIdentifier]
/// takes precedence over [target]'s `isIdentifier` value.
- void addSpan(SourceSpan source, SourceSpan target, {bool isIdentifier}) {
+ void addSpan(SourceSpan source, SourceSpan target, {bool? isIdentifier}) {
isIdentifier ??= source is SourceMapSpan ? source.isIdentifier : false;
var name = isIdentifier ? source.text : null;
@@ -42,7 +40,7 @@
/// Adds an entry mapping [target] to [source].
void addLocation(
- SourceLocation source, SourceLocation target, String identifier) {
+ SourceLocation source, SourceLocation target, String? identifier) {
_entries.add(Entry(source, target, identifier));
}
@@ -64,7 +62,7 @@
final SourceLocation target;
/// An identifier name, when this location is the start of an identifier.
- final String identifierName;
+ final String? identifierName;
/// Creates a new [Entry] mapping [target] to [source].
Entry(this.source, this.target, this.identifierName);
diff --git a/lib/parser.dart b/lib/parser.dart
index e3044aa..e3c7179 100644
--- a/lib/parser.dart
+++ b/lib/parser.dart
@@ -23,7 +23,8 @@
 // the string representation.
// TODO(tjblasi): Ignore the first line of [jsonMap] if the JSON safety string
// `)]}'` begins the string representation of the map.
-Mapping parse(String jsonMap, {Map<String, Map> otherMaps, mapUrl}) =>
+Mapping parse(String jsonMap,
+ {Map<String, Map>? otherMaps, /*String|Uri*/ Object? mapUrl}) =>
parseJson(jsonDecode(jsonMap), otherMaps: otherMaps, mapUrl: mapUrl);
/// Parses a source map or source map bundle directly from a json string.
@@ -31,7 +32,8 @@
/// [mapUrl], which may be either a [String] or a [Uri], indicates the URL of
/// the source map file itself. If it's passed, any URLs in the source
/// map will be interpreted as relative to this URL when generating spans.
-Mapping parseExtended(String jsonMap, {Map<String, Map> otherMaps, mapUrl}) =>
+Mapping parseExtended(String jsonMap,
+ {Map<String, Map>? otherMaps, /*String|Uri*/ Object? mapUrl}) =>
parseJsonExtended(jsonDecode(jsonMap),
otherMaps: otherMaps, mapUrl: mapUrl);
@@ -40,8 +42,8 @@
/// [mapUrl], which may be either a [String] or a [Uri], indicates the URL of
/// the source map file itself. If it's passed, any URLs in the source
/// map will be interpreted as relative to this URL when generating spans.
-Mapping parseJsonExtended(/*List|Map*/ json,
- {Map<String, Map> otherMaps, mapUrl}) {
+Mapping parseJsonExtended(/*List|Map*/ Object? json,
+ {Map<String, Map>? otherMaps, /*String|Uri*/ Object? mapUrl}) {
if (json is List) {
return MappingBundle.fromJson(json, mapUrl: mapUrl);
}
@@ -53,7 +55,8 @@
/// [mapUrl], which may be either a [String] or a [Uri], indicates the URL of
/// the source map file itself. If it's passed, any URLs in the source
/// map will be interpreted as relative to this URL when generating spans.
-Mapping parseJson(Map map, {Map<String, Map> otherMaps, mapUrl}) {
+Mapping parseJson(Map map,
+ {Map<String, Map>? otherMaps, /*String|Uri*/ Object? mapUrl}) {
if (map['version'] != 3) {
throw ArgumentError('unexpected source map version: ${map["version"]}. '
'Only version 3 is supported.');
@@ -79,12 +82,12 @@
/// [uri] is the optional location of the output file to find the span for
/// to disambiguate cases where a mapping may have different mappings for
/// different output files.
- SourceMapSpan spanFor(int line, int column,
- {Map<String, SourceFile> files, String uri});
+ SourceMapSpan? spanFor(int line, int column,
+ {Map<String, SourceFile>? files, String? uri});
/// Returns the span associated with [location].
- SourceMapSpan spanForLocation(SourceLocation location,
- {Map<String, SourceFile> files}) {
+ SourceMapSpan? spanForLocation(SourceLocation location,
+ {Map<String, SourceFile>? files}) {
return spanFor(location.line, location.column,
uri: location.sourceUrl?.toString(), files: files);
}
@@ -103,8 +106,8 @@
final List<Mapping> _maps = <Mapping>[];
/// Creates a section mapping from json.
- MultiSectionMapping.fromJson(List sections, Map<String, Map> otherMaps,
- {mapUrl}) {
+ MultiSectionMapping.fromJson(List sections, Map<String, Map>? otherMaps,
+ {/*String|Uri*/ Object? mapUrl}) {
for (var section in sections) {
var offset = section['offset'];
if (offset == null) throw FormatException('section missing offset');
@@ -124,12 +127,13 @@
if (url != null && map != null) {
throw FormatException("section can't use both url and map entries");
} else if (url != null) {
- if (otherMaps == null || otherMaps[url] == null) {
+ var other = otherMaps?[url];
+ if (otherMaps == null || other == null) {
throw FormatException(
'section contains refers to $url, but no map was '
'given for it. Make sure a map is passed in "otherMaps"');
}
- _maps.add(parseJson(otherMaps[url], otherMaps: otherMaps, mapUrl: url));
+ _maps.add(parseJson(other, otherMaps: otherMaps, mapUrl: url));
} else if (map != null) {
_maps.add(parseJson(map, otherMaps: otherMaps, mapUrl: mapUrl));
} else {
@@ -141,7 +145,7 @@
}
}
- int _indexFor(line, column) {
+ int _indexFor(int line, int column) {
for (var i = 0; i < _lineStart.length; i++) {
if (line < _lineStart[i]) return i - 1;
if (line == _lineStart[i] && column < _columnStart[i]) return i - 1;
@@ -150,8 +154,8 @@
}
@override
- SourceMapSpan spanFor(int line, int column,
- {Map<String, SourceFile> files, String uri}) {
+ SourceMapSpan? spanFor(int line, int column,
+ {Map<String, SourceFile>? files, String? uri}) {
// TODO(jacobr): perhaps verify that targetUrl matches the actual uri
// or at least ends in the same file name.
var index = _indexFor(line, column);
@@ -183,7 +187,7 @@
MappingBundle();
- MappingBundle.fromJson(List json, {String mapUrl}) {
+ MappingBundle.fromJson(List json, {/*String|Uri*/ Object? mapUrl}) {
for (var map in json) {
addMapping(parseJson(map, mapUrl: mapUrl) as SingleMapping);
}
@@ -192,7 +196,10 @@
void addMapping(SingleMapping mapping) {
// TODO(jacobr): verify that targetUrl is valid uri instead of a windows
// path.
- _mappings[mapping.targetUrl] = mapping;
+ // TODO: Remove type arg https://github.com/dart-lang/sdk/issues/42227
+ var targetUrl = ArgumentError.checkNotNull<String>(
+ mapping.targetUrl, 'mapping.targetUrl');
+ _mappings[targetUrl] = mapping;
}
/// Encodes the Mapping mappings as a json map.
@@ -210,11 +217,10 @@
bool containsMapping(String url) => _mappings.containsKey(url);
@override
- SourceMapSpan spanFor(int line, int column,
- {Map<String, SourceFile> files, String uri}) {
- if (uri == null) {
- throw ArgumentError.notNull('uri');
- }
+ SourceMapSpan? spanFor(int line, int column,
+ {Map<String, SourceFile>? files, String? uri}) {
+ // TODO: Remove type arg https://github.com/dart-lang/sdk/issues/42227
+ uri = ArgumentError.checkNotNull<String>(uri, 'uri');
// Find the longest suffix of the uri that matches the sourcemap
// where the suffix starts after a path segment boundary.
@@ -232,9 +238,10 @@
for (var i = 0; i < uri.length; ++i) {
if (onBoundary) {
var candidate = uri.substring(i);
- if (_mappings.containsKey(candidate)) {
- return _mappings[candidate]
- .spanFor(line, column, files: files, uri: candidate);
+ var candidateMapping = _mappings[candidate];
+ if (candidateMapping != null) {
+ return candidateMapping.spanFor(line, column,
+ files: files, uri: candidate);
}
}
onBoundary = separatorCodeUnits.contains(uri.codeUnitAt(i));
@@ -270,18 +277,18 @@
/// field.
///
/// Files whose contents aren't available are `null`.
- final List<SourceFile> files;
+ final List<SourceFile?> files;
/// Entries indicating the beginning of each span.
final List<TargetLineEntry> lines;
/// Url of the target file.
- String targetUrl;
+ String? targetUrl;
/// Source root prepended to all entries in [urls].
- String sourceRoot;
+ String? sourceRoot;
- final Uri _mapUrl;
+ final Uri? _mapUrl;
final Map<String, dynamic> extensions;
@@ -290,9 +297,9 @@
extensions = {};
factory SingleMapping.fromEntries(Iterable<builder.Entry> entries,
- [String fileUrl]) {
+ [String? fileUrl]) {
// The entries needs to be sorted by the target offsets.
- var sourceEntries = List.from(entries)..sort();
+ var sourceEntries = entries.toList()..sort();
var lines = <TargetLineEntry>[];
// Indices associated with file urls that will be part of the source map. We
@@ -306,8 +313,8 @@
/// The file for each URL, indexed by [urls]' values.
var files = <int, SourceFile>{};
- var lineNum;
- List<TargetEntry> targetEntries;
+ int? lineNum;
+ late List<TargetEntry> targetEntries;
for (var sourceEntry in sourceEntries) {
if (lineNum == null || sourceEntry.target.line > lineNum) {
lineNum = sourceEntry.target.line;
@@ -315,24 +322,21 @@
lines.add(TargetLineEntry(lineNum, targetEntries));
}
- if (sourceEntry.source == null) {
- targetEntries.add(TargetEntry(sourceEntry.target.column));
- } else {
- var sourceUrl = sourceEntry.source.sourceUrl;
- var urlId = urls.putIfAbsent(
- sourceUrl == null ? '' : sourceUrl.toString(), () => urls.length);
+ var sourceUrl = sourceEntry.source.sourceUrl;
+ var urlId = urls.putIfAbsent(
+ sourceUrl == null ? '' : sourceUrl.toString(), () => urls.length);
- if (sourceEntry.source is FileLocation) {
- files.putIfAbsent(
- urlId, () => (sourceEntry.source as FileLocation).file);
- }
-
- var srcNameId = sourceEntry.identifierName == null
- ? null
- : names.putIfAbsent(sourceEntry.identifierName, () => names.length);
- targetEntries.add(TargetEntry(sourceEntry.target.column, urlId,
- sourceEntry.source.line, sourceEntry.source.column, srcNameId));
+ if (sourceEntry.source is FileLocation) {
+ files.putIfAbsent(
+ urlId, () => (sourceEntry.source as FileLocation).file);
}
+
+ var sourceEntryIdentifierName = sourceEntry.identifierName;
+ var srcNameId = sourceEntryIdentifierName == null
+ ? null
+ : names.putIfAbsent(sourceEntryIdentifierName, () => names.length);
+ targetEntries.add(TargetEntry(sourceEntry.target.column, urlId,
+ sourceEntry.source.line, sourceEntry.source.column, srcNameId));
}
return SingleMapping._(fileUrl, urls.values.map((i) => files[i]).toList(),
urls.keys.toList(), names.keys.toList(), lines);
@@ -342,14 +346,14 @@
: targetUrl = map['file'],
urls = List<String>.from(map['sources']),
names = List<String>.from(map['names'] ?? []),
- files = List(map['sources'].length),
+ files = List.filled(map['sources'].length, null),
sourceRoot = map['sourceRoot'],
lines = <TargetLineEntry>[],
_mapUrl = mapUrl is String ? Uri.parse(mapUrl) : mapUrl,
extensions = {} {
var sourcesContent = map['sourcesContent'] == null
- ? const []
- : List<String>.from(map['sourcesContent']);
+ ? const <String?>[]
+ : List<String?>.from(map['sourcesContent']);
for (var i = 0; i < urls.length && i < sourcesContent.length; i++) {
var source = sourcesContent[i];
if (source == null) continue;
@@ -459,11 +463,11 @@
var newUrlId = segment.sourceUrlId;
if (newUrlId == null) continue;
srcUrlId = _append(buff, srcUrlId, newUrlId);
- srcLine = _append(buff, srcLine, segment.sourceLine);
- srcColumn = _append(buff, srcColumn, segment.sourceColumn);
+ srcLine = _append(buff, srcLine, segment.sourceLine!);
+ srcColumn = _append(buff, srcColumn, segment.sourceColumn!);
if (segment.sourceNameId == null) continue;
- srcNameId = _append(buff, srcNameId, segment.sourceNameId);
+ srcNameId = _append(buff, srcNameId, segment.sourceNameId!);
}
}
@@ -474,7 +478,7 @@
'names': names,
'mappings': buff.toString()
};
- if (targetUrl != null) result['file'] = targetUrl;
+ if (targetUrl != null) result['file'] = targetUrl!;
if (includeSourceContents) {
result['sourcesContent'] = files.map((file) => file?.getText(0)).toList();
@@ -498,7 +502,7 @@
/// Returns [TargetLineEntry] which includes the location in the target [line]
/// number. In particular, the resulting entry is the last entry whose line
/// number is lower or equal to [line].
- TargetLineEntry _findLine(int line) {
+ TargetLineEntry? _findLine(int line) {
var index = binarySearch(lines, (e) => e.line > line);
return (index <= 0) ? null : lines[index - 1];
}
@@ -508,7 +512,7 @@
/// the last entry whose column is lower or equal than [column]. If
/// [lineEntry] corresponds to a line prior to [line], then the result will be
/// the very last entry on that line.
- TargetEntry _findColumn(int line, int column, TargetLineEntry lineEntry) {
+ TargetEntry? _findColumn(int line, int column, TargetLineEntry? lineEntry) {
if (lineEntry == null || lineEntry.entries.isEmpty) return null;
if (lineEntry.line != line) return lineEntry.entries.last;
var entries = lineEntry.entries;
@@ -517,33 +521,39 @@
}
@override
- SourceMapSpan spanFor(int line, int column,
- {Map<String, SourceFile> files, String uri}) {
+ SourceMapSpan? spanFor(int line, int column,
+ {Map<String, SourceFile>? files, String? uri}) {
var entry = _findColumn(line, column, _findLine(line));
- if (entry == null || entry.sourceUrlId == null) return null;
- var url = urls[entry.sourceUrlId];
+ if (entry == null) return null;
+
+ var sourceUrlId = entry.sourceUrlId;
+ if (sourceUrlId == null) return null;
+
+ var url = urls[sourceUrlId];
if (sourceRoot != null) {
- url = '${sourceRoot}${url}';
+ url = '$sourceRoot$url';
}
- if (files != null && files[url] != null) {
- var file = files[url];
- var start = file.getOffset(entry.sourceLine, entry.sourceColumn);
- if (entry.sourceNameId != null) {
- var text = names[entry.sourceNameId];
- return SourceMapFileSpan(files[url].span(start, start + text.length),
+
+ var sourceNameId = entry.sourceNameId;
+ var file = files?[url];
+ if (file != null) {
+ var start = file.getOffset(entry.sourceLine!, entry.sourceColumn);
+ if (sourceNameId != null) {
+ var text = names[sourceNameId];
+ return SourceMapFileSpan(file.span(start, start + text.length),
isIdentifier: true);
} else {
- return SourceMapFileSpan(files[url].location(start).pointSpan());
+ return SourceMapFileSpan(file.location(start).pointSpan());
}
} else {
var start = SourceLocation(0,
- sourceUrl: _mapUrl == null ? url : _mapUrl.resolve(url),
+ sourceUrl: _mapUrl?.resolve(url) ?? url,
line: entry.sourceLine,
column: entry.sourceColumn);
// Offset and other context is not available.
- if (entry.sourceNameId != null) {
- return SourceMapSpan.identifier(start, names[entry.sourceNameId]);
+ if (sourceNameId != null) {
+ return SourceMapSpan.identifier(start, names[sourceNameId]);
} else {
return SourceMapSpan(start, start, '');
}
@@ -578,18 +588,23 @@
..write(line)
..write(':')
..write(entry.column);
- if (entry.sourceUrlId != null) {
+ var sourceUrlId = entry.sourceUrlId;
+ if (sourceUrlId != null) {
buff
..write(' --> ')
..write(sourceRoot)
- ..write(urls[entry.sourceUrlId])
+ ..write(urls[sourceUrlId])
..write(': ')
..write(entry.sourceLine)
..write(':')
..write(entry.sourceColumn);
}
- if (entry.sourceNameId != null) {
- buff..write(' (')..write(names[entry.sourceNameId])..write(')');
+ var sourceNameId = entry.sourceNameId;
+ if (sourceNameId != null) {
+ buff
+ ..write(' (')
+ ..write(names[sourceNameId])
+ ..write(')');
}
buff.write('\n');
}
@@ -611,10 +626,10 @@
/// A target segment entry read from a source map
class TargetEntry {
final int column;
- final int sourceUrlId;
- final int sourceLine;
- final int sourceColumn;
- final int sourceNameId;
+ final int? sourceUrlId;
+ final int? sourceLine;
+ final int? sourceColumn;
+ final int? sourceNameId;
TargetEntry(this.column,
[this.sourceUrlId,
@@ -639,18 +654,20 @@
// Iterator API is used by decodeVlq to consume VLQ entries.
@override
bool moveNext() => ++index < _length;
+
@override
- String get current =>
- (index >= 0 && index < _length) ? _internal[index] : null;
+ String get current => (index >= 0 && index < _length)
+ ? _internal[index]
+ : throw RangeError.index(index, _internal);
bool get hasTokens => index < _length - 1 && _length > 0;
_TokenKind get nextKind {
- if (!hasTokens) return _TokenKind.EOF;
+ if (!hasTokens) return _TokenKind.eof;
var next = _internal[index + 1];
- if (next == ';') return _TokenKind.LINE;
- if (next == ',') return _TokenKind.SEGMENT;
- return _TokenKind.VALUE;
+ if (next == ';') return _TokenKind.line;
+ if (next == ',') return _TokenKind.segment;
+ return _TokenKind.value;
}
int _consumeValue() => decodeVlq(this);
@@ -671,7 +688,9 @@
buff.write(_internal[i]);
}
buff.write('[31m');
- buff.write(current ?? '');
+ try {
+ buff.write(current);
+ } on RangeError catch (_) {}
buff.write('[0m');
for (var i = index + 1; i < _internal.length; i++) {
buff.write(_internal[i]);
@@ -682,10 +701,10 @@
}
class _TokenKind {
- static const _TokenKind LINE = _TokenKind(isNewLine: true);
- static const _TokenKind SEGMENT = _TokenKind(isNewSegment: true);
- static const _TokenKind EOF = _TokenKind(isEof: true);
- static const _TokenKind VALUE = _TokenKind();
+ static const _TokenKind line = _TokenKind(isNewLine: true);
+ static const _TokenKind segment = _TokenKind(isNewSegment: true);
+ static const _TokenKind eof = _TokenKind(isEof: true);
+ static const _TokenKind value = _TokenKind();
final bool isNewLine;
final bool isNewSegment;
final bool isEof;
diff --git a/lib/printer.dart b/lib/printer.dart
index d79d2cb..7d128f7 100644
--- a/lib/printer.dart
+++ b/lib/printer.dart
@@ -9,9 +9,7 @@
import 'builder.dart';
import 'src/source_map_span.dart';
-
-const int _LF = 10;
-const int _CR = 13;
+import 'src/utils.dart';
/// A simple printer that keeps track of offset locations and records source
/// maps locations.
@@ -23,7 +21,7 @@
String get map => _maps.toJson(filename);
/// Current source location mapping.
- SourceLocation _loc;
+ SourceLocation? _loc;
/// Current line in the buffer;
int _line = 0;
@@ -43,17 +41,27 @@
var length = chars.length;
for (var i = 0; i < length; i++) {
var c = chars[i];
- if (c == _LF || (c == _CR && (i + 1 == length || chars[i + 1] != _LF))) {
+ if (c == lineFeed ||
+ (c == carriageReturn &&
+ (i + 1 == length || chars[i + 1] != lineFeed))) {
// Return not followed by line-feed is treated as a new line.
_line++;
_column = 0;
- if (projectMarks && _loc != null) {
- if (_loc is FileLocation) {
- var file = (_loc as FileLocation).file;
- mark(file.location(file.getOffset(_loc.line + 1)));
- } else {
- mark(SourceLocation(0,
- sourceUrl: _loc.sourceUrl, line: _loc.line + 1, column: 0));
+ {
+ // **Warning**: Any calls to `mark` will change the value of `_loc`,
+ // so this local variable is no longer up to date after that point.
+ //
+ // This is why it has been put inside its own block to limit the
+ // scope in which it is available.
+ var loc = _loc;
+ if (projectMarks && loc != null) {
+ if (loc is FileLocation) {
+ var file = loc.file;
+ mark(file.location(file.getOffset(loc.line + 1)));
+ } else {
+ mark(SourceLocation(0,
+ sourceUrl: loc.sourceUrl, line: loc.line + 1, column: 0));
+ }
}
}
} else {
@@ -78,8 +86,8 @@
/// this also records the name of the identifier in the source map
/// information.
void mark(mark) {
- SourceLocation loc;
- String identifier;
+ late final SourceLocation loc;
+ String? identifier;
if (mark is SourceLocation) {
loc = mark;
} else if (mark is SourceSpan) {
@@ -106,14 +114,23 @@
final _items = <dynamic>[];
/// Internal buffer to merge consecutive strings added to this printer.
- StringBuffer _buff;
+ StringBuffer? _buff;
/// Current indentation, which can be updated from outside this class.
int indent;
+ /// [Printer] used during the last call to [build], if any.
+ Printer? printer;
+
+ /// Returns the text produced after calling [build].
+ String? get text => printer?.text;
+
+ /// Returns the source-map information produced after calling [build].
+ String? get map => printer?.map;
+
/// Item used to indicate that the following item is copied from the original
/// source code, and hence we should preserve source-maps on every new line.
- static final _ORIGINAL = Object();
+ static final _original = Object();
NestedPrinter([this.indent = 0]);
@@ -133,13 +150,13 @@
/// Setting [isOriginal] will make this printer propagate source map locations
/// on every line-break.
void add(object,
- {SourceLocation location, SourceSpan span, bool isOriginal = false}) {
+ {SourceLocation? location, SourceSpan? span, bool isOriginal = false}) {
if (object is! String || location != null || span != null || isOriginal) {
_flush();
assert(location == null || span == null);
if (location != null) _items.add(location);
if (span != null) _items.add(span);
- if (isOriginal) _items.add(_ORIGINAL);
+ if (isOriginal) _items.add(_original);
}
if (object is String) {
@@ -162,7 +179,7 @@
/// The [location] and [span] parameters indicate the corresponding source map
/// location of [line] in the original input. Only one, [location] or
/// [span], should be provided at a time.
- void addLine(String line, {SourceLocation location, SourceSpan span}) {
+ void addLine(String? line, {SourceLocation? location, SourceSpan? span}) {
if (location != null || span != null) {
_flush();
assert(location == null || span == null);
@@ -180,8 +197,8 @@
/// Appends a string merging it with any previous strings, if possible.
void _appendString(String s) {
- _buff ??= StringBuffer();
- _buff.write(s);
+ var buf = _buff ??= StringBuffer();
+ buf.write(s);
}
/// Adds all of the current [_buff] contents as a string item.
@@ -206,15 +223,6 @@
return (StringBuffer()..writeAll(_items)).toString();
}
- /// [Printer] used during the last call to [build], if any.
- Printer printer;
-
- /// Returns the text produced after calling [build].
- String get text => printer.text;
-
- /// Returns the source-map information produced after calling [build].
- String get map => printer.map;
-
/// Builds the output of this printer and source map information. After
/// calling this function, you can use [text] and [map] to retrieve the
 /// generated code and source map information, respectively.
@@ -235,7 +243,7 @@
propagate = false;
} else if (item is SourceLocation || item is SourceSpan) {
printer.mark(item);
- } else if (item == _ORIGINAL) {
+ } else if (item == _original) {
// we insert booleans when we are about to quote text that was copied
// from the original source. In such case, we will propagate marks on
// every new-line.
diff --git a/lib/refactor.dart b/lib/refactor.dart
index 5e117e8..97bd2a7 100644
--- a/lib/refactor.dart
+++ b/lib/refactor.dart
@@ -11,13 +11,14 @@
import 'package:source_span/source_span.dart';
import 'printer.dart';
+import 'src/utils.dart';
/// Editable text transaction.
///
/// Applies a series of edits using original location
/// information, and composes them into the edited string.
class TextEditTransaction {
- final SourceFile file;
+ final SourceFile? file;
final String original;
final _edits = <_TextEdit>[];
@@ -33,9 +34,9 @@
_edits.add(_TextEdit(begin, end, replacement));
}
- /// Create a source map [SourceLocation] for [offset].
- SourceLocation _loc(int offset) =>
- file != null ? file.location(offset) : null;
+ /// Create a source map [SourceLocation] for [offset], if [file] is not
+ /// `null`.
+ SourceLocation? _loc(int offset) => file?.location(offset);
/// Applies all pending [edit]s and returns a [NestedPrinter] containing the
/// rewritten string and source map information. [file]`.location` is given to
@@ -58,14 +59,16 @@
if (consumed > edit.begin) {
var sb = StringBuffer();
sb
- ..write(file.location(edit.begin).toolString)
+ ..write(file?.location(edit.begin).toolString)
..write(': overlapping edits. Insert at offset ')
..write(edit.begin)
..write(' but have consumed ')
..write(consumed)
..write(' input characters. List of edits:');
for (var e in _edits) {
- sb..write('\n ')..write(e);
+ sb
+ ..write('\n ')
+ ..write(e);
}
throw UnsupportedError(sb.toString());
}
@@ -91,7 +94,7 @@
final int end;
/// The replacement used by the edit, can be a string or a [NestedPrinter].
- final replace;
+ final Object replace;
_TextEdit(this.begin, this.end, this.replace);
@@ -114,7 +117,7 @@
var lineStart = 0;
for (var i = charOffset - 1; i >= 0; i--) {
var c = code.codeUnitAt(i);
- if (c == _LF || c == _CR) {
+ if (c == lineFeed || c == carriageReturn) {
lineStart = i + 1;
break;
}
@@ -124,7 +127,7 @@
var whitespaceEnd = code.length;
for (var i = lineStart; i < code.length; i++) {
var c = code.codeUnitAt(i);
- if (c != _SPACE && c != _TAB) {
+ if (c != _space && c != _tab) {
whitespaceEnd = i;
break;
}
@@ -133,7 +136,5 @@
return code.substring(lineStart, whitespaceEnd);
}
-const int _CR = 13;
-const int _LF = 10;
-const int _TAB = 9;
-const int _SPACE = 32;
+const int _tab = 9;
+const int _space = 32;
diff --git a/lib/src/source_map_span.dart b/lib/src/source_map_span.dart
index b8f1152..65574ca 100644
--- a/lib/src/source_map_span.dart
+++ b/lib/src/source_map_span.dart
@@ -52,7 +52,7 @@
@override
String get context => _inner.context;
@override
- Uri get sourceUrl => _inner.sourceUrl;
+ Uri? get sourceUrl => _inner.sourceUrl;
@override
int get length => _inner.length;
diff --git a/lib/src/utils.dart b/lib/src/utils.dart
index f9870d2..eb23834 100644
--- a/lib/src/utils.dart
+++ b/lib/src/utils.dart
@@ -27,3 +27,6 @@
}
return max;
}
+
+const int lineFeed = 10;
+const int carriageReturn = 13;
diff --git a/lib/src/vlq.dart b/lib/src/vlq.dart
index d4e29a1..6c41b6e 100644
--- a/lib/src/vlq.dart
+++ b/lib/src/vlq.dart
@@ -14,31 +14,31 @@
import 'dart:math';
-const int VLQ_BASE_SHIFT = 5;
+const int vlqBaseShift = 5;
-const int VLQ_BASE_MASK = (1 << 5) - 1;
+const int vlqBaseMask = (1 << 5) - 1;
-const int VLQ_CONTINUATION_BIT = 1 << 5;
+const int vlqContinuationBit = 1 << 5;
-const int VLQ_CONTINUATION_MASK = 1 << 5;
+const int vlqContinuationMask = 1 << 5;
-const String BASE64_DIGITS =
+const String base64Digits =
'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
final Map<String, int> _digits = () {
var map = <String, int>{};
for (var i = 0; i < 64; i++) {
- map[BASE64_DIGITS[i]] = i;
+ map[base64Digits[i]] = i;
}
return map;
}();
-final int MAX_INT32 = pow(2, 31) - 1;
-final int MIN_INT32 = -pow(2, 31);
+final int maxInt32 = (pow(2, 31) as int) - 1;
+final int minInt32 = -(pow(2, 31) as int);
/// Creates the VLQ encoding of [value] as a sequence of characters
Iterable<String> encodeVlq(int value) {
- if (value < MIN_INT32 || value > MAX_INT32) {
+ if (value < minInt32 || value > maxInt32) {
throw ArgumentError('expected 32 bit int, got: $value');
}
var res = <String>[];
@@ -49,12 +49,12 @@
}
value = (value << 1) | signBit;
do {
- var digit = value & VLQ_BASE_MASK;
- value >>= VLQ_BASE_SHIFT;
+ var digit = value & vlqBaseMask;
+ value >>= vlqBaseShift;
if (value > 0) {
- digit |= VLQ_CONTINUATION_BIT;
+ digit |= vlqContinuationBit;
}
- res.add(BASE64_DIGITS[digit]);
+ res.add(base64Digits[digit]);
} while (value > 0);
return res;
}
@@ -62,7 +62,7 @@
/// Decodes a value written as a sequence of VLQ characters. The first input
/// character will be `chars.current` after calling `chars.moveNext` once. The
/// iterator is advanced until a stop character is found (a character without
-/// the [VLQ_CONTINUATION_BIT]).
+/// the [vlqContinuationBit]).
int decodeVlq(Iterator<String> chars) {
var result = 0;
var stop = false;
@@ -70,14 +70,14 @@
while (!stop) {
if (!chars.moveNext()) throw StateError('incomplete VLQ value');
var char = chars.current;
- if (!_digits.containsKey(char)) {
+ var digit = _digits[char];
+ if (digit == null) {
throw FormatException('invalid character in VLQ encoding: $char');
}
- var digit = _digits[char];
- stop = (digit & VLQ_CONTINUATION_BIT) == 0;
- digit &= VLQ_BASE_MASK;
+ stop = (digit & vlqContinuationBit) == 0;
+ digit &= vlqBaseMask;
result += (digit << shift);
- shift += VLQ_BASE_SHIFT;
+ shift += vlqBaseShift;
}
// Result uses the least significant bit as a sign bit. We convert it into a
@@ -93,7 +93,7 @@
result = negate ? -result : result;
// TODO(sigmund): can we detect this earlier?
- if (result < MIN_INT32 || result > MAX_INT32) {
+ if (result < minInt32 || result > maxInt32) {
throw FormatException(
'expected an encoded 32 bit int, but we got: $result');
}
diff --git a/pubspec.yaml b/pubspec.yaml
index 871c40a..23e53a5 100644
--- a/pubspec.yaml
+++ b/pubspec.yaml
@@ -1,16 +1,16 @@
name: source_maps
-version: 0.10.9
+version: 0.10.11-dev
description: Library to programmatically manipulate source map files.
homepage: https://github.com/dart-lang/source_maps
environment:
- sdk: '>=2.0.0 <3.0.0'
+ sdk: ">=2.12.0 <3.0.0"
dependencies:
- source_span: ^1.3.0
+ source_span: ^1.8.0
dev_dependencies:
- source_span: ^1.5.4
- test: ^1.2.0
- term_glyph: ^1.0.0
+ lints: ^1.0.0
+ test: ^1.16.0
+ term_glyph: ^1.2.0
diff --git a/test/builder_test.dart b/test/builder_test.dart
index fddf46c..b9bb9c7 100644
--- a/test/builder_test.dart
+++ b/test/builder_test.dart
@@ -5,8 +5,10 @@
library test.source_maps_test;
import 'dart:convert';
-import 'package:test/test.dart';
+
import 'package:source_maps/source_maps.dart';
+import 'package:test/test.dart';
+
import 'common.dart';
void main() {
@@ -17,7 +19,7 @@
..addSpan(inputVar2, outputVar2)
..addSpan(inputExpr, outputExpr))
.build(output.url.toString());
- expect(map, equals(EXPECTED_MAP));
+ expect(map, equals(expectedMap));
});
test('builder - with location', () {
@@ -27,6 +29,6 @@
..addLocation(inputVar2.start, outputVar2.start, 'longVar2')
..addLocation(inputExpr.start, outputExpr.start, null))
.toJson(output.url.toString());
- expect(str, jsonEncode(EXPECTED_MAP));
+ expect(str, jsonEncode(expectedMap));
});
}
diff --git a/test/common.dart b/test/common.dart
index c0bed68..f6139de 100644
--- a/test/common.dart
+++ b/test/common.dart
@@ -10,7 +10,7 @@
import 'package:test/test.dart';
/// Content of the source file
-const String INPUT = '''
+const String inputContent = '''
/** this is a comment. */
int longVar1 = 3;
@@ -19,7 +19,7 @@
return longVar1 + longVar2;
}
''';
-var input = SourceFile.fromString(INPUT, url: 'input.dart');
+final input = SourceFile.fromString(inputContent, url: 'input.dart');
/// A span in the input file
SourceMapSpan ispan(int start, int end, [bool isIdentifier = false]) =>
@@ -36,11 +36,11 @@
SourceMapSpan inputExpr = ispan(108, 127);
/// Content of the target file
-const String OUTPUT = '''
+const String outputContent = '''
var x = 3;
f(y) => x + y;
''';
-var output = SourceFile.fromString(OUTPUT, url: 'output.dart');
+final output = SourceFile.fromString(outputContent, url: 'output.dart');
/// A span in the output file
SourceMapSpan ospan(int start, int end, [bool isIdentifier = false]) =>
@@ -62,7 +62,7 @@
///
/// This mapping is stored in the tests so we can independently test the builder
/// and parser algorithms without relying entirely on end2end tests.
-const Map<String, dynamic> EXPECTED_MAP = {
+const Map<String, dynamic> expectedMap = {
'version': 3,
'sourceRoot': '',
'sources': ['input.dart'],
@@ -76,8 +76,8 @@
var line = outputSpan.start.line;
var column = outputSpan.start.column;
var files = realOffsets ? {'input.dart': input} : null;
- var span = mapping.spanFor(line, column, files: files);
- var span2 = mapping.spanForLocation(outputSpan.start, files: files);
+ var span = mapping.spanFor(line, column, files: files)!;
+ var span2 = mapping.spanForLocation(outputSpan.start, files: files)!;
// Both mapping APIs are equivalent.
expect(span.start.offset, span2.start.offset);
diff --git a/test/end2end_test.dart b/test/end2end_test.dart
index 954339f..153fcc2 100644
--- a/test/end2end_test.dart
+++ b/test/end2end_test.dart
@@ -4,9 +4,10 @@
library test.end2end_test;
-import 'package:test/test.dart';
import 'package:source_maps/source_maps.dart';
import 'package:source_span/source_span.dart';
+import 'package:test/test.dart';
+
import 'common.dart';
void main() {
@@ -106,12 +107,12 @@
});
test('printer projecting marks + parse', () {
- var out = INPUT.replaceAll('long', '_s');
+ var out = inputContent.replaceAll('long', '_s');
var file = SourceFile.fromString(out, url: 'output2.dart');
var printer = Printer('output2.dart');
printer.mark(ispan(0, 0));
- var segments = INPUT.split('long');
+ var segments = inputContent.split('long');
expect(segments.length, 6);
printer.add(segments[0], projectMarks: true);
printer.mark(inputVar1);
@@ -153,7 +154,7 @@
// Start of the last line
var oOffset = out.length - 2;
- var iOffset = INPUT.length - 2;
+ var iOffset = inputContent.length - 2;
check(file.span(oOffset, oOffset), mapping, ispan(iOffset, iOffset), true);
check(file.span(oOffset + 1, oOffset + 1), mapping, ispan(iOffset, iOffset),
true);
diff --git a/test/parser_test.dart b/test/parser_test.dart
index 1b73f13..7c7b142 100644
--- a/test/parser_test.dart
+++ b/test/parser_test.dart
@@ -2,15 +2,15 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
-library test.parser_test;
-
import 'dart:convert';
-import 'package:test/test.dart';
+
import 'package:source_maps/source_maps.dart';
import 'package:source_span/source_span.dart';
+import 'package:test/test.dart';
+
import 'common.dart';
-const Map<String, dynamic> MAP_WITH_NO_SOURCE_LOCATION = {
+const Map<String, dynamic> _mapWithNoSourceLocation = {
'version': 3,
'sourceRoot': '',
'sources': ['input.dart'],
@@ -19,7 +19,7 @@
'file': 'output.dart'
};
-const Map<String, dynamic> MAP_WITH_SOURCE_LOCATION = {
+const Map<String, dynamic> _mapWithSourceLocation = {
'version': 3,
'sourceRoot': '',
'sources': ['input.dart'],
@@ -28,7 +28,7 @@
'file': 'output.dart'
};
-const Map<String, dynamic> MAP_WITH_SOURCE_LOCATION_AND_MISSING_NAMES = {
+const Map<String, dynamic> _mapWithSourceLocationAndMissingNames = {
'version': 3,
'sourceRoot': '',
'sources': ['input.dart'],
@@ -36,7 +36,7 @@
'file': 'output.dart'
};
-const Map<String, dynamic> MAP_WITH_SOURCE_LOCATION_AND_NAME = {
+const Map<String, dynamic> _mapWithSourceLocationAndName = {
'version': 3,
'sourceRoot': '',
'sources': ['input.dart'],
@@ -45,7 +45,7 @@
'file': 'output.dart'
};
-const Map<String, dynamic> MAP_WITH_SOURCE_LOCATION_AND_NAME_1 = {
+const Map<String, dynamic> _mapWithSourceLocationAndName1 = {
'version': 3,
'sourceRoot': 'pkg/',
'sources': ['input1.dart'],
@@ -54,7 +54,7 @@
'file': 'output.dart'
};
-const Map<String, dynamic> MAP_WITH_SOURCE_LOCATION_AND_NAME_2 = {
+const Map<String, dynamic> _mapWithSourceLocationAndName2 = {
'version': 3,
'sourceRoot': 'pkg/',
'sources': ['input2.dart'],
@@ -63,7 +63,7 @@
'file': 'output2.dart'
};
-const Map<String, dynamic> MAP_WITH_SOURCE_LOCATION_AND_NAME_3 = {
+const Map<String, dynamic> _mapWithSourceLocationAndName3 = {
'version': 3,
'sourceRoot': 'pkg/',
'sources': ['input3.dart'],
@@ -72,15 +72,15 @@
'file': '3/output.dart'
};
-const List SOURCE_MAP_BUNDLE = [
- MAP_WITH_SOURCE_LOCATION_AND_NAME_1,
- MAP_WITH_SOURCE_LOCATION_AND_NAME_2,
- MAP_WITH_SOURCE_LOCATION_AND_NAME_3,
+const _sourceMapBundle = [
+ _mapWithSourceLocationAndName1,
+ _mapWithSourceLocationAndName2,
+ _mapWithSourceLocationAndName3,
];
void main() {
test('parse', () {
- var mapping = parseJson(EXPECTED_MAP);
+ var mapping = parseJson(expectedMap);
check(outputVar1, mapping, inputVar1, false);
check(outputVar2, mapping, inputVar2, false);
check(outputFunction, mapping, inputFunction, false);
@@ -88,7 +88,7 @@
});
test('parse + json', () {
- var mapping = parse(jsonEncode(EXPECTED_MAP));
+ var mapping = parse(jsonEncode(expectedMap));
check(outputVar1, mapping, inputVar1, false);
check(outputVar2, mapping, inputVar2, false);
check(outputFunction, mapping, inputFunction, false);
@@ -96,7 +96,7 @@
});
test('parse with file', () {
- var mapping = parseJson(EXPECTED_MAP);
+ var mapping = parseJson(expectedMap);
check(outputVar1, mapping, inputVar1, true);
check(outputVar2, mapping, inputVar2, true);
check(outputFunction, mapping, inputFunction, true);
@@ -104,7 +104,7 @@
});
test('parse with no source location', () {
- SingleMapping map = parse(jsonEncode(MAP_WITH_NO_SOURCE_LOCATION));
+ var map = parse(jsonEncode(_mapWithNoSourceLocation)) as SingleMapping;
expect(map.lines.length, 1);
expect(map.lines.first.entries.length, 1);
var entry = map.lines.first.entries.first;
@@ -117,7 +117,7 @@
});
test('parse with source location and no name', () {
- SingleMapping map = parse(jsonEncode(MAP_WITH_SOURCE_LOCATION));
+ var map = parse(jsonEncode(_mapWithSourceLocation)) as SingleMapping;
expect(map.lines.length, 1);
expect(map.lines.first.entries.length, 1);
var entry = map.lines.first.entries.first;
@@ -130,8 +130,8 @@
});
test('parse with source location and missing names entry', () {
- SingleMapping map =
- parse(jsonEncode(MAP_WITH_SOURCE_LOCATION_AND_MISSING_NAMES));
+ var map = parse(jsonEncode(_mapWithSourceLocationAndMissingNames))
+ as SingleMapping;
expect(map.lines.length, 1);
expect(map.lines.first.entries.length, 1);
var entry = map.lines.first.entries.first;
@@ -144,7 +144,7 @@
});
test('parse with source location and name', () {
- SingleMapping map = parse(jsonEncode(MAP_WITH_SOURCE_LOCATION_AND_NAME));
+ var map = parse(jsonEncode(_mapWithSourceLocationAndName)) as SingleMapping;
expect(map.lines.length, 1);
expect(map.lines.first.entries.length, 1);
var entry = map.lines.first.entries.first;
@@ -157,15 +157,15 @@
});
test('parse with source root', () {
- var inputMap = Map.from(MAP_WITH_SOURCE_LOCATION);
+ var inputMap = Map.from(_mapWithSourceLocation);
inputMap['sourceRoot'] = '/pkg/';
var mapping = parseJson(inputMap) as SingleMapping;
- expect(mapping.spanFor(0, 0).sourceUrl, Uri.parse('/pkg/input.dart'));
+ expect(mapping.spanFor(0, 0)?.sourceUrl, Uri.parse('/pkg/input.dart'));
expect(
mapping
.spanForLocation(
SourceLocation(0, sourceUrl: Uri.parse('ignored.dart')))
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('/pkg/input.dart'));
var newSourceRoot = '/new/';
@@ -177,45 +177,47 @@
});
test('parse with map URL', () {
- var inputMap = Map.from(MAP_WITH_SOURCE_LOCATION);
+ var inputMap = Map.from(_mapWithSourceLocation);
inputMap['sourceRoot'] = 'pkg/';
var mapping = parseJson(inputMap, mapUrl: 'file:///path/to/map');
- expect(mapping.spanFor(0, 0).sourceUrl,
+ expect(mapping.spanFor(0, 0)?.sourceUrl,
Uri.parse('file:///path/to/pkg/input.dart'));
});
group('parse with bundle', () {
var mapping =
- parseJsonExtended(SOURCE_MAP_BUNDLE, mapUrl: 'file:///path/to/map');
+ parseJsonExtended(_sourceMapBundle, mapUrl: 'file:///path/to/map');
test('simple', () {
expect(
mapping
.spanForLocation(SourceLocation(0,
sourceUrl: Uri.file('/path/to/output.dart')))
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input1.dart'));
expect(
mapping
.spanForLocation(SourceLocation(0,
sourceUrl: Uri.file('/path/to/output2.dart')))
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input2.dart'));
expect(
mapping
.spanForLocation(SourceLocation(0,
sourceUrl: Uri.file('/path/to/3/output.dart')))
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input3.dart'));
expect(
- mapping.spanFor(0, 0, uri: 'file:///path/to/output.dart').sourceUrl,
+ mapping.spanFor(0, 0, uri: 'file:///path/to/output.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input1.dart'));
expect(
- mapping.spanFor(0, 0, uri: 'file:///path/to/output2.dart').sourceUrl,
+ mapping.spanFor(0, 0, uri: 'file:///path/to/output2.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input2.dart'));
expect(
- mapping.spanFor(0, 0, uri: 'file:///path/to/3/output.dart').sourceUrl,
+ mapping
+ .spanFor(0, 0, uri: 'file:///path/to/3/output.dart')
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input3.dart'));
});
@@ -224,36 +226,36 @@
mapping
.spanForLocation(SourceLocation(0,
sourceUrl: Uri.parse('package:1/output.dart')))
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input1.dart'));
expect(
mapping
.spanForLocation(SourceLocation(0,
sourceUrl: Uri.parse('package:2/output2.dart')))
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input2.dart'));
expect(
mapping
.spanForLocation(SourceLocation(0,
sourceUrl: Uri.parse('package:3/output.dart')))
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input3.dart'));
- expect(mapping.spanFor(0, 0, uri: 'package:1/output.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: 'package:1/output.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input1.dart'));
- expect(mapping.spanFor(0, 0, uri: 'package:2/output2.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: 'package:2/output2.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input2.dart'));
- expect(mapping.spanFor(0, 0, uri: 'package:3/output.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: 'package:3/output.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input3.dart'));
});
test('unmapped path', () {
- var span = mapping.spanFor(0, 0, uri: 'unmapped_output.dart');
+ var span = mapping.spanFor(0, 0, uri: 'unmapped_output.dart')!;
expect(span.sourceUrl, Uri.parse('unmapped_output.dart'));
expect(span.start.line, equals(0));
expect(span.start.column, equals(0));
- span = mapping.spanFor(10, 5, uri: 'unmapped_output.dart');
+ span = mapping.spanFor(10, 5, uri: 'unmapped_output.dart')!;
expect(span.sourceUrl, Uri.parse('unmapped_output.dart'));
expect(span.start.line, equals(10));
expect(span.start.column, equals(5));
@@ -264,46 +266,46 @@
});
test('incomplete paths', () {
- expect(mapping.spanFor(0, 0, uri: 'output.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: 'output.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input1.dart'));
- expect(mapping.spanFor(0, 0, uri: 'output2.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: 'output2.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input2.dart'));
- expect(mapping.spanFor(0, 0, uri: '3/output.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: '3/output.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input3.dart'));
});
test('parseExtended', () {
- var mapping = parseExtended(jsonEncode(SOURCE_MAP_BUNDLE),
+ var mapping = parseExtended(jsonEncode(_sourceMapBundle),
mapUrl: 'file:///path/to/map');
- expect(mapping.spanFor(0, 0, uri: 'output.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: 'output.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input1.dart'));
- expect(mapping.spanFor(0, 0, uri: 'output2.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: 'output2.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input2.dart'));
- expect(mapping.spanFor(0, 0, uri: '3/output.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: '3/output.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input3.dart'));
});
test('build bundle incrementally', () {
var mapping = MappingBundle();
- mapping.addMapping(parseJson(MAP_WITH_SOURCE_LOCATION_AND_NAME_1,
- mapUrl: 'file:///path/to/map'));
- expect(mapping.spanFor(0, 0, uri: 'output.dart').sourceUrl,
+ mapping.addMapping(parseJson(_mapWithSourceLocationAndName1,
+ mapUrl: 'file:///path/to/map') as SingleMapping);
+ expect(mapping.spanFor(0, 0, uri: 'output.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input1.dart'));
expect(mapping.containsMapping('output2.dart'), isFalse);
- mapping.addMapping(parseJson(MAP_WITH_SOURCE_LOCATION_AND_NAME_2,
- mapUrl: 'file:///path/to/map'));
+ mapping.addMapping(parseJson(_mapWithSourceLocationAndName2,
+ mapUrl: 'file:///path/to/map') as SingleMapping);
expect(mapping.containsMapping('output2.dart'), isTrue);
- expect(mapping.spanFor(0, 0, uri: 'output2.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: 'output2.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input2.dart'));
expect(mapping.containsMapping('3/output.dart'), isFalse);
- mapping.addMapping(parseJson(MAP_WITH_SOURCE_LOCATION_AND_NAME_3,
- mapUrl: 'file:///path/to/map'));
+ mapping.addMapping(parseJson(_mapWithSourceLocationAndName3,
+ mapUrl: 'file:///path/to/map') as SingleMapping);
expect(mapping.containsMapping('3/output.dart'), isTrue);
- expect(mapping.spanFor(0, 0, uri: '3/output.dart').sourceUrl,
+ expect(mapping.spanFor(0, 0, uri: '3/output.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input3.dart'));
});
@@ -315,58 +317,60 @@
mapping
.spanForLocation(SourceLocation(0,
sourceUrl: Uri.parse('http://localhost/output.dart')))
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input1.dart'));
expect(
mapping
.spanForLocation(SourceLocation(0,
sourceUrl: Uri.parse('http://localhost/output2.dart')))
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input2.dart'));
expect(
mapping
.spanForLocation(SourceLocation(0,
sourceUrl: Uri.parse('http://localhost/3/output.dart')))
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input3.dart'));
expect(
- mapping.spanFor(0, 0, uri: 'http://localhost/output.dart').sourceUrl,
+ mapping.spanFor(0, 0, uri: 'http://localhost/output.dart')?.sourceUrl,
Uri.parse('file:///path/to/pkg/input1.dart'));
expect(
- mapping.spanFor(0, 0, uri: 'http://localhost/output2.dart').sourceUrl,
+ mapping
+ .spanFor(0, 0, uri: 'http://localhost/output2.dart')
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input2.dart'));
expect(
mapping
.spanFor(0, 0, uri: 'http://localhost/3/output.dart')
- .sourceUrl,
+ ?.sourceUrl,
Uri.parse('file:///path/to/pkg/input3.dart'));
});
});
test('parse and re-emit', () {
for (var expected in [
- EXPECTED_MAP,
- MAP_WITH_NO_SOURCE_LOCATION,
- MAP_WITH_SOURCE_LOCATION,
- MAP_WITH_SOURCE_LOCATION_AND_NAME
+ expectedMap,
+ _mapWithNoSourceLocation,
+ _mapWithSourceLocation,
+ _mapWithSourceLocationAndName
]) {
- SingleMapping mapping = parseJson(expected);
+ var mapping = parseJson(expected) as SingleMapping;
expect(mapping.toJson(), equals(expected));
- mapping = parseJsonExtended(expected);
+ mapping = parseJsonExtended(expected) as SingleMapping;
expect(mapping.toJson(), equals(expected));
}
- var mapping = parseJsonExtended(SOURCE_MAP_BUNDLE) as MappingBundle;
- expect(mapping.toJson(), equals(SOURCE_MAP_BUNDLE));
+ var mapping = parseJsonExtended(_sourceMapBundle) as MappingBundle;
+ expect(mapping.toJson(), equals(_sourceMapBundle));
});
test('parse extensions', () {
- var map = Map.from(EXPECTED_MAP);
+ var map = Map.from(expectedMap);
map['x_foo'] = 'a';
map['x_bar'] = [3];
- SingleMapping mapping = parseJson(map);
+ var mapping = parseJson(map) as SingleMapping;
expect(mapping.toJson(), equals(map));
expect(mapping.extensions['x_foo'], equals('a'));
expect(mapping.extensions['x_bar'].first, equals(3));
@@ -391,19 +395,19 @@
group('from parse()', () {
group('are null', () {
test('with no sourcesContent field', () {
- var mapping = parseJson(EXPECTED_MAP) as SingleMapping;
+ var mapping = parseJson(expectedMap) as SingleMapping;
expect(mapping.files, equals([null]));
});
test('with null sourcesContent values', () {
- var map = Map.from(EXPECTED_MAP);
+ var map = Map.from(expectedMap);
map['sourcesContent'] = [null];
var mapping = parseJson(map) as SingleMapping;
expect(mapping.files, equals([null]));
});
test('with a too-short sourcesContent', () {
- var map = Map.from(EXPECTED_MAP);
+ var map = Map.from(expectedMap);
map['sourcesContent'] = [];
var mapping = parseJson(map) as SingleMapping;
expect(mapping.files, equals([null]));
@@ -411,11 +415,11 @@
});
test('are parsed from sourcesContent', () {
- var map = Map.from(EXPECTED_MAP);
+ var map = Map.from(expectedMap);
map['sourcesContent'] = ['hello, world!'];
var mapping = parseJson(map) as SingleMapping;
- var file = mapping.files[0];
+ var file = mapping.files[0]!;
expect(file.url, equals(Uri.parse('input.dart')));
expect(file.getText(0), equals('hello, world!'));
});
diff --git a/test/printer_test.dart b/test/printer_test.dart
index fc79913..3db321d 100644
--- a/test/printer_test.dart
+++ b/test/printer_test.dart
@@ -5,9 +5,11 @@
library test.printer_test;
import 'dart:convert';
-import 'package:test/test.dart';
+
import 'package:source_maps/source_maps.dart';
import 'package:source_span/source_span.dart';
+import 'package:test/test.dart';
+
import 'common.dart';
void main() {
@@ -23,15 +25,15 @@
..add('y) => ')
..mark(inputExpr)
..add('x + y;\n');
- expect(printer.text, OUTPUT);
- expect(printer.map, jsonEncode(EXPECTED_MAP));
+ expect(printer.text, outputContent);
+ expect(printer.map, jsonEncode(expectedMap));
});
test('printer projecting marks', () {
- var out = INPUT.replaceAll('long', '_s');
+ var out = inputContent.replaceAll('long', '_s');
var printer = Printer('output2.dart');
- var segments = INPUT.split('long');
+ var segments = inputContent.split('long');
expect(segments.length, 6);
printer
..mark(ispan(0, 0))
@@ -92,8 +94,8 @@
..add('y) => ', span: inputVar2)
..add('x + y;\n', span: inputExpr)
..build('output.dart');
- expect(printer.text, OUTPUT);
- expect(printer.map, jsonEncode(EXPECTED_MAP));
+ expect(printer.text, outputContent);
+ expect(printer.map, jsonEncode(expectedMap));
});
test('nested use', () {
@@ -105,13 +107,13 @@
..add(NestedPrinter()..add('y) => ', span: inputVar2))
..add('x + y;\n', span: inputExpr)
..build('output.dart');
- expect(printer.text, OUTPUT);
- expect(printer.map, jsonEncode(EXPECTED_MAP));
+ expect(printer.text, outputContent);
+ expect(printer.map, jsonEncode(expectedMap));
});
test('add indentation', () {
- var out = INPUT.replaceAll('long', '_s');
- var lines = INPUT.trim().split('\n');
+ var out = inputContent.replaceAll('long', '_s');
+ var lines = inputContent.trim().split('\n');
expect(lines.length, 7);
var printer = NestedPrinter();
for (var i = 0; i < lines.length; i++) {
diff --git a/test/refactor_test.dart b/test/refactor_test.dart
index 36b934a..9a403a1 100644
--- a/test/refactor_test.dart
+++ b/test/refactor_test.dart
@@ -60,7 +60,7 @@
txn.edit(34, 35, '___');
var printer = (txn.commit()..build(''));
var output = printer.text;
- var map = parse(printer.map);
+ var map = parse(printer.map!);
expect(output,
'0123456789\n0*23456789\n01*34__\n 789\na___cdefghij\nabcd*fghij\n');
@@ -197,7 +197,5 @@
});
}
-String _span(int line, int column, Mapping map, SourceFile file) {
- var span = map.spanFor(line - 1, column - 1, files: {'': file});
- return span == null ? null : span.message('').trim();
-}
+String? _span(int line, int column, Mapping map, SourceFile file) =>
+ map.spanFor(line - 1, column - 1, files: {'': file})?.message('').trim();
diff --git a/test/vlq_test.dart b/test/vlq_test.dart
index 6021519..5a4f02a 100644
--- a/test/vlq_test.dart
+++ b/test/vlq_test.dart
@@ -27,20 +27,20 @@
});
test('only 32-bit ints allowed', () {
- var max_int = pow(2, 31) - 1;
- var min_int = -pow(2, 31);
- _checkEncodeDecode(max_int - 1);
- _checkEncodeDecode(min_int + 1);
- _checkEncodeDecode(max_int);
- _checkEncodeDecode(min_int);
+ var maxInt = (pow(2, 31) as int) - 1;
+ var minInt = -(pow(2, 31) as int);
+ _checkEncodeDecode(maxInt - 1);
+ _checkEncodeDecode(minInt + 1);
+ _checkEncodeDecode(maxInt);
+ _checkEncodeDecode(minInt);
- expect(encodeVlq(min_int).join(''), 'hgggggE');
- expect(decodeVlq('hgggggE'.split('').iterator), min_int);
+ expect(encodeVlq(minInt).join(''), 'hgggggE');
+ expect(decodeVlq('hgggggE'.split('').iterator), minInt);
- expect(() => encodeVlq(max_int + 1), throwsA(anything));
- expect(() => encodeVlq(max_int + 2), throwsA(anything));
- expect(() => encodeVlq(min_int - 1), throwsA(anything));
- expect(() => encodeVlq(min_int - 2), throwsA(anything));
+ expect(() => encodeVlq(maxInt + 1), throwsA(anything));
+ expect(() => encodeVlq(maxInt + 2), throwsA(anything));
+ expect(() => encodeVlq(minInt - 1), throwsA(anything));
+ expect(() => encodeVlq(minInt - 2), throwsA(anything));
// if we allowed more than 32 bits, these would be the expected encodings
// for the large numbers above.