Add an option to ImportCrawler to start from an already-parsed document
diff --git a/CHANGELOG.md b/CHANGELOG.md
index f52cce3..6f0c931 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,8 @@
+#### 0.10.5
+  * Update `ImportCrawler` with support for pre-parsed initial documents. This
+    allows it to work better with other transformers in the same step (you can
+    pass in a modified document).
+
 #### 0.10.4+2
   * Fix `@CustomElement` test in internet explorer.
 
diff --git a/lib/build/import_crawler.dart b/lib/build/import_crawler.dart
index 5566efb..c655efc 100644
--- a/lib/build/import_crawler.dart
+++ b/lib/build/import_crawler.dart
@@ -30,7 +30,12 @@
   final BuildLogger _logger;
   final AssetId _primaryInputId;
 
-  ImportCrawler(this._transform, this._primaryInputId, this._logger);
+  // An optional pre-parsed document for the primary input, if available.
+  final Document _primaryDocument;
+
+  ImportCrawler(this._transform, this._primaryInputId, this._logger,
+      {Document primaryDocument})
+      : _primaryDocument = primaryDocument;
 
   /// Returns a post-ordered map of [AssetId]'s to [ImportData]. The [AssetId]'s
   /// represent an asset which was discovered via an html import, and the
@@ -40,13 +45,11 @@
     var documents = new LinkedHashMap<AssetId, ImportData>();
     var seen = new Set<AssetId>();
 
-    Future doCrawl(AssetId assetId, [Element import]) {
+    Future doCrawl(AssetId assetId, [Element import, Document document]) {
       if (seen.contains(assetId)) return null;
       seen.add(assetId);
 
-      return _transform.readInputAsString(assetId).then((html) {
-        var document = parseHtml(html, assetId.path);
-
+      Future crawlImports(Document document) {
         var imports = document.querySelectorAll('link[rel="import"]');
         var done =
             Future.forEach(imports, (i) => doCrawl(_importId(assetId, i), i));
@@ -55,14 +58,23 @@
         return done.then((_) {
           documents[assetId] = new ImportData(document, import);
         });
-      }).catchError((error) {
-        var span;
-        if (import != null) span = import.sourceSpan;
-        _logger.error(inlineImportFail.create({'error': error}), span: span);
-      });
+      }
+
+      if (document != null) {
+        return crawlImports(document);
+      } else {
+        return _transform.readInputAsString(assetId).then((html) {
+          return crawlImports(parseHtml(html, assetId.path));
+        }).catchError((error) {
+          var span;
+          if (import != null) span = import.sourceSpan;
+          _logger.error(inlineImportFail.create({'error': error}), span: span);
+        });
+      }
     }
 
-    return doCrawl(_primaryInputId).then((_) => documents);
+    return
+      doCrawl(_primaryInputId, null, _primaryDocument).then((_) => documents);
   }
 
   AssetId _importId(AssetId source, Element import) {
diff --git a/pubspec.yaml b/pubspec.yaml
index 3e6cf30..d11d54e 100644
--- a/pubspec.yaml
+++ b/pubspec.yaml
@@ -1,5 +1,5 @@
 name: web_components
-version: 0.10.4+2
+version: 0.10.5
 author: Polymer.dart Authors <web-ui-dev@dartlang.org>
 homepage: https://www.dartlang.org/polymer-dart/
 description: >
diff --git a/test/build/import_crawler_test.dart b/test/build/import_crawler_test.dart
index 776ee1c..c140c09 100644
--- a/test/build/import_crawler_test.dart
+++ b/test/build/import_crawler_test.dart
@@ -3,24 +3,42 @@
 // BSD-style license that can be found in the LICENSE file.
 library web_components.test.build.import_crawler_test;
 
+import 'dart:async';
 import 'package:barback/barback.dart';
 import 'package:code_transformers/tests.dart';
 import 'package:code_transformers/messages/build_logger.dart';
+import 'package:html5lib/dom.dart' show Document;
+import 'package:web_components/build/common.dart';
 import 'package:web_components/build/import_crawler.dart';
 import 'package:unittest/compact_vm_config.dart';
 
 class _TestTransformer extends Transformer {
   final String _entryPoint;
   Map<AssetId, ImportData> documents;
+  final bool _preParseDocument;
 
-  _TestTransformer(this._entryPoint);
+  _TestTransformer(this._entryPoint, [this._preParseDocument = false]);
 
   isPrimary(AssetId id) => id.path == _entryPoint;
 
   apply(Transform transform) {
     var primaryInput = transform.primaryInput;
     var logger = new BuildLogger(transform, primaryId: primaryInput.id);
-    var crawler = new ImportCrawler(transform, primaryInput.id, logger);
+    if (_preParseDocument) {
+      return primaryInput.readAsString().then((html) {
+        var document = parseHtml(html, primaryInput.id.path);
+        return crawlDocument(transform, logger, document);
+      });
+    } else {
+      return crawlDocument(transform, logger);
+    }
+  }
+
+  Future crawlDocument(
+      Transform transform, BuildLogger logger, [Document document]) {
+    var primaryInput = transform.primaryInput;
+    var crawler = new ImportCrawler(
+        transform, primaryInput.id, logger, primaryDocument: document);
     return crawler.crawlImports().then((docs) {
       documents = docs;
       transform.addOutput(new Asset.fromString(
@@ -31,8 +49,13 @@
 
 main() {
   useCompactVMConfiguration();
+  runTests([[new _TestTransformer('web/index.html')]]);
+  // Test with a pre-parsed original document as well.
+  runTests([[new _TestTransformer('web/index.html', true)]]);
+}
 
-  testPhases('basic', [[new _TestTransformer('web/index.html')]], {
+runTests(List<List<Transformer>> phases) {
+  testPhases('basic', phases, {
     'a|web/index.html': '''
       <link rel="import" href="foo.html">
       <link rel="import" href="packages/a/foo.html">
@@ -53,7 +76,7 @@
       ''',
   }, [], StringFormatter.noNewlinesOrSurroundingWhitespace);
 
-  testPhases('cycle', [[new _TestTransformer('web/index.html')]], {
+  testPhases('cycle', phases, {
     'a|web/index.html': '''
       <link rel="import" href="packages/a/foo.html">
       <div>a|web/index.html</div>
@@ -70,7 +93,7 @@
       ''',
   }, [], StringFormatter.noNewlinesOrSurroundingWhitespace);
 
-  testPhases('deep imports', [[new _TestTransformer('web/index.html')]], {
+  testPhases('deep imports', phases, {
     'a|web/index.html': '''
       <link rel="import" href="packages/a/foo.html">
       <div>a|web/index.html</div>